/Zephyr-latest/tests/subsys/ipc/ipc_sessions/interoperability/
  icmsg_v1.c
     31  static void mbox_callback_process(struct icmsg_data_t *dev_data);
     35  struct icmsg_data_t *dev_data)  in mbox_deinit() argument
     50  (void)k_work_cancel(&dev_data->mbox_work);  in mbox_deinit()
     51  (void)k_work_cancel_delayable(&dev_data->notify_work);  in mbox_deinit()
     57  static bool is_endpoint_ready(struct icmsg_data_t *dev_data)  in is_endpoint_ready() argument
     59  return atomic_get(&dev_data->state) == ICMSG_STATE_READY;  in is_endpoint_ready()
     66  struct icmsg_data_t *dev_data =  in notify_process() local
     69  (void)mbox_send_dt(&dev_data->cfg->mbox_tx, NULL);  in notify_process()
     71  atomic_t state = atomic_get(&dev_data->state);  in notify_process()
     82  static void notify_process(struct icmsg_data_t *dev_data)  in notify_process() argument
     [all …]
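Most entries in this listing share the shape visible above: a per-instance context struct conventionally named dev_data that owns its k_work items and an atomic state field, so a work handler can recover its context with CONTAINER_OF and bail out while the endpoint is not ready. The following is a minimal sketch of that pattern only; the struct, state values and handler names are placeholders, not the actual icmsg definitions.

    /* Sketch only: placeholder names, not the icmsg implementation. */
    #include <zephyr/kernel.h>
    #include <zephyr/sys/atomic.h>

    enum example_state { EXAMPLE_STATE_OFF, EXAMPLE_STATE_READY };

    struct example_data {
        struct k_work mbox_work;
        atomic_t state;
    };

    static void mbox_work_handler(struct k_work *work)
    {
        /* Recover the owning instance from the embedded work item. */
        struct example_data *dev_data =
            CONTAINER_OF(work, struct example_data, mbox_work);

        if (atomic_get(&dev_data->state) != EXAMPLE_STATE_READY) {
            return; /* endpoint not bound yet, nothing to process */
        }

        /* ... process pending messages for this instance ... */
    }

    static void example_init(struct example_data *dev_data)
    {
        k_work_init(&dev_data->mbox_work, mbox_work_handler);
        atomic_set(&dev_data->state, EXAMPLE_STATE_READY);
    }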
/Zephyr-latest/subsys/usb/device/class/
  cdc_acm.c
    155  struct cdc_acm_dev_data_t *dev_data;  in cdc_acm_class_handle_req() local
    168  dev_data = CONTAINER_OF(common, struct cdc_acm_dev_data_t, common);  in cdc_acm_class_handle_req()
    173  rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);  in cdc_acm_class_handle_req()
    174  memcpy(&dev_data->line_coding, *data,  in cdc_acm_class_handle_req()
    175  sizeof(dev_data->line_coding));  in cdc_acm_class_handle_req()
    176  new_rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);  in cdc_acm_class_handle_req()
    179  dev_data->line_coding.bCharFormat,  in cdc_acm_class_handle_req()
    180  dev_data->line_coding.bParityType,  in cdc_acm_class_handle_req()
    181  dev_data->line_coding.bDataBits);  in cdc_acm_class_handle_req()
    183  if (rate != new_rate && dev_data->rate_cb != NULL) {  in cdc_acm_class_handle_req()
    [all …]
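The cdc_acm.c matches show how a USB class request handler reaches its per-instance data (CONTAINER_OF on an embedded common member) and how the little-endian dwDTERate field is converted with sys_le32_to_cpu() before and after the update to detect a baud-rate change. The sketch below reproduces only that rate-change detection under assumed names; the struct is a simplified stand-in and rate_cb is a hypothetical callback, not the real class API.

    /* Sketch: simplified stand-in for the CDC ACM line-coding update. */
    #include <zephyr/kernel.h>
    #include <zephyr/sys/byteorder.h>
    #include <string.h>

    struct example_line_coding {
        uint32_t dwDTERate;   /* stored little-endian, as received from the host */
        uint8_t bCharFormat;
        uint8_t bParityType;
        uint8_t bDataBits;
    };

    struct example_acm_data {
        struct example_line_coding line_coding;
        void (*rate_cb)(uint32_t new_rate);  /* placeholder callback */
    };

    static void example_set_line_coding(struct example_acm_data *dev_data,
                                        const uint8_t *payload, size_t len)
    {
        uint32_t rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);
        uint32_t new_rate;

        if (len < sizeof(dev_data->line_coding)) {
            return;
        }

        memcpy(&dev_data->line_coding, payload, sizeof(dev_data->line_coding));
        new_rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);

        if (rate != new_rate && dev_data->rate_cb != NULL) {
            dev_data->rate_cb(new_rate);
        }
    }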
/Zephyr-latest/subsys/ipc/ipc_service/lib/
  icmsg.c
     64  struct icmsg_data_t *dev_data)  in mbox_deinit() argument
     79  (void)k_work_cancel(&dev_data->mbox_work);  in mbox_deinit()
     90  static inline int reserve_tx_buffer_if_unused(struct icmsg_data_t *dev_data)  in reserve_tx_buffer_if_unused() argument
     93  return k_mutex_lock(&dev_data->tx_lock, SHMEM_ACCESS_TO);  in reserve_tx_buffer_if_unused()
     99  static inline int release_tx_buffer(struct icmsg_data_t *dev_data)  in release_tx_buffer() argument
    102  return k_mutex_unlock(&dev_data->tx_lock);  in release_tx_buffer()
    108  static uint32_t data_available(struct icmsg_data_t *dev_data)  in data_available() argument
    110  return pbuf_read(dev_data->rx_pb, NULL, 0);  in data_available()
    114  static void submit_mbox_work(struct icmsg_data_t *dev_data)  in submit_mbox_work() argument
    116  if (k_work_submit_to_queue(workq, &dev_data->mbox_work) < 0) {  in submit_mbox_work()
    [all …]
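In icmsg.c the single shared-memory TX buffer is guarded by a k_mutex stored in the instance data, taken with a bounded timeout so a stuck peer cannot block the sender forever. A minimal sketch of that reserve/release discipline, with an assumed timeout value and placeholder names:

    /* Sketch: placeholder names and timeout, not the icmsg source. */
    #include <zephyr/kernel.h>

    #define EXAMPLE_SHMEM_ACCESS_TO K_MSEC(100)  /* assumed bound */

    struct example_icmsg_data {
        struct k_mutex tx_lock;
    };

    static void example_instance_init(struct example_icmsg_data *dev_data)
    {
        k_mutex_init(&dev_data->tx_lock);
    }

    static int example_reserve_tx_buffer(struct example_icmsg_data *dev_data)
    {
        return k_mutex_lock(&dev_data->tx_lock, EXAMPLE_SHMEM_ACCESS_TO);
    }

    static int example_release_tx_buffer(struct example_icmsg_data *dev_data)
    {
        return k_mutex_unlock(&dev_data->tx_lock);
    }

    static int example_send(struct example_icmsg_data *dev_data,
                            const void *msg, size_t len)
    {
        int ret = example_reserve_tx_buffer(dev_data);

        if (ret < 0) {
            return ret; /* buffer still held by a previous transfer */
        }

        ARG_UNUSED(msg);
        ARG_UNUSED(len);
        /* ... copy msg into the shared packet buffer and signal the peer ... */

        return example_release_tx_buffer(dev_data);
    }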
/Zephyr-latest/drivers/ethernet/
  eth_ivshmem.c
     58  struct eth_ivshmem_dev_data *dev_data = dev->data;  in eth_ivshmem_get_stats() local
     60  return &dev_data->stats;  in eth_ivshmem_get_stats()
     66  struct eth_ivshmem_dev_data *dev_data = dev->data;  in eth_ivshmem_start() local
     68  dev_data->enabled = true;  in eth_ivshmem_start()
     71  k_poll_signal_raise(&dev_data->poll_signal, 0);  in eth_ivshmem_start()
     78  struct eth_ivshmem_dev_data *dev_data = dev->data;  in eth_ivshmem_stop() local
     80  dev_data->enabled = false;  in eth_ivshmem_stop()
     83  k_poll_signal_raise(&dev_data->poll_signal, 0);  in eth_ivshmem_stop()
     96  struct eth_ivshmem_dev_data *dev_data = dev->data;  in eth_ivshmem_send() local
    101  int res = eth_ivshmem_queue_tx_get_buff(&dev_data->ivshmem_queue, &data, len);  in eth_ivshmem_send()
    [all …]
  eth_esp32.c
     66  struct eth_esp32_dev_data *const dev_data = dev->data;  in eth_esp32_set_config() local
     71  memcpy(dev_data->mac_addr, config->mac_address.addr, 6);  in eth_esp32_set_config()
     72  emac_hal_set_address(&dev_data->hal, dev_data->mac_addr);  in eth_esp32_set_config()
     73  net_if_set_link_addr(dev_data->iface, dev_data->mac_addr,  in eth_esp32_set_config()
     74  sizeof(dev_data->mac_addr),  in eth_esp32_set_config()
     87  struct eth_esp32_dev_data *dev_data = dev->data;  in eth_esp32_send() local
     90  if (net_pkt_read(pkt, dev_data->txb, len)) {  in eth_esp32_send()
     94  uint32_t sent_len = emac_hal_transmit_frame(&dev_data->hal, dev_data->txb, len);  in eth_esp32_send()
    102  struct eth_esp32_dev_data *const dev_data, uint32_t *frames_remaining)  in eth_esp32_rx() argument
    106  &dev_data->hal, dev_data->rxb, sizeof(dev_data->rxb),  in eth_esp32_rx()
    [all …]
  eth_xlnx_gem.c
    227  struct eth_xlnx_gem_dev_data *dev_data = dev->data;  in eth_xlnx_gem_iface_init() local
    230  dev_data->iface = iface;  in eth_xlnx_gem_iface_init()
    231  net_if_set_link_addr(iface, dev_data->mac_addr, 6, NET_LINK_ETHERNET);  in eth_xlnx_gem_iface_init()
    239  k_work_init(&dev_data->tx_done_work, eth_xlnx_gem_tx_done_work);  in eth_xlnx_gem_iface_init()
    240  k_work_init(&dev_data->rx_pend_work, eth_xlnx_gem_rx_pending_work);  in eth_xlnx_gem_iface_init()
    241  k_work_init_delayable(&dev_data->phy_poll_delayed_work,  in eth_xlnx_gem_iface_init()
    245  k_sem_init(&dev_data->tx_done_sem, 0, 1);  in eth_xlnx_gem_iface_init()
    251  k_sem_init(&dev_data->txbd_ring.ring_sem, 1, 1);  in eth_xlnx_gem_iface_init()
    258  k_work_reschedule(&dev_data->phy_poll_delayed_work, K_NO_WAIT);  in eth_xlnx_gem_iface_init()
    272  struct eth_xlnx_gem_dev_data *dev_data = dev->data;  in eth_xlnx_gem_isr() local
    [all …]
  eth_stellaris.c
     43  struct eth_stellaris_runtime *dev_data = dev->data;  in eth_stellaris_flush() local
     45  if (dev_data->tx_pos != 0) {  in eth_stellaris_flush()
     46  sys_write32(dev_data->tx_word, REG_MACDATA);  in eth_stellaris_flush()
     47  dev_data->tx_pos = 0;  in eth_stellaris_flush()
     48  dev_data->tx_word = 0U;  in eth_stellaris_flush()
     54  struct eth_stellaris_runtime *dev_data = dev->data;  in eth_stellaris_send_byte() local
     56  dev_data->tx_word |= byte << (dev_data->tx_pos * 8);  in eth_stellaris_send_byte()
     57  dev_data->tx_pos++;  in eth_stellaris_send_byte()
     58  if (dev_data->tx_pos == 4) {  in eth_stellaris_send_byte()
     59  sys_write32(dev_data->tx_word, REG_MACDATA);  in eth_stellaris_send_byte()
    [all …]
  phy_xlnx_gem.c
    200  struct eth_xlnx_gem_dev_data *dev_data = dev->data;  in phy_xlnx_gem_marvell_alaska_reset() local
    209  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_reset()
    212  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_reset()
    217  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_reset()
    222  dev->name, dev_data->phy_addr);  in phy_xlnx_gem_marvell_alaska_reset()
    235  struct eth_xlnx_gem_dev_data *dev_data = dev->data;  in phy_xlnx_gem_marvell_alaska_cfg() local
    247  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_cfg()
    250  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_cfg()
    254  if ((dev_data->phy_id & PHY_MRVL_PHY_ID_MODEL_MASK) ==  in phy_xlnx_gem_marvell_alaska_cfg()
    267  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,  in phy_xlnx_gem_marvell_alaska_cfg()
    [all …]
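All of the drivers/ethernet matches rely on the standard Zephyr device-model idiom: per-instance runtime state is reached through dev->data, so every API entry point begins by casting that pointer back to the driver's own data struct. The sketch below shows that idiom together with the start/stop shape seen in eth_ivshmem.c, where a k_poll_signal wakes a service thread after the enabled flag changes; struct and field names are placeholders rather than the actual driver definitions.

    /* Sketch: placeholder data struct, not a real Ethernet driver. */
    #include <zephyr/device.h>
    #include <zephyr/kernel.h>

    struct example_eth_data {
        bool enabled;
        struct k_poll_signal poll_signal;
    };

    static void example_eth_data_init(struct example_eth_data *dev_data)
    {
        k_poll_signal_init(&dev_data->poll_signal);
    }

    static int example_eth_start(const struct device *dev)
    {
        struct example_eth_data *dev_data = dev->data;

        dev_data->enabled = true;
        /* Wake the RX/TX service thread so it notices the state change. */
        k_poll_signal_raise(&dev_data->poll_signal, 0);

        return 0;
    }

    static int example_eth_stop(const struct device *dev)
    {
        struct example_eth_data *dev_data = dev->data;

        dev_data->enabled = false;
        k_poll_signal_raise(&dev_data->poll_signal, 0);

        return 0;
    }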
/Zephyr-latest/drivers/lora/
  sx12xx_common.c
     41  } dev_data;  variable
    103  struct k_poll_signal *sig = dev_data.operation_done;  in sx12xx_ev_rx_done()
    106  if (dev_data.async_rx_cb) {  in sx12xx_ev_rx_done()
    110  dev_data.async_rx_cb(dev_data.dev, payload, size, rssi, snr,  in sx12xx_ev_rx_done()
    111  dev_data.async_user_data);  in sx12xx_ev_rx_done()
    120  if (!atomic_cas(&dev_data.modem_usage, STATE_BUSY, STATE_CLEANUP)) {  in sx12xx_ev_rx_done()
    134  if (size < *dev_data.rx_params.size) {  in sx12xx_ev_rx_done()
    135  *dev_data.rx_params.size = size;  in sx12xx_ev_rx_done()
    138  memcpy(dev_data.rx_params.buf, payload,  in sx12xx_ev_rx_done()
    139  *dev_data.rx_params.size;  in sx12xx_ev_rx_done()
    [all …]
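Unlike the per-instance drivers above, sx12xx_common.c keeps one file-scope dev_data variable (a single radio per system) and uses atomic_cas() to move the modem between states, so a duplicate or late event cannot be processed twice. A minimal sketch of that guard with illustrative state values:

    /* Sketch: illustrative states and fields, not the sx12xx definitions. */
    #include <zephyr/kernel.h>
    #include <zephyr/sys/atomic.h>

    enum { EXAMPLE_STATE_FREE, EXAMPLE_STATE_BUSY, EXAMPLE_STATE_CLEANUP };

    static struct {
        atomic_t modem_usage;
        struct k_poll_signal *operation_done;
    } dev_data;

    static void example_rx_done(void)
    {
        /* Only the first transition out of BUSY wins; a second event bails. */
        if (!atomic_cas(&dev_data.modem_usage,
                        EXAMPLE_STATE_BUSY, EXAMPLE_STATE_CLEANUP)) {
            return;
        }

        /* ... copy the received payload out for the waiter ... */

        if (dev_data.operation_done != NULL) {
            k_poll_signal_raise(dev_data.operation_done, 0);
        }

        atomic_set(&dev_data.modem_usage, EXAMPLE_STATE_FREE);
    }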
/Zephyr-latest/drivers/mspi/
  mspi_dw.c
    122  struct mspi_dw_data *dev_data = dev->data;  in tx_data() local
    124  const uint8_t *buf_pos = dev_data->buf_pos;  in tx_data()
    125  const uint8_t *buf_end = dev_data->buf_end;  in tx_data()
    135  uint8_t bytes_per_frame_exp = dev_data->bytes_per_frame_exp;  in tx_data()
    163  dev_data->buf_pos = (uint8_t *)buf_pos;  in tx_data()
    168  struct mspi_dw_data *dev_data = dev->data;  in make_rx_cycles() local
    170  uint16_t dummy_bytes = dev_data->dummy_bytes;  in make_rx_cycles()
    180  dev_data->dummy_bytes = 0;  in make_rx_cycles()
    190  dev_data->dummy_bytes = dummy_bytes;  in make_rx_cycles()
    197  struct mspi_dw_data *dev_data = dev->data;  in read_rx_fifo() local
    [all …]
/Zephyr-latest/drivers/clock_control/
  clock_control_nrf2_hfxo.c
     57  struct dev_data_hfxo *dev_data =  in hfxo_start_up_timer_handler() local
     69  if (dev_data->notify) {  in hfxo_start_up_timer_handler()
     70  dev_data->notify(&dev_data->mgr, 0);  in hfxo_start_up_timer_handler()
     74  static void start_hfxo(struct dev_data_hfxo *dev_data)  in start_hfxo() argument
     77  soc_lrcconf_poweron_request(&dev_data->hfxo_node, NRF_LRCCONF_POWER_MAIN);  in start_hfxo()
     81  static void request_hfxo(struct dev_data_hfxo *dev_data)  in request_hfxo() argument
     87  if (dev_data->request_count == 0) {  in request_hfxo()
     88  start_hfxo(dev_data);  in request_hfxo()
     91  dev_data->request_count++;  in request_hfxo()
     94  start_hfxo(dev_data);  in request_hfxo()
    [all …]
  clock_control_nrf2_global_hsfll.c
     68  struct global_hsfll_dev_data *dev_data = dev->data;  in global_hsfll_find_mgr() local
     73  return &dev_data->clk_cfg.onoff[0].mgr;  in global_hsfll_find_mgr()
     90  return &dev_data->clk_cfg.onoff[i].mgr;  in global_hsfll_find_mgr()
    173  struct global_hsfll_dev_data *dev_data =  in global_hsfll_work_handler() local
    175  const struct device *dev = dev_data->dev;  in global_hsfll_work_handler()
    184  err = nrfs_gdfs_request_freq(target_freq, dev_data);  in global_hsfll_work_handler()
    186  clock_config_update_end(&dev_data->clk_cfg, -EIO);  in global_hsfll_work_handler()
    190  k_work_schedule(&dev_data->timeout_dwork, GLOBAL_HSFLL_FREQ_REQ_TIMEOUT);  in global_hsfll_work_handler()
    195  struct global_hsfll_dev_data *dev_data =  in global_hsfll_evt_handler() local
    199  k_work_cancel_delayable(&dev_data->timeout_dwork);  in global_hsfll_evt_handler()
    [all …]
  clock_control_nrf2_lfclk.c
     70  struct lfclk_dev_data *dev_data = context;  in clock_evt_handler() local
     73  k_timer_stop(&dev_data->timer);  in clock_evt_handler()
     79  clock_config_update_end(&dev_data->clk_cfg, status);  in clock_evt_handler()
     84  struct lfclk_dev_data *dev_data =  in lfclk_update_timeout_handler() local
     87  clock_config_update_end(&dev_data->clk_cfg, -ETIMEDOUT);  in lfclk_update_timeout_handler()
     92  struct lfclk_dev_data *dev_data =  in lfclk_work_handler() local
    100  dev_data);  in lfclk_work_handler()
    102  clock_config_update_end(&dev_data->clk_cfg, -EIO);  in lfclk_work_handler()
    104  k_timer_start(&dev_data->timer, NRFS_CLOCK_TIMEOUT, K_NO_WAIT);  in lfclk_work_handler()
    111  struct lfclk_dev_data *dev_data = dev->data;  in lfclk_find_mgr() local
    [all …]
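The request_hfxo() matches in the first clock_control entry show a request-counting scheme: the oscillator is started only when the count goes from zero to one, and later requests just increment the counter. The sketch below illustrates that idea under assumed names; the irq_lock()-based protection here is an assumption for the example, since the matches above do not show how the real driver serializes the counter.

    /* Sketch: assumed locking and placeholder names. */
    #include <zephyr/kernel.h>

    struct example_hfxo_data {
        unsigned int request_count;
    };

    static void example_start_hw(struct example_hfxo_data *dev_data)
    {
        ARG_UNUSED(dev_data);
        /* ... power the oscillator on and arm a start-up timer ... */
    }

    static void example_request(struct example_hfxo_data *dev_data)
    {
        unsigned int key = irq_lock();

        if (dev_data->request_count == 0) {
            example_start_hw(dev_data);
        }
        dev_data->request_count++;

        irq_unlock(key);
    }

    static void example_release(struct example_hfxo_data *dev_data)
    {
        unsigned int key = irq_lock();

        if (dev_data->request_count > 0 && --dev_data->request_count == 0) {
            /* ... stop the oscillator once the last user is gone ... */
        }

        irq_unlock(key);
    }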
/Zephyr-latest/drivers/flash/
  flash_mspi_nor.c
     54  struct flash_mspi_nor_data *dev_data = dev->data;  in acquire() local
     57  k_sem_take(&dev_data->acquired, K_FOREVER);  in acquire()
     78  k_sem_give(&dev_data->acquired);  in acquire()
     85  struct flash_mspi_nor_data *dev_data = dev->data;  in release() local
     92  k_sem_give(&dev_data->acquired);  in release()
    111  struct flash_mspi_nor_data *dev_data = dev->data;  in api_read() local
    129  dev_data->xfer.cmd_length = 2;  in api_read()
    130  dev_data->xfer.addr_length = 4;  in api_read()
    131  dev_data->xfer.rx_dummy = 20;  in api_read()
    132  dev_data->packet.dir = MSPI_RX;  in api_read()
    [all …]
/Zephyr-latest/drivers/serial/
  uart_sam0.c
    138  struct uart_sam0_dev_data *const dev_data =  in uart_sam0_dma_tx_done() local
    140  const struct uart_sam0_dev_cfg *const cfg = dev_data->cfg;  in uart_sam0_dma_tx_done()
    147  static int uart_sam0_tx_halt(struct uart_sam0_dev_data *dev_data)  in uart_sam0_tx_halt() argument
    149  const struct uart_sam0_dev_cfg *const cfg = dev_data->cfg;  in uart_sam0_tx_halt()
    151  size_t tx_active = dev_data->tx_len;  in uart_sam0_tx_halt()
    157  .buf = dev_data->tx_buf,  in uart_sam0_tx_halt()
    162  dev_data->tx_buf = NULL;  in uart_sam0_tx_halt()
    163  dev_data->tx_len = 0U;  in uart_sam0_tx_halt()
    174  if (dev_data->async_cb) {  in uart_sam0_tx_halt()
    175  dev_data->async_cb(dev_data->dev,  in uart_sam0_tx_halt()
    [all …]
  uart_bt.c
     49  struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data;  in bt_notif_enabled() local
     51  (void)atomic_set(&dev_data->bt.enabled, enabled ? 1 : 0);  in bt_notif_enabled()
     55  if (!ring_buf_is_empty(dev_data->uart.tx_ringbuf)) {  in bt_notif_enabled()
     56  k_work_reschedule_for_queue(&nus_work_queue, &dev_data->uart.tx_work, K_NO_WAIT);  in bt_notif_enabled()
     68  struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data;  in bt_received() local
     69  struct ring_buf *ringbuf = dev_data->uart.rx_ringbuf;  in bt_received()
     80  k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work);  in bt_received()
    117  struct uart_bt_data *dev_data = CONTAINER_OF(work, struct uart_bt_data, uart.cb_work);  in cb_work_handler() local
    119  if (dev_data->uart.callback.cb) {  in cb_work_handler()
    120  dev_data->uart.callback.cb(  in cb_work_handler()
    [all …]
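uart_bt.c pushes received bytes into a ring buffer and defers the heavier work (user callback, TX over notifications) to a dedicated work queue, recovering dev_data in the handler via CONTAINER_OF. The sketch below keeps only the ring-buffer-plus-work-queue skeleton; the buffer size, stack size, priority and names are assumptions, not the driver's values.

    /* Sketch: assumed sizes and names, not the uart_bt implementation. */
    #include <zephyr/kernel.h>
    #include <zephyr/sys/ring_buffer.h>

    RING_BUF_DECLARE(example_rx_ringbuf, 256);
    K_THREAD_STACK_DEFINE(example_workq_stack, 1024);

    static struct k_work_q example_work_queue;
    static struct k_work example_cb_work;

    static void example_cb_work_handler(struct k_work *work)
    {
        uint8_t byte;

        ARG_UNUSED(work);

        /* Drain whatever the Bluetooth RX path queued up. */
        while (ring_buf_get(&example_rx_ringbuf, &byte, 1) == 1) {
            /* ... hand the byte to the registered UART callback ... */
        }
    }

    static void example_uart_bt_init(void)
    {
        k_work_queue_init(&example_work_queue);
        k_work_queue_start(&example_work_queue, example_workq_stack,
                           K_THREAD_STACK_SIZEOF(example_workq_stack), 5, NULL);
        k_work_init(&example_cb_work, example_cb_work_handler);
    }

    static void example_bt_received(const uint8_t *data, uint16_t len)
    {
        uint32_t written = ring_buf_put(&example_rx_ringbuf, data, len);

        if (written < len) {
            /* Ring buffer full: the excess bytes are dropped (RX overrun). */
        }

        k_work_submit_to_queue(&example_work_queue, &example_cb_work);
    }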
/Zephyr-latest/subsys/usb/device/class/hid/
  core.c
    153  static int hid_on_get_idle(struct hid_device_info *dev_data,  in hid_on_get_idle() argument
    173  *data = &dev_data->idle_rate[0];  in hid_on_get_idle()
    174  *len = sizeof(dev_data->idle_rate[0]);  in hid_on_get_idle()
    176  *data = &dev_data->idle_rate[report_id - 1];  in hid_on_get_idle()
    177  *len = sizeof(dev_data->idle_rate[report_id - 1]);  in hid_on_get_idle()
    186  static int hid_on_get_report(struct hid_device_info *dev_data,  in hid_on_get_report() argument
    197  static int hid_on_get_protocol(struct hid_device_info *dev_data,  in hid_on_get_protocol() argument
    202  uint32_t size = sizeof(dev_data->protocol);  in hid_on_get_protocol()
    209  LOG_DBG("Get Protocol: %d", dev_data->protocol);  in hid_on_get_protocol()
    211  *data = &dev_data->protocol;  in hid_on_get_protocol()
    [all …]
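The hid_on_get_idle() matches show the report-ID mapping used for the idle-rate bookkeeping: report ID 0 addresses the first slot of the idle_rate array, while non-zero IDs map to idle_rate[report_id - 1]. A small sketch of that lookup with an assumed report count and simplified struct:

    /* Sketch: EXAMPLE_NUM_REPORTS and the struct are stand-ins. */
    #include <zephyr/kernel.h>
    #include <errno.h>

    #define EXAMPLE_NUM_REPORTS 4

    struct example_hid_info {
        uint8_t idle_rate[EXAMPLE_NUM_REPORTS];
    };

    static int example_get_idle(struct example_hid_info *dev_data,
                                uint8_t report_id, uint8_t **data, uint32_t *len)
    {
        if (report_id > EXAMPLE_NUM_REPORTS) {
            return -ENOTSUP;
        }

        if (report_id == 0U) {
            /* Report ID 0: the common idle rate lives in the first slot. */
            *data = &dev_data->idle_rate[0];
        } else {
            *data = &dev_data->idle_rate[report_id - 1];
        }
        *len = sizeof(dev_data->idle_rate[0]);

        return 0;
    }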
/Zephyr-latest/drivers/i2c/
  i2c_andes_atciic100.c
     44  struct i2c_atciic100_dev_data_t *dev_data = dev->data;  in i2c_atciic100_default_control() local
     47  k_sem_init(&dev_data->bus_lock, 1, 1);  in i2c_atciic100_default_control()
     48  k_sem_init(&dev_data->device_sync_sem, 0, 1);  in i2c_atciic100_default_control()
     60  dev_data->fifo_depth = 2;  in i2c_atciic100_default_control()
     63  dev_data->fifo_depth = 4;  in i2c_atciic100_default_control()
     66  dev_data->fifo_depth = 8;  in i2c_atciic100_default_control()
     69  dev_data->fifo_depth = 16;  in i2c_atciic100_default_control()
     88  dev_data->driver_state = I2C_DRV_INIT;  in i2c_atciic100_default_control()
     89  dev_data->status.mode = 0;  in i2c_atciic100_default_control()
     90  dev_data->status.arbitration_lost = 0;  in i2c_atciic100_default_control()
    [all …]
/Zephyr-latest/drivers/display/
  display_mcux_elcdif.c
     76  struct mcux_elcdif_data *dev_data = dev->data;  in mcux_elcdif_write() local
     83  __ASSERT((dev_data->pixel_bytes * desc->pitch * desc->height) <= desc->buf_size,  in mcux_elcdif_write()
     91  LOG_DBG("Setting FB from %p->%p", (void *)dev_data->active_fb, (void *)buf);  in mcux_elcdif_write()
     92  dev_data->active_fb = buf;  in mcux_elcdif_write()
    100  LOG_DBG("Setting FB from %p->%p", (void *)dev_data->active_fb, (void *)buf);  in mcux_elcdif_write()
    101  dev_data->active_fb = buf;  in mcux_elcdif_write()
    108  } else if (dev_data->active_fb != dev_data->fb[dev_data->next_idx]) {  in mcux_elcdif_write()
    114  src = dev_data->active_fb;  in mcux_elcdif_write()
    115  dst = dev_data->fb[dev_data->next_idx];  in mcux_elcdif_write()
    116  memcpy(dst, src, dev_data->fb_bytes);  in mcux_elcdif_write()
    [all …]
/Zephyr-latest/drivers/gpio/
  gpio_xlnx_ps_bank.h
     17  #define GPIO_XLNX_PS_BANK_MASK_DATA_LSW_REG (dev_data->base\
     19  #define GPIO_XLNX_PS_BANK_MASK_DATA_MSW_REG ((dev_data->base + 0x04)\
     21  #define GPIO_XLNX_PS_BANK_DATA_REG ((dev_data->base + 0x40)\
     23  #define GPIO_XLNX_PS_BANK_DATA_RO_REG ((dev_data->base + 0x60)\
     25  #define GPIO_XLNX_PS_BANK_DIRM_REG ((dev_data->base + 0x204)\
     27  #define GPIO_XLNX_PS_BANK_OEN_REG ((dev_data->base + 0x208)\
     29  #define GPIO_XLNX_PS_BANK_INT_MASK_REG ((dev_data->base + 0x20C)\
     31  #define GPIO_XLNX_PS_BANK_INT_EN_REG ((dev_data->base + 0x210)\
     33  #define GPIO_XLNX_PS_BANK_INT_DIS_REG ((dev_data->base + 0x214)\
     35  #define GPIO_XLNX_PS_BANK_INT_STAT_REG ((dev_data->base + 0x218)\
    [all …]
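The gpio_xlnx_ps_bank.h macros compute register addresses as offsets from dev_data->base, which means they can only be expanded inside functions that have a local named dev_data in scope. A minimal sketch of that convention with placeholder offsets and names (the real header's offsets and register set are only partially visible above):

    /* Sketch: placeholder offsets and names, not the Xilinx register map. */
    #include <stdint.h>
    #include <zephyr/device.h>
    #include <zephyr/sys/sys_io.h>

    struct example_gpio_bank_data {
        uintptr_t base;
    };

    /* Like the macros above, these rely on a local variable named dev_data. */
    #define EXAMPLE_BANK_DATA_REG (dev_data->base + 0x40)
    #define EXAMPLE_BANK_DIRM_REG (dev_data->base + 0x204)

    static void example_gpio_set_outputs(const struct device *dev, uint32_t mask)
    {
        struct example_gpio_bank_data *dev_data = dev->data;

        /* Configure the masked pins as outputs, then drive them high. */
        sys_write32(sys_read32(EXAMPLE_BANK_DIRM_REG) | mask, EXAMPLE_BANK_DIRM_REG);
        sys_write32(sys_read32(EXAMPLE_BANK_DATA_REG) | mask, EXAMPLE_BANK_DATA_REG);
    }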
/Zephyr-latest/drivers/dma/
  dma_nios2_msgdma.c
     38  struct nios2_msgdma_dev_data *dev_data = (struct nios2_msgdma_dev_data *)dev->data;  in nios2_msgdma_isr() local
     41  alt_handle_irq(dev_data->msgdma_dev, DT_INST_IRQN(0));  in nios2_msgdma_isr()
     46  struct nios2_msgdma_dev_data *dev_data =  in nios2_msgdma_callback() local
     51  status = IORD_ALTERA_MSGDMA_CSR_STATUS(dev_data->msgdma_dev->csr_base);  in nios2_msgdma_callback()
     63  dev_data->dma_callback(dev_data->dev, dev_data->user_data, 0, dma_status);  in nios2_msgdma_callback()
     69  struct nios2_msgdma_dev_data *dev_data = (struct nios2_msgdma_dev_data *)dev->data;  in nios2_msgdma_config() local
    103  k_sem_take(&dev_data->sem_lock, K_FOREVER);  in nios2_msgdma_config()
    104  dev_data->dma_callback = cfg->dma_callback;  in nios2_msgdma_config()
    105  dev_data->user_data = cfg->user_data;  in nios2_msgdma_config()
    106  dev_data->direction = cfg->channel_direction;  in nios2_msgdma_config()
    [all …]
/Zephyr-latest/subsys/ipc/ipc_service/backends/
  ipc_icbmsg.c
    335  static int alloc_tx_buffer(struct backend_data *dev_data, uint32_t *size,  in alloc_tx_buffer() argument
    338  const struct icbmsg_config *conf = dev_data->conf;  in alloc_tx_buffer()
    359  r = k_sem_take(&dev_data->block_wait_sem, timeout);  in alloc_tx_buffer()
    374  k_sem_give(&dev_data->block_wait_sem);  in alloc_tx_buffer()
    435  static int release_tx_blocks(struct backend_data *dev_data, size_t tx_block_index,  in release_tx_blocks() argument
    438  const struct icbmsg_config *conf = dev_data->conf;  in release_tx_blocks()
    481  k_sem_give(&dev_data->block_wait_sem);  in release_tx_blocks()
    499  static int release_tx_buffer(struct backend_data *dev_data, const uint8_t *buffer,  in release_tx_buffer() argument
    502  const struct icbmsg_config *conf = dev_data->conf;  in release_tx_buffer()
    511  return release_tx_blocks(dev_data, tx_block_index, size, new_size);  in release_tx_buffer()
    [all …]
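alloc_tx_buffer() in ipc_icbmsg.c blocks on a semaphore when no block of the requested size is free, and the release path gives the semaphore back so a waiter retries its allocation. The sketch below keeps only that wait/retry skeleton; the block bitmap and size negotiation are elided, and all names are placeholders.

    /* Sketch: bitmap bookkeeping elided, placeholder names. */
    #include <zephyr/kernel.h>
    #include <errno.h>

    struct example_backend_data {
        struct k_sem block_wait_sem;
    };

    static int example_try_alloc(struct example_backend_data *dev_data, uint32_t *size)
    {
        ARG_UNUSED(dev_data);
        ARG_UNUSED(size);
        /* ... scan the allocation bitmap; return -ENOSPC when nothing fits ... */
        return -ENOSPC;
    }

    static int example_alloc_tx_buffer(struct example_backend_data *dev_data,
                                       uint32_t *size, k_timeout_t timeout)
    {
        int r;

        while ((r = example_try_alloc(dev_data, size)) == -ENOSPC) {
            /* Wait until a release signals that blocks were freed. */
            r = k_sem_take(&dev_data->block_wait_sem, timeout);
            if (r < 0) {
                return r; /* e.g. -EAGAIN on timeout */
            }
        }

        return r;
    }

    static void example_release_tx_buffer(struct example_backend_data *dev_data)
    {
        /* ... mark the released blocks free in the bitmap ... */
        k_sem_give(&dev_data->block_wait_sem);
    }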
/Zephyr-latest/drivers/spi/
  spi_nrfx_spi.c
     90  struct spi_nrfx_data *dev_data = dev->data;  in configure() local
     92  struct spi_context *ctx = &dev_data->ctx;  in configure()
     97  if (dev_data->initialized && spi_context_configured(ctx, spi_cfg)) {  in configure()
    145  if (dev_data->initialized) {  in configure()
    147  dev_data->initialized = false;  in configure()
    151  event_handler, dev_data);  in configure()
    157  dev_data->initialized = true;  in configure()
    166  struct spi_nrfx_data *dev_data = dev->data;  in finish_transaction() local
    167  struct spi_context *ctx = &dev_data->ctx;  in finish_transaction()
    172  dev_data->busy = false;  in finish_transaction()
    [all …]
  spi_nrfx_spim.c
    102  struct spi_nrfx_data *dev_data = dev->data;  in request_clock() local
    118  dev_data->clock_requested = true;  in request_clock()
    129  struct spi_nrfx_data *dev_data = dev->data;  in release_clock() local
    132  if (!dev_data->clock_requested) {  in release_clock()
    136  dev_data->clock_requested = false;  in release_clock()
    146  struct spi_nrfx_data *dev_data = dev->data;  in finalize_spi_transaction() local
    151  spi_context_cs_control(&dev_data->ctx, false);  in finalize_spi_transaction()
    154  if (NRF_SPIM_IS_320MHZ_SPIM(reg) && !(dev_data->ctx.config->operation & SPI_HOLD_ON_CS)) {  in finalize_spi_transaction()
    219  struct spi_nrfx_data *dev_data = dev->data;  in configure() local
    221  struct spi_context *ctx = &dev_data->ctx;  in configure()
    [all …]
/Zephyr-latest/drivers/watchdog/
  wdt_ifx_cat1.c
     34  struct ifx_cat1_wdt_data *dev_data = dev->data;  in ifx_cat1_wdt_isr_handler() local
     36  if (dev_data->callback) {  in ifx_cat1_wdt_isr_handler()
     37  dev_data->callback(dev, 0);  in ifx_cat1_wdt_isr_handler()
     46  struct ifx_cat1_wdt_data *dev_data = dev->data;  in ifx_cat1_wdt_setup() local
     49  result = cyhal_wdt_init(&dev_data->obj, dev_data->timeout);  in ifx_cat1_wdt_setup()
     56  if (dev_data->callback) {  in ifx_cat1_wdt_setup()
     67  struct ifx_cat1_wdt_data *dev_data = dev->data;  in ifx_cat1_wdt_disable() local
     74  cyhal_wdt_free(&dev_data->obj);  in ifx_cat1_wdt_disable()
     81  struct ifx_cat1_wdt_data *dev_data = dev->data;  in ifx_cat1_wdt_install_timeout() local
     83  if (dev_data->timeout_installed) {  in ifx_cat1_wdt_install_timeout()
    [all …]
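The watchdog matches follow the usual two-step Zephyr watchdog flow: install_timeout() records the requested window and callback in dev_data and rejects a second installation, then setup() initializes the hardware and starts counting. The sketch below mirrors that flow against the generic watchdog API; the data struct, the error codes and the elided HAL call are assumptions, not the ifx_cat1 driver itself.

    /* Sketch: placeholder data struct; the hardware start is elided. */
    #include <zephyr/device.h>
    #include <zephyr/drivers/watchdog.h>
    #include <zephyr/kernel.h>
    #include <errno.h>

    struct example_wdt_data {
        wdt_callback_t callback;
        uint32_t timeout_ms;
        bool timeout_installed;
    };

    static int example_wdt_install_timeout(const struct device *dev,
                                           const struct wdt_timeout_cfg *cfg)
    {
        struct example_wdt_data *dev_data = dev->data;

        if (dev_data->timeout_installed) {
            return -ENOMEM; /* only a single timeout channel in this sketch */
        }

        dev_data->timeout_ms = cfg->window.max;
        dev_data->callback = cfg->callback;
        dev_data->timeout_installed = true;

        return 0;
    }

    static int example_wdt_setup(const struct device *dev, uint8_t options)
    {
        struct example_wdt_data *dev_data = dev->data;

        ARG_UNUSED(options);

        if (!dev_data->timeout_installed) {
            return -EINVAL;
        }

        /* ... start the hardware watchdog with dev_data->timeout_ms here ... */

        return 0;
    }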