/Zephyr-4.2.1/tests/subsys/ipc/ipc_sessions/interoperability/

icmsg_v1.c
     31  static void mbox_callback_process(struct icmsg_data_t *dev_data);
     35  struct icmsg_data_t *dev_data)   in mbox_deinit() argument
     50  (void)k_work_cancel(&dev_data->mbox_work);   in mbox_deinit()
     51  (void)k_work_cancel_delayable(&dev_data->notify_work);   in mbox_deinit()
     57  static bool is_endpoint_ready(struct icmsg_data_t *dev_data)   in is_endpoint_ready() argument
     59  return atomic_get(&dev_data->state) == ICMSG_STATE_READY;   in is_endpoint_ready()
     66  struct icmsg_data_t *dev_data =   in notify_process() local
     69  (void)mbox_send_dt(&dev_data->cfg->mbox_tx, NULL);   in notify_process()
     71  atomic_t state = atomic_get(&dev_data->state);   in notify_process()
     82  static void notify_process(struct icmsg_data_t *dev_data)   in notify_process() argument
    [all …]

/Zephyr-4.2.1/subsys/ipc/ipc_service/lib/

icmsg.c
     64  struct icmsg_data_t *dev_data)   in mbox_deinit() argument
     79  (void)k_work_cancel(&dev_data->mbox_work);   in mbox_deinit()
     90  static inline int reserve_tx_buffer_if_unused(struct icmsg_data_t *dev_data)   in reserve_tx_buffer_if_unused() argument
     93  return k_mutex_lock(&dev_data->tx_lock, SHMEM_ACCESS_TO);   in reserve_tx_buffer_if_unused()
     99  static inline int release_tx_buffer(struct icmsg_data_t *dev_data)   in release_tx_buffer() argument
    102  return k_mutex_unlock(&dev_data->tx_lock);   in release_tx_buffer()
    108  static uint32_t data_available(struct icmsg_data_t *dev_data)   in data_available() argument
    110  return pbuf_read(dev_data->rx_pb, NULL, 0);   in data_available()
    114  static void submit_mbox_work(struct icmsg_data_t *dev_data)   in submit_mbox_work() argument
    116  if (k_work_submit_to_queue(workq, &dev_data->mbox_work) < 0) {   in submit_mbox_work()
    [all …]

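Both icmsg hit lists above show the backend's convention: helpers receive the per-instance struct icmsg_data_t *dev_data explicitly, the shared-memory TX buffer is guarded by a mutex, and pending work items are cancelled on deinit. A minimal sketch of that shape, using a hypothetical my_link_data struct and timeout rather than the real icmsg definitions:

    #include <zephyr/kernel.h>

    #define TX_ACCESS_TIMEOUT K_MSEC(50)      /* hypothetical, stands in for SHMEM_ACCESS_TO */

    struct my_link_data {                     /* hypothetical instance state, not the icmsg struct */
        struct k_mutex tx_lock;               /* serializes use of the shared TX buffer */
        struct k_work mbox_work;              /* deferred mailbox processing */
    };

    static void link_init(struct my_link_data *dev_data, k_work_handler_t handler)
    {
        k_mutex_init(&dev_data->tx_lock);
        k_work_init(&dev_data->mbox_work, handler);
    }

    static int reserve_tx_buffer(struct my_link_data *dev_data)
    {
        /* 0 on success, negative errno if the buffer is still held elsewhere. */
        return k_mutex_lock(&dev_data->tx_lock, TX_ACCESS_TIMEOUT);
    }

    static int release_tx_buffer(struct my_link_data *dev_data)
    {
        return k_mutex_unlock(&dev_data->tx_lock);
    }

    static void link_deinit(struct my_link_data *dev_data)
    {
        /* Drop any deferred work before the instance state disappears. */
        (void)k_work_cancel(&dev_data->mbox_work);
    }
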
/Zephyr-4.2.1/subsys/usb/device/class/

cdc_acm.c
    155  struct cdc_acm_dev_data_t *dev_data;   in cdc_acm_class_handle_req() local
    168  dev_data = CONTAINER_OF(common, struct cdc_acm_dev_data_t, common);   in cdc_acm_class_handle_req()
    173  rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);   in cdc_acm_class_handle_req()
    174  memcpy(&dev_data->line_coding, *data,   in cdc_acm_class_handle_req()
    175  sizeof(dev_data->line_coding));   in cdc_acm_class_handle_req()
    176  new_rate = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);   in cdc_acm_class_handle_req()
    179  dev_data->line_coding.bCharFormat,   in cdc_acm_class_handle_req()
    180  dev_data->line_coding.bParityType,   in cdc_acm_class_handle_req()
    181  dev_data->line_coding.bDataBits);   in cdc_acm_class_handle_req()
    183  if (rate != new_rate && dev_data->rate_cb != NULL) {   in cdc_acm_class_handle_req()
    [all …]

/Zephyr-4.2.1/drivers/mspi/

mspi_dw.c
    176  struct mspi_dw_data *dev_data = dev->data;   in call_user_callback_with_context() local
    178  &dev_data->xfer.packets[packet_idx];   in call_user_callback_with_context()
    179  struct mspi_callback_context *cb_ctx = dev_data->cb_ctxs[evt_type];   in call_user_callback_with_context()
    182  !dev_data->cbs[evt_type]) {   in call_user_callback_with_context()
    190  cb_ctx->mspi_evt.evt_data.dev_id = dev_data->dev_id;   in call_user_callback_with_context()
    195  dev_data->cbs[evt_type](cb_ctx);   in call_user_callback_with_context()
    200  struct mspi_dw_data *dev_data =   in async_timeout_timer_handler() local
    204  k_work_submit(&dev_data->async_timeout_work);   in async_timeout_timer_handler()
    209  struct mspi_dw_data *dev_data =   in async_timeout_work_handler() local
    211  const struct device *dev = dev_data->dev;   in async_timeout_work_handler()
    [all …]

/Zephyr-4.2.1/drivers/ethernet/

eth_ivshmem.c
     58  struct eth_ivshmem_dev_data *dev_data = dev->data;   in eth_ivshmem_get_stats() local
     60  return &dev_data->stats;   in eth_ivshmem_get_stats()
     66  struct eth_ivshmem_dev_data *dev_data = dev->data;   in eth_ivshmem_start() local
     68  dev_data->enabled = true;   in eth_ivshmem_start()
     71  k_poll_signal_raise(&dev_data->poll_signal, 0);   in eth_ivshmem_start()
     78  struct eth_ivshmem_dev_data *dev_data = dev->data;   in eth_ivshmem_stop() local
     80  dev_data->enabled = false;   in eth_ivshmem_stop()
     83  k_poll_signal_raise(&dev_data->poll_signal, 0);   in eth_ivshmem_stop()
     96  struct eth_ivshmem_dev_data *dev_data = dev->data;   in eth_ivshmem_send() local
    101  int res = eth_ivshmem_queue_tx_get_buff(&dev_data->ivshmem_queue, &data, len);   in eth_ivshmem_send()
    [all …]

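The eth_ivshmem.c hits are the canonical Zephyr driver pattern: each API entry point casts dev->data to the driver's private data struct, updates per-instance state, and wakes whatever is waiting on it. A sketch under assumed names (my_eth_data, my_eth_start/my_eth_stop are illustrative, not the eth_ivshmem symbols; CONFIG_POLL and a k_poll_signal_init() call during driver init are assumed):

    #include <zephyr/device.h>
    #include <zephyr/kernel.h>

    struct my_eth_data {                      /* hypothetical private data, registered via DEVICE_DT_INST_DEFINE() */
        bool enabled;
        struct k_poll_signal poll_signal;     /* the RX/TX thread polls on this */
    };

    static int my_eth_start(const struct device *dev)
    {
        struct my_eth_data *dev_data = dev->data;

        dev_data->enabled = true;
        /* Wake the polling thread so it notices the state change. */
        k_poll_signal_raise(&dev_data->poll_signal, 0);
        return 0;
    }

    static int my_eth_stop(const struct device *dev)
    {
        struct my_eth_data *dev_data = dev->data;

        dev_data->enabled = false;
        k_poll_signal_raise(&dev_data->poll_signal, 0);
        return 0;
    }
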
eth_esp32.c
     66  struct eth_esp32_dev_data *const dev_data = dev->data;   in eth_esp32_set_config() local
     71  memcpy(dev_data->mac_addr, config->mac_address.addr, 6);   in eth_esp32_set_config()
     72  emac_hal_set_address(&dev_data->hal, dev_data->mac_addr);   in eth_esp32_set_config()
     73  net_if_set_link_addr(dev_data->iface, dev_data->mac_addr,   in eth_esp32_set_config()
     74  sizeof(dev_data->mac_addr),   in eth_esp32_set_config()
     87  struct eth_esp32_dev_data *dev_data = dev->data;   in eth_esp32_send() local
     90  if (net_pkt_read(pkt, dev_data->txb, len)) {   in eth_esp32_send()
     94  uint32_t sent_len = emac_hal_transmit_frame(&dev_data->hal, dev_data->txb, len);   in eth_esp32_send()
    102  struct eth_esp32_dev_data *const dev_data, uint32_t *frames_remaining)   in eth_esp32_rx() argument
    106  &dev_data->hal, dev_data->rxb, sizeof(dev_data->rxb),   in eth_esp32_rx()
    [all …]

eth_xlnx_gem.c
    222  struct eth_xlnx_gem_dev_data *dev_data = dev->data;   in eth_xlnx_gem_iface_init() local
    225  dev_data->iface = iface;   in eth_xlnx_gem_iface_init()
    226  net_if_set_link_addr(iface, dev_data->mac_addr, 6, NET_LINK_ETHERNET);   in eth_xlnx_gem_iface_init()
    234  k_work_init(&dev_data->tx_done_work, eth_xlnx_gem_tx_done_work);   in eth_xlnx_gem_iface_init()
    235  k_work_init(&dev_data->rx_pend_work, eth_xlnx_gem_rx_pending_work);   in eth_xlnx_gem_iface_init()
    236  k_work_init_delayable(&dev_data->phy_poll_delayed_work,   in eth_xlnx_gem_iface_init()
    240  k_sem_init(&dev_data->tx_done_sem, 0, 1);   in eth_xlnx_gem_iface_init()
    246  k_sem_init(&dev_data->txbd_ring.ring_sem, 1, 1);   in eth_xlnx_gem_iface_init()
    253  k_work_reschedule(&dev_data->phy_poll_delayed_work, K_NO_WAIT);   in eth_xlnx_gem_iface_init()
    267  struct eth_xlnx_gem_dev_data *dev_data = dev->data;   in eth_xlnx_gem_isr() local
    [all …]

eth_stellaris.c
     43  struct eth_stellaris_runtime *dev_data = dev->data;   in eth_stellaris_flush() local
     45  if (dev_data->tx_pos != 0) {   in eth_stellaris_flush()
     46  sys_write32(dev_data->tx_word, REG_MACDATA);   in eth_stellaris_flush()
     47  dev_data->tx_pos = 0;   in eth_stellaris_flush()
     48  dev_data->tx_word = 0U;   in eth_stellaris_flush()
     54  struct eth_stellaris_runtime *dev_data = dev->data;   in eth_stellaris_send_byte() local
     56  dev_data->tx_word |= byte << (dev_data->tx_pos * 8);   in eth_stellaris_send_byte()
     57  dev_data->tx_pos++;   in eth_stellaris_send_byte()
     58  if (dev_data->tx_pos == 4) {   in eth_stellaris_send_byte()
     59  sys_write32(dev_data->tx_word, REG_MACDATA);   in eth_stellaris_send_byte()
    [all …]

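eth_stellaris.c keeps a 4-byte accumulator in its runtime data because the MAC only exposes a 32-bit TX data register. Reduced to a standalone sketch — the register address and struct name below are placeholders, not the Stellaris definitions:

    #include <zephyr/kernel.h>
    #include <zephyr/sys/sys_io.h>

    #define MY_MACDATA_REG 0x40048010UL       /* placeholder register address */

    struct my_eth_runtime {                   /* placeholder for the driver's runtime data */
        uint32_t tx_word;                     /* bytes accumulated so far */
        int tx_pos;                           /* how many bytes of tx_word are filled (0..3) */
    };

    static void my_eth_flush(struct my_eth_runtime *dev_data)
    {
        if (dev_data->tx_pos != 0) {
            sys_write32(dev_data->tx_word, MY_MACDATA_REG);
            dev_data->tx_pos = 0;
            dev_data->tx_word = 0U;
        }
    }

    static void my_eth_send_byte(struct my_eth_runtime *dev_data, uint8_t byte)
    {
        /* Pack little-endian: byte 0 into bits 7..0, byte 3 into bits 31..24. */
        dev_data->tx_word |= (uint32_t)byte << (dev_data->tx_pos * 8);
        dev_data->tx_pos++;
        if (dev_data->tx_pos == 4) {
            sys_write32(dev_data->tx_word, MY_MACDATA_REG);
            dev_data->tx_pos = 0;
            dev_data->tx_word = 0U;
        }
    }
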
phy_xlnx_gem.c
    200  struct eth_xlnx_gem_dev_data *dev_data = dev->data;   in phy_xlnx_gem_marvell_alaska_reset() local
    209  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_reset()
    212  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_reset()
    217  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_reset()
    222  dev->name, dev_data->phy_addr);   in phy_xlnx_gem_marvell_alaska_reset()
    235  struct eth_xlnx_gem_dev_data *dev_data = dev->data;   in phy_xlnx_gem_marvell_alaska_cfg() local
    247  phy_data = phy_xlnx_gem_mdio_read(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_cfg()
    250  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_cfg()
    254  if ((dev_data->phy_id & PHY_MRVL_PHY_ID_MODEL_MASK) ==   in phy_xlnx_gem_marvell_alaska_cfg()
    267  phy_xlnx_gem_mdio_write(dev_conf->base_addr, dev_data->phy_addr,   in phy_xlnx_gem_marvell_alaska_cfg()
    [all …]

/Zephyr-4.2.1/drivers/lora/loramac_node/

sx12xx_common.c
     41  } dev_data;   variable
    103  struct k_poll_signal *sig = dev_data.operation_done;   in sx12xx_ev_rx_done()
    106  if (dev_data.async_rx_cb) {   in sx12xx_ev_rx_done()
    110  dev_data.async_rx_cb(dev_data.dev, payload, size, rssi, snr,   in sx12xx_ev_rx_done()
    111  dev_data.async_user_data);   in sx12xx_ev_rx_done()
    120  if (!atomic_cas(&dev_data.modem_usage, STATE_BUSY, STATE_CLEANUP)) {   in sx12xx_ev_rx_done()
    134  if (size < *dev_data.rx_params.size) {   in sx12xx_ev_rx_done()
    135  *dev_data.rx_params.size = size;   in sx12xx_ev_rx_done()
    138  memcpy(dev_data.rx_params.buf, payload,   in sx12xx_ev_rx_done()
    139  *dev_data.rx_params.size);   in sx12xx_ev_rx_done()
    [all …]

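sx12xx_common.c is the odd one out: dev_data here is not per-device data behind dev->data but a single file-scope variable (the "41 } dev_data; variable" hit), and exclusive use of the radio is arbitrated with atomic compare-and-swap. A compressed sketch of that singleton pattern; the state names and fields are invented for illustration:

    #include <zephyr/kernel.h>
    #include <zephyr/sys/atomic.h>

    enum { STATE_FREE, STATE_BUSY, STATE_CLEANUP };   /* illustrative states */

    static struct {
        atomic_t modem_usage;                 /* which of the states above currently applies */
        struct k_poll_signal *operation_done; /* raised when the pending operation completes */
    } dev_data;                               /* single shared instance: the radio is a singleton */

    static bool modem_acquire(void)
    {
        /* Only one operation may own the modem at a time. */
        return atomic_cas(&dev_data.modem_usage, STATE_FREE, STATE_BUSY);
    }

    static bool modem_release(void)
    {
        /* Move to CLEANUP first so a late IRQ can tell "done" from "never started". */
        if (!atomic_cas(&dev_data.modem_usage, STATE_BUSY, STATE_CLEANUP)) {
            return false;
        }
        atomic_set(&dev_data.modem_usage, STATE_FREE);
        return true;
    }
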
/Zephyr-4.2.1/drivers/clock_control/

clock_control_nrf54h_hfxo.c
     57  struct dev_data_hfxo *dev_data =   in hfxo_start_up_timer_handler() local
     69  if (dev_data->notify) {   in hfxo_start_up_timer_handler()
     70  dev_data->notify(&dev_data->mgr, 0);   in hfxo_start_up_timer_handler()
     74  static void start_hfxo(struct dev_data_hfxo *dev_data)   in start_hfxo() argument
     77  soc_lrcconf_poweron_request(&dev_data->hfxo_node, NRF_LRCCONF_POWER_MAIN);   in start_hfxo()
     81  static void request_hfxo(struct dev_data_hfxo *dev_data)   in request_hfxo() argument
     87  if (dev_data->request_count == 0) {   in request_hfxo()
     88  start_hfxo(dev_data);   in request_hfxo()
     91  dev_data->request_count++;   in request_hfxo()
     94  start_hfxo(dev_data);   in request_hfxo()
    [all …]

clock_control_nrfs_audiopll.c
     38  struct shim_data *dev_data = dev->data;   in shim_nrfs_request_enable() local
     43  dev_data->evt = NRFS_AUDIOPLL_EVT_ENABLED;   in shim_nrfs_request_enable()
     44  err = nrfs_audiopll_enable_request(dev_data);   in shim_nrfs_request_enable()
     54  struct shim_data *dev_data = dev->data;   in shim_nrfs_request_disable() local
     59  dev_data->evt = NRFS_AUDIOPLL_EVT_DISABLED;   in shim_nrfs_request_disable()
     60  err = nrfs_audiopll_disable_request(dev_data);   in shim_nrfs_request_disable()
     70  struct shim_data *dev_data = CONTAINER_OF(mgr, struct shim_data, mgr);   in onoff_start_option() local
     71  const struct device *dev = dev_data->dev;   in onoff_start_option()
     74  dev_data->mgr_notify = notify;   in onoff_start_option()
     78  dev_data->mgr_notify = NULL;   in onoff_start_option()
    [all …]

clock_control_nrf_lfclk.c
     97  struct lfclk_dev_data *dev_data = context;   in clock_evt_handler() local
    100  k_timer_stop(&dev_data->timer);   in clock_evt_handler()
    106  clock_config_update_end(&dev_data->clk_cfg, status);   in clock_evt_handler()
    111  struct lfclk_dev_data *dev_data =   in lfclk_update_timeout_handler() local
    114  clock_config_update_end(&dev_data->clk_cfg, -ETIMEDOUT);   in lfclk_update_timeout_handler()
    119  struct lfclk_dev_data *dev_data =   in lfclk_work_handler() local
    127  dev_data);   in lfclk_work_handler()
    129  clock_config_update_end(&dev_data->clk_cfg, -EIO);   in lfclk_work_handler()
    131  k_timer_start(&dev_data->timer, NRFS_CLOCK_TIMEOUT, K_NO_WAIT);   in lfclk_work_handler()
    138  struct lfclk_dev_data *dev_data = dev->data;   in lfclk_resolve_spec_to_idx() local
    [all …]

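clock_control_nrf_lfclk.c supervises each configuration request with a k_timer: the timer is armed when the request is sent, stopped in the event handler when a response arrives, and its expiry path completes the request with -ETIMEDOUT. A generic sketch of that watchdog-timer pattern (struct layout, handler names and the one-second timeout are assumptions; k_timer_init(&dev_data->timer, my_clk_timeout_handler, NULL) is assumed to run at init):

    #include <zephyr/kernel.h>
    #include <zephyr/sys/util.h>
    #include <errno.h>

    #define REQUEST_TIMEOUT K_SECONDS(1)      /* assumed timeout */

    struct my_clk_data {                      /* assumed private data layout */
        struct k_timer timer;
        void (*complete)(struct my_clk_data *dev_data, int status);
    };

    static void my_clk_timeout_handler(struct k_timer *timer)
    {
        struct my_clk_data *dev_data = CONTAINER_OF(timer, struct my_clk_data, timer);

        /* Nothing came back from the clock service in time: fail the pending request. */
        dev_data->complete(dev_data, -ETIMEDOUT);
    }

    static void my_clk_event_handler(struct my_clk_data *dev_data, int status)
    {
        k_timer_stop(&dev_data->timer);       /* response arrived, disarm the watchdog */
        dev_data->complete(dev_data, status);
    }

    static void my_clk_submit_request(struct my_clk_data *dev_data)
    {
        /* ... hand the request to the clock service here ... */
        k_timer_start(&dev_data->timer, REQUEST_TIMEOUT, K_NO_WAIT);   /* one-shot */
    }
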
/Zephyr-4.2.1/drivers/i2s/

i2s_renesas_ra_ssie.c
     94  struct renesas_ra_ssie_data *dev_data = (struct renesas_ra_ssie_data *)dev->data;   in ssi_rt_isr() local
     96  if (dev_data->active_dir == I2S_DIR_TX) {   in ssi_rt_isr()
    100  if (dev_data->active_dir == I2S_DIR_RX) {   in ssi_rt_isr()
    231  static void free_tx_buffer(struct renesas_ra_ssie_data *dev_data, const void *buffer)   in free_tx_buffer() argument
    233  k_mem_slab_free(dev_data->tx_stream.cfg.mem_slab, (void *)buffer);   in free_tx_buffer()
    237  static void free_rx_buffer(struct renesas_ra_ssie_data *dev_data, void *buffer)   in free_rx_buffer() argument
    239  k_mem_slab_free(dev_data->rx_stream.cfg.mem_slab, buffer);   in free_rx_buffer()
    245  struct renesas_ra_ssie_data *dev_data = dev->data;   in drop_queue() local
    249  while (k_msgq_get(&dev_data->tx_queue, &msg_item, K_NO_WAIT) == 0) {   in drop_queue()
    250  free_tx_buffer(dev_data, msg_item.mem_block);   in drop_queue()
    [all …]

/Zephyr-4.2.1/drivers/serial/

uart_sam0.c
    138  struct uart_sam0_dev_data *const dev_data =   in uart_sam0_dma_tx_done() local
    140  const struct uart_sam0_dev_cfg *const cfg = dev_data->cfg;   in uart_sam0_dma_tx_done()
    147  static int uart_sam0_tx_halt(struct uart_sam0_dev_data *dev_data)   in uart_sam0_tx_halt() argument
    149  const struct uart_sam0_dev_cfg *const cfg = dev_data->cfg;   in uart_sam0_tx_halt()
    151  size_t tx_active = dev_data->tx_len;   in uart_sam0_tx_halt()
    157  .buf = dev_data->tx_buf,   in uart_sam0_tx_halt()
    162  dev_data->tx_buf = NULL;   in uart_sam0_tx_halt()
    163  dev_data->tx_len = 0U;   in uart_sam0_tx_halt()
    174  if (dev_data->async_cb) {   in uart_sam0_tx_halt()
    175  dev_data->async_cb(dev_data->dev,   in uart_sam0_tx_halt()
    [all …]

uart_bt.c
     49  struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data;   in bt_notif_enabled() local
     51  (void)atomic_set(&dev_data->bt.enabled, enabled ? 1 : 0);   in bt_notif_enabled()
     55  if (!ring_buf_is_empty(dev_data->uart.tx_ringbuf)) {   in bt_notif_enabled()
     56  k_work_reschedule_for_queue(&nus_work_queue, &dev_data->uart.tx_work, K_NO_WAIT);   in bt_notif_enabled()
     68  struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data;   in bt_received() local
     69  struct ring_buf *ringbuf = dev_data->uart.rx_ringbuf;   in bt_received()
     80  k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work);   in bt_received()
    117  struct uart_bt_data *dev_data = CONTAINER_OF(work, struct uart_bt_data, uart.cb_work);   in cb_work_handler() local
    119  if (dev_data->uart.callback.cb) {   in cb_work_handler()
    120  dev_data->uart.callback.cb(   in cb_work_handler()
    [all …]

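uart_bt.c shows the second common way to reach dev_data: when the entry point is a work-queue handler rather than a device API call, there is no struct device argument, so the private data is recovered from the embedded struct k_work with CONTAINER_OF. A sketch with placeholder names:

    #include <zephyr/kernel.h>
    #include <zephyr/sys/util.h>

    struct my_uart_data {                     /* placeholder private data */
        struct k_work cb_work;                /* embedded work item */
        void (*cb)(void *user_data);          /* user callback to run from the work queue */
        void *user_data;
    };

    static void cb_work_handler(struct k_work *work)
    {
        /* Map the k_work pointer back to the enclosing instance data. */
        struct my_uart_data *dev_data = CONTAINER_OF(work, struct my_uart_data, cb_work);

        if (dev_data->cb) {
            dev_data->cb(dev_data->user_data);
        }
    }

    static void my_uart_init_work(struct my_uart_data *dev_data)
    {
        k_work_init(&dev_data->cb_work, cb_work_handler);
    }
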
/Zephyr-4.2.1/subsys/usb/device/class/hid/

core.c
    153  static int hid_on_get_idle(struct hid_device_info *dev_data,   in hid_on_get_idle() argument
    173  *data = &dev_data->idle_rate[0];   in hid_on_get_idle()
    174  *len = sizeof(dev_data->idle_rate[0]);   in hid_on_get_idle()
    176  *data = &dev_data->idle_rate[report_id - 1];   in hid_on_get_idle()
    177  *len = sizeof(dev_data->idle_rate[report_id - 1]);   in hid_on_get_idle()
    186  static int hid_on_get_report(struct hid_device_info *dev_data,   in hid_on_get_report() argument
    197  static int hid_on_get_protocol(struct hid_device_info *dev_data,   in hid_on_get_protocol() argument
    202  uint32_t size = sizeof(dev_data->protocol);   in hid_on_get_protocol()
    209  LOG_DBG("Get Protocol: %d", dev_data->protocol);   in hid_on_get_protocol()
    211  *data = &dev_data->protocol;   in hid_on_get_protocol()
    [all …]

/Zephyr-4.2.1/drivers/i2c/

i2c_andes_atciic100.c
     44  struct i2c_atciic100_dev_data_t *dev_data = dev->data;   in i2c_atciic100_default_control() local
     47  k_sem_init(&dev_data->bus_lock, 1, 1);   in i2c_atciic100_default_control()
     48  k_sem_init(&dev_data->device_sync_sem, 0, 1);   in i2c_atciic100_default_control()
     60  dev_data->fifo_depth = 2;   in i2c_atciic100_default_control()
     63  dev_data->fifo_depth = 4;   in i2c_atciic100_default_control()
     66  dev_data->fifo_depth = 8;   in i2c_atciic100_default_control()
     69  dev_data->fifo_depth = 16;   in i2c_atciic100_default_control()
     88  dev_data->driver_state = I2C_DRV_INIT;   in i2c_atciic100_default_control()
     89  dev_data->status.mode = 0;   in i2c_atciic100_default_control()
     90  dev_data->status.arbitration_lost = 0;   in i2c_atciic100_default_control()
    [all …]

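The Andes ATCIIC100 hits show two semaphores being set up in the device data: one initialized to 1/1 acting as a bus lock, and one initialized to 0/1 used by the ISR to signal completion to the waiting thread. A minimal sketch of that pairing with invented names (error handling and the actual controller programming are omitted):

    #include <zephyr/kernel.h>

    struct my_i2c_data {                      /* invented data layout */
        struct k_sem bus_lock;                /* 1/1: exclusive access to the controller */
        struct k_sem sync_sem;                /* 0/1: ISR -> thread completion signal */
    };

    static void my_i2c_init(struct my_i2c_data *dev_data)
    {
        k_sem_init(&dev_data->bus_lock, 1, 1);
        k_sem_init(&dev_data->sync_sem, 0, 1);
    }

    static int my_i2c_transfer(struct my_i2c_data *dev_data)
    {
        k_sem_take(&dev_data->bus_lock, K_FOREVER);
        /* ... program the controller and enable the completion interrupt ... */
        k_sem_take(&dev_data->sync_sem, K_FOREVER);   /* wait until the ISR gives it */
        k_sem_give(&dev_data->bus_lock);
        return 0;
    }

    static void my_i2c_isr(struct my_i2c_data *dev_data)
    {
        /* ... on a transfer-complete status bit ... */
        k_sem_give(&dev_data->sync_sem);
    }
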
/Zephyr-4.2.1/drivers/display/

display_mcux_elcdif.c
     80  struct mcux_elcdif_data *dev_data = dev->data;   in mcux_elcdif_write() local
     87  __ASSERT((dev_data->pixel_bytes * desc->pitch * desc->height) <= desc->buf_size,   in mcux_elcdif_write()
     95  LOG_DBG("Setting FB from %p->%p", (void *)dev_data->active_fb, (void *)buf);   in mcux_elcdif_write()
     96  dev_data->active_fb = buf;   in mcux_elcdif_write()
    104  LOG_DBG("Setting FB from %p->%p", (void *)dev_data->active_fb, (void *)buf);   in mcux_elcdif_write()
    105  dev_data->active_fb = buf;   in mcux_elcdif_write()
    112  } else if (dev_data->active_fb != dev_data->fb[dev_data->next_idx]) {   in mcux_elcdif_write()
    118  src = dev_data->active_fb;   in mcux_elcdif_write()
    119  dst = dev_data->fb[dev_data->next_idx];   in mcux_elcdif_write()
    120  memcpy(dst, src, dev_data->fb_bytes);   in mcux_elcdif_write()
    [all …]

/Zephyr-4.2.1/drivers/gpio/

gpio_xlnx_ps_bank.h
     17  #define GPIO_XLNX_PS_BANK_MASK_DATA_LSW_REG (dev_data->base\
     19  #define GPIO_XLNX_PS_BANK_MASK_DATA_MSW_REG ((dev_data->base + 0x04)\
     21  #define GPIO_XLNX_PS_BANK_DATA_REG ((dev_data->base + 0x40)\
     23  #define GPIO_XLNX_PS_BANK_DATA_RO_REG ((dev_data->base + 0x60)\
     25  #define GPIO_XLNX_PS_BANK_DIRM_REG ((dev_data->base + 0x204)\
     27  #define GPIO_XLNX_PS_BANK_OEN_REG ((dev_data->base + 0x208)\
     29  #define GPIO_XLNX_PS_BANK_INT_MASK_REG ((dev_data->base + 0x20C)\
     31  #define GPIO_XLNX_PS_BANK_INT_EN_REG ((dev_data->base + 0x210)\
     33  #define GPIO_XLNX_PS_BANK_INT_DIS_REG ((dev_data->base + 0x214)\
     35  #define GPIO_XLNX_PS_BANK_INT_STAT_REG ((dev_data->base + 0x218)\
    [all …]

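gpio_xlnx_ps_bank.h is the only header in the list: every hit is a register-address macro built from dev_data->base plus a fixed offset, which means the macros can only be used inside functions that have a suitably typed dev_data pointer in scope. A cut-down sketch of the same technique — the names and the two offsets below are placeholders, not the full Xilinx register map:

    #include <zephyr/kernel.h>
    #include <zephyr/sys/sys_io.h>

    struct my_gpio_bank_data {                /* placeholder data struct */
        mem_addr_t base;                      /* mapped controller base address */
    };

    /* Each macro expects a local variable named dev_data, as in the Xilinx header. */
    #define MY_GPIO_DATA_REG     (dev_data->base + 0x40)
    #define MY_GPIO_DATA_RO_REG  (dev_data->base + 0x60)

    static void my_gpio_set_raw(struct my_gpio_bank_data *dev_data, uint32_t value)
    {
        sys_write32(value, MY_GPIO_DATA_REG);
    }

    static uint32_t my_gpio_get_raw(struct my_gpio_bank_data *dev_data)
    {
        return sys_read32(MY_GPIO_DATA_RO_REG);
    }
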
/Zephyr-4.2.1/drivers/dma/

dma_nios2_msgdma.c
     38  struct nios2_msgdma_dev_data *dev_data = (struct nios2_msgdma_dev_data *)dev->data;   in nios2_msgdma_isr() local
     41  alt_handle_irq(dev_data->msgdma_dev, DT_INST_IRQN(0));   in nios2_msgdma_isr()
     46  struct nios2_msgdma_dev_data *dev_data =   in nios2_msgdma_callback() local
     51  status = IORD_ALTERA_MSGDMA_CSR_STATUS(dev_data->msgdma_dev->csr_base);   in nios2_msgdma_callback()
     63  dev_data->dma_callback(dev_data->dev, dev_data->user_data, 0, dma_status);   in nios2_msgdma_callback()
     69  struct nios2_msgdma_dev_data *dev_data = (struct nios2_msgdma_dev_data *)dev->data;   in nios2_msgdma_config() local
    103  k_sem_take(&dev_data->sem_lock, K_FOREVER);   in nios2_msgdma_config()
    104  dev_data->dma_callback = cfg->dma_callback;   in nios2_msgdma_config()
    105  dev_data->user_data = cfg->user_data;   in nios2_msgdma_config()
    106  dev_data->direction = cfg->channel_direction;   in nios2_msgdma_config()
    [all …]

/Zephyr-4.2.1/drivers/spi/

spi_nrfx_spim.c
     64  struct spi_nrfx_data *dev_data = dev->data;   in finalize_spi_transaction() local
     65  void *reg = dev_data->spim.p_reg;   in finalize_spi_transaction()
     68  spi_context_cs_control(&dev_data->ctx, false);   in finalize_spi_transaction()
     71  if (NRF_SPIM_IS_320MHZ_SPIM(reg) && !(dev_data->ctx.config->operation & SPI_HOLD_ON_CS)) {   in finalize_spi_transaction()
    133  struct spi_nrfx_data *dev_data = dev->data;   in configure() local
    135  struct spi_context *ctx = &dev_data->ctx;   in configure()
    141  if (dev_data->initialized && spi_context_configured(ctx, spi_cfg)) {   in configure()
    196  sck_pin = nrfy_spim_sck_pin_get(dev_data->spim.p_reg);   in configure()
    202  if (dev_data->initialized) {   in configure()
    203  nrfx_spim_uninit(&dev_data->spim);   in configure()
    [all …]

/Zephyr-4.2.1/drivers/flash/

flash_mspi_nor.c
     40  struct flash_mspi_nor_data *dev_data = dev->data;   in in_octal_io() local
     42  return dev_data->last_applied_cfg &&   in in_octal_io()
     43  dev_data->last_applied_cfg->io_mode == MSPI_IO_MODE_OCTAL;   in in_octal_io()
     61  struct flash_mspi_nor_data *dev_data = dev->data;   in set_up_xfer() local
     63  memset(&dev_data->xfer, 0, sizeof(dev_data->xfer));   in set_up_xfer()
     64  memset(&dev_data->packet, 0, sizeof(dev_data->packet));   in set_up_xfer()
     66  dev_data->xfer.xfer_mode = MSPI_PIO;   in set_up_xfer()
     67  dev_data->xfer.packets = &dev_data->packet;   in set_up_xfer()
     68  dev_data->xfer.num_packet = 1;   in set_up_xfer()
     69  dev_data->xfer.timeout = dev_config->transfer_timeout;   in set_up_xfer()
    [all …]

/Zephyr-4.2.1/drivers/watchdog/

wdt_infineon.c
    236  struct ifx_cat1_wdt_data *dev_data = dev->data;   in ifx_cat1_wdt_isr_handler() local
    238  if (dev_data->callback) {   in ifx_cat1_wdt_isr_handler()
    239  dev_data->callback(dev, 0);   in ifx_cat1_wdt_isr_handler()
    247  struct ifx_cat1_wdt_data *dev_data = dev->data;   in ifx_cat1_wdt_setup() local
    250  if ((dev_data->timeout == 0) || (dev_data->timeout > IFX_WDT_MAX_TIMEOUT_MS)) {   in ifx_cat1_wdt_setup()
    255  if (dev_data->wdt_initialized) {   in ifx_cat1_wdt_setup()
    267  dev_data->wdt_initial_timeout_ms = dev_data->timeout;   in ifx_cat1_wdt_setup()
    269  Cy_WDT_SetUpperLimit(ifx_wdt_timeout_to_match(dev_data->wdt_initial_timeout_ms));   in ifx_cat1_wdt_setup()
    272  dev_data->wdt_ignore_bits = ifx_wdt_timeout_to_ignore_bits(&dev_data->timeout);   in ifx_cat1_wdt_setup()
    273  dev_data->wdt_rounded_timeout_ms = dev_data->timeout;   in ifx_cat1_wdt_setup()
    [all …]

/Zephyr-4.2.1/drivers/can/

can_max32.c
    127  struct max32_can_data *dev_data = dev->data;   in can_max32_set_mode() local
    132  if (dev_data->common.started) {   in can_max32_set_mode()
    141  dev_data->common.mode = mode;   in can_max32_set_mode()
    176  struct max32_can_data *dev_data = dev->data;   in can_max32_set_timing() local
    185  if (dev_data->common.started) {   in can_max32_set_timing()
    189  k_mutex_lock(&dev_data->inst_mutex, K_FOREVER);   in can_max32_set_timing()
    202  k_mutex_unlock(&dev_data->inst_mutex);   in can_max32_set_timing()
    210  struct max32_can_data *dev_data = dev->data;   in can_max32_start() local
    214  k_mutex_lock(&dev_data->inst_mutex, K_FOREVER);   in can_max32_start()
    216  if (dev_data->common.started) {   in can_max32_start()
    [all …]

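can_max32.c closes the list with the guard pattern most CAN drivers share: configuration calls first check the common.started flag from the shared CAN driver data and bail out early, then take the per-instance mutex for the actual register work. A sketch with assumed field names (returning -EBUSY while started mirrors what the hits above imply, not necessarily the exact MAX32 code paths):

    #include <zephyr/device.h>
    #include <zephyr/kernel.h>
    #include <errno.h>

    struct my_can_data {                      /* assumed layout; real drivers embed the common CAN driver data */
        struct {
            bool started;
        } common;
        struct k_mutex inst_mutex;
    };

    static int my_can_set_timing(const struct device *dev)
    {
        struct my_can_data *dev_data = dev->data;

        /* Bit timing may only change while the controller is stopped. */
        if (dev_data->common.started) {
            return -EBUSY;
        }

        k_mutex_lock(&dev_data->inst_mutex, K_FOREVER);
        /* ... write the new bit timing to the hardware ... */
        k_mutex_unlock(&dev_data->inst_mutex);

        return 0;
    }
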