Lines matching refs:xfer (cross-references to the xfer member and argument in the MSPI emulator driver sources)

32 	struct mspi_xfer              xfer;  member
134 const struct mspi_xfer *xfer, in mspi_context_lock() argument
140 if (k_sem_take(&ctx->lock, K_MSEC(xfer->timeout))) { in mspi_context_lock()
146 if ((xfer->tx_dummy == ctx->xfer.tx_dummy) && in mspi_context_lock()
147 (xfer->rx_dummy == ctx->xfer.rx_dummy) && in mspi_context_lock()
148 (xfer->cmd_length == ctx->xfer.cmd_length) && in mspi_context_lock()
149 (xfer->addr_length == ctx->xfer.addr_length)) { in mspi_context_lock()
157 ctx->xfer = *xfer; in mspi_context_lock()
159 ctx->asynchronous = ctx->xfer.async; in mspi_context_lock()
185 const struct mspi_xfer *xfer) in mspi_xfer_config() argument
189 data->dev_cfg.cmd_length = xfer->cmd_length; in mspi_xfer_config()
190 data->dev_cfg.addr_length = xfer->addr_length; in mspi_xfer_config()
191 data->dev_cfg.tx_dummy = xfer->tx_dummy; in mspi_xfer_config()
192 data->dev_cfg.rx_dummy = xfer->rx_dummy; in mspi_xfer_config()
314 static inline int mspi_xfer_check(const struct mspi_xfer *xfer) in mspi_xfer_check() argument
316 if (xfer->xfer_mode > MSPI_DMA) { in mspi_xfer_check()
321 if (!xfer->packets || !xfer->num_packet) { in mspi_xfer_check()
326 for (int i = 0; i < xfer->num_packet; ++i) { in mspi_xfer_check()
328 if (!xfer->packets[i].data_buf || in mspi_xfer_check()
329 !xfer->packets[i].num_bytes) { in mspi_xfer_check()
334 if (xfer->packets[i].dir > MSPI_TX) { in mspi_xfer_check()
339 if (xfer->packets[i].cb_mask > MSPI_BUS_XFER_COMPLETE_CB) { in mspi_xfer_check()
396 packet = &ctx->xfer.packets[ctx->packets_done]; in emul_mspi_trigger_event()
737 const struct mspi_xfer *xfer) in mspi_emul_transceive() argument
758 ret = mspi_xfer_check(xfer); in mspi_emul_transceive()
766 if (xfer->async) { in mspi_emul_transceive()
771 cfg_flag = mspi_context_lock(ctx, dev_id, xfer, cb, cb_ctx); in mspi_emul_transceive()
775 ret = mspi_xfer_config(controller, xfer); in mspi_emul_transceive()
788 ctx->xfer.packets, in mspi_emul_transceive()
789 ctx->xfer.num_packet, in mspi_emul_transceive()
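
Read together, the references above describe the contract a caller has to satisfy before handing a struct mspi_xfer to the emulator: mspi_xfer_check() rejects a request whose xfer_mode exceeds MSPI_DMA, whose packets pointer or num_packet is zero, or whose packets carry a NULL data_buf, a zero num_bytes, a dir beyond MSPI_TX, or a cb_mask beyond MSPI_BUS_XFER_COMPLETE_CB; mspi_context_lock() takes the context semaphore for xfer->timeout milliseconds and caches the request in ctx->xfer; and mspi_xfer_config() copies cmd_length, addr_length, tx_dummy and rx_dummy into the device configuration. The sketch below shows one way a blocking single-packet read could be built against those checks. It is illustrative only: the opcode, buffer size, dummy-cycle counts and timeout are assumptions, and it submits the request through the generic mspi_transceive() wrapper rather than calling mspi_emul_transceive() directly.

#include <zephyr/device.h>
#include <zephyr/drivers/mspi.h>

/* Illustrative buffer; nothing below is taken from the listing itself. */
static uint8_t rx_buf[256];

int emul_read_example(const struct device *controller,
		      const struct mspi_dev_id *dev_id)
{
	struct mspi_xfer_packet packet = {
		.dir       = MSPI_RX,        /* must not exceed MSPI_TX (mspi_xfer_check) */
		.cb_mask   = 0,              /* no callback; must not exceed MSPI_BUS_XFER_COMPLETE_CB */
		.cmd       = 0x03,           /* assumed read opcode */
		.address   = 0x0,
		.num_bytes = sizeof(rx_buf), /* zero-length packets are rejected */
		.data_buf  = rx_buf,         /* NULL buffers are rejected */
	};

	struct mspi_xfer xfer = {
		.xfer_mode   = MSPI_PIO,     /* anything above MSPI_DMA is rejected */
		.async       = false,        /* take the blocking path in mspi_emul_transceive() */
		.tx_dummy    = 0,
		.rx_dummy    = 8,            /* copied into dev_cfg by mspi_xfer_config() */
		.cmd_length  = 1,
		.addr_length = 3,
		.packets     = &packet,
		.num_packet  = 1,
		.timeout     = 10,           /* ms; used as K_MSEC(xfer->timeout) in mspi_context_lock() */
	};

	/* Generic API call that lands in mspi_emul_transceive() on the emulated bus. */
	return mspi_transceive(controller, dev_id, &xfer);
}

Because cmd_length, addr_length and the dummy-cycle counts match whatever is already cached in ctx->xfer, a repeated request like this would also take the "no reconfiguration needed" branch that mspi_context_lock() checks for at lines 146-149.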