Lines matching "tx" / "dummy" in the Atmel SAM0 SERCOM SPI driver (spi_sam0)

/* SPDX-License-Identifier: Apache-2.0 */
In wait_synchronization():
    /* Devices with a dedicated SYNCBUSY register */
    while ((regs->SYNCBUSY.reg & SERCOM_SPI_SYNCBUSY_MASK) != 0) {
    /* Devices that report synchronization through STATUS.SYNCBUSY */
    while ((regs->STATUS.reg & SERCOM_SPI_STATUS_SYNCBUSY) != 0) {
In spi_sam0_configure():
    const struct spi_sam0_config *cfg = dev->config;
    struct spi_sam0_data *data = dev->data;
    SercomSpi *regs = cfg->regs;
    if (spi_context_configured(&data->ctx, config)) {
    if (config->operation & SPI_HALF_DUPLEX) {
        LOG_ERR("Half-duplex not supported");
        return -ENOTSUP;
    if (SPI_OP_MODE_GET(config->operation) != SPI_OP_MODE_MASTER) {
        return -ENOTSUP;
    if ((config->operation & SPI_TRANSFER_LSB) != 0U) {
    if ((config->operation & SPI_MODE_CPOL) != 0U) {
    if ((config->operation & SPI_MODE_CPHA) != 0U) {
    ctrla.reg |= cfg->pads;
    if ((config->operation & SPI_MODE_LOOP) != 0U) {
    if (SPI_WORD_SIZE_GET(config->operation) != 8) {
        return -ENOTSUP;
    div = (SOC_ATMEL_SAM0_GCLK0_FREQ_HZ / config->frequency) / 2U - 1;
    if (regs->CTRLA.reg != ctrla.reg || regs->CTRLB.reg != ctrlb.reg ||
        regs->BAUD.reg != div) {
        regs->CTRLA.bit.ENABLE = 0;
        regs->CTRLB = ctrlb;
        regs->BAUD.reg = div;
        regs->CTRLA = ctrla;
    data->ctx.config = config;
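The checks above pin down what this driver accepts from a caller: controller (master) mode only, full duplex only, and 8-bit words only. A minimal caller-side configuration that satisfies those checks might look like the sketch below; the 1 MHz clock rate is an arbitrary example value, not something the driver requires.

#include <zephyr/drivers/spi.h>

/* Hypothetical spi_config matching what spi_sam0_configure() accepts:
 * controller mode, MSB first, mode 0 (CPOL = 0, CPHA = 0), 8-bit words.
 * The frequency is an example value only.
 */
static const struct spi_config example_cfg = {
        .frequency = 1000000U,
        .operation = SPI_OP_MODE_MASTER | SPI_WORD_SET(8) | SPI_TRANSFER_MSB,
};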
In spi_sam0_transfer_ongoing():
    return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx);
In spi_sam0_shift_master():
    uint8_t tx;
    if (spi_context_tx_buf_on(&data->ctx)) {
        tx = *(uint8_t *)(data->ctx.tx_buf);
    tx = 0U;
    while (!regs->INTFLAG.bit.DRE) {
    regs->DATA.reg = tx;
    spi_context_update_tx(&data->ctx, 1, 1);
    while (!regs->INTFLAG.bit.RXC) {
    rx = regs->DATA.reg;
    if (spi_context_rx_buf_on(&data->ctx)) {
        *data->ctx.rx_buf = rx;
    spi_context_update_rx(&data->ctx, 1, 1);
In spi_sam0_finish():
    while (!regs->INTFLAG.bit.TXC) {
    while (regs->INTFLAG.bit.RXC) {
        (void)regs->DATA.reg;
In spi_sam0_fast_tx():
    const uint8_t *p = tx_buf->buf;
    const uint8_t *pend = (uint8_t *)tx_buf->buf + tx_buf->len;
    while (!regs->INTFLAG.bit.DRE) {
    regs->DATA.reg = ch;
In spi_sam0_fast_rx():
    uint8_t *rx = rx_buf->buf;
    int len = rx_buf->len;
    regs->DATA.reg = 0;
    len--;
    while (!regs->INTFLAG.bit.RXC) {
    *rx++ = regs->DATA.reg;
In spi_sam0_fast_txrx():
    const uint8_t *tx = tx_buf->buf;
    const uint8_t *txend = (uint8_t *)tx_buf->buf + tx_buf->len;
    uint8_t *rx = rx_buf->buf;
    size_t len = rx_buf->len;
    while (tx != txend) {
        regs->DATA.reg = *tx++;
        while (!regs->INTFLAG.bit.RXC) {
        *rx++ = regs->DATA.reg;
/* Fast path where every overlapping tx and rx buffer is the same length */
In spi_sam0_fast_transceive():
    const struct spi_sam0_config *cfg = dev->config;
    SercomSpi *regs = cfg->regs;
    const struct spi_buf *tx = NULL;
    tx = tx_bufs->buffers;
    tx_count = tx_bufs->count;
    rx = rx_bufs->buffers;
    rx_count = rx_bufs->count;
    if (tx->buf == NULL) {
    } else if (rx->buf == NULL) {
        spi_sam0_fast_tx(regs, tx);
    spi_sam0_fast_txrx(regs, tx, rx);
    tx++;
    tx_count--;
    rx_count--;
    for (; tx_count != 0; tx_count--) {
        spi_sam0_fast_tx(regs, tx++);
    for (; rx_count != 0; rx_count--) {
/* A buffer set is "regular" (and eligible for the fast path) when it has:
 *  - Zero or more RX and TX buf pairs where each is the same length.
 *  - Zero or more trailing RX only bufs.
 *  - Zero or more trailing TX only bufs.
 */
In spi_sam0_is_regular():
    const struct spi_buf *tx = NULL;
    tx = tx_bufs->buffers;
    tx_count = tx_bufs->count;
    rx = rx_bufs->buffers;
    rx_count = rx_bufs->count;
    if (tx->len != rx->len) {
    tx++;
    tx_count--;
    rx_count--;
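To make the "regular" shape above concrete, here is a hedged sketch of a buffer set that spi_sam0_is_regular() would accept and that the fast polled path could service: one equal-length TX/RX pair followed by a trailing RX-only buffer. The buffer names, lengths, and the 0x9F command byte are illustrative only.

#include <zephyr/drivers/spi.h>

/* Illustrative "regular" request: the first TX and RX bufs are the same
 * length (a command byte exchanged for a status byte), and the remaining
 * RX-only buf trails at the end.
 */
static uint8_t cmd[1] = { 0x9F };
static uint8_t status_byte[1];
static uint8_t reply[3];

static const struct spi_buf tx_bufs_arr[] = {
        { .buf = cmd, .len = sizeof(cmd) },
};
static const struct spi_buf rx_bufs_arr[] = {
        { .buf = status_byte, .len = sizeof(status_byte) }, /* pairs with cmd */
        { .buf = reply, .len = sizeof(reply) },             /* trailing RX-only */
};
static const struct spi_buf_set tx_set = { .buffers = tx_bufs_arr, .count = 1 };
static const struct spi_buf_set rx_set = { .buffers = rx_bufs_arr, .count = 2 };

With spi_dev being a handle to this controller, spi_transceive(spi_dev, &example_cfg, &tx_set, &rx_set) would drive the whole request in one call. A set whose paired buffers differ in length fails the tx->len != rx->len check above and appears to be handled by the byte-at-a-time spi_sam0_shift_master() path instead.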
In spi_sam0_transceive():
    const struct spi_sam0_config *cfg = dev->config;
    struct spi_sam0_data *data = dev->data;
    SercomSpi *regs = cfg->regs;
    spi_context_lock(&data->ctx, false, NULL, NULL, config);
    spi_context_cs_control(&data->ctx, true);
    spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
    spi_context_cs_control(&data->ctx, false);
    spi_context_release(&data->ctx, err);
In spi_sam0_dma_rx_load():
    const struct spi_sam0_config *cfg = dev->config;
    struct spi_sam0_data *data = dev->data;
    SercomSpi *regs = cfg->regs;
    dma_cfg.dma_slot = cfg->rx_dma_request;
    static uint8_t dummy;   /* sink for received data when there is no RX buffer */
    dma_blk.dest_address = (uint32_t)&dummy;
    dma_blk.source_address = (uint32_t)(&(regs->DATA.reg));
    retval = dma_config(cfg->dma_dev, cfg->rx_dma_channel,
    return dma_start(cfg->dma_dev, cfg->rx_dma_channel);
In spi_sam0_dma_tx_load():
    const struct spi_sam0_config *cfg = dev->config;
    SercomSpi *regs = cfg->regs;
    dma_cfg.dma_slot = cfg->tx_dma_request;
    static const uint8_t dummy;   /* filler byte sent when there is no TX buffer */
    dma_blk.source_address = (uint32_t)&dummy;
    dma_blk.dest_address = (uint32_t)(&(regs->DATA.reg));
    retval = dma_config(cfg->dma_dev, cfg->tx_dma_channel,
    return dma_start(cfg->dma_dev, cfg->tx_dma_channel);
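Both DMA load helpers fall back to a static dummy byte when one side of the transfer has no buffer, so the SERCOM still clocks the full segment length. The sketch below shows that pattern against Zephyr's generic DMA API; the channel number, trigger slot, helper name and register pointer are all placeholders, not the driver's actual code.

#include <zephyr/drivers/dma.h>

/* Hypothetical TX load with no application buffer: the DMA source is a
 * single static byte and the source address is not incremented, so the
 * same filler byte is written to the SPI DATA register "len" times.
 */
static const uint8_t filler;

static int load_dummy_tx(const struct device *dma_dev, uint32_t channel,
                         uint32_t trig_slot, volatile void *spi_data_reg,
                         size_t len)
{
        struct dma_block_config blk = {
                .block_size = len,
                .source_address = (uint32_t)&filler,
                .source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE, /* reuse the same byte */
                .dest_address = (uint32_t)spi_data_reg,
                .dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,   /* fixed peripheral register */
        };
        struct dma_config cfg = {
                .dma_slot = trig_slot,
                .channel_direction = MEMORY_TO_PERIPHERAL,
                .source_data_size = 1,
                .dest_data_size = 1,
                .block_count = 1,
                .head_block = &blk,
        };
        int ret = dma_config(dma_dev, channel, &cfg);

        return (ret == 0) ? dma_start(dma_dev, channel) : ret;
}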
In spi_sam0_dma_advance_segment():
    struct spi_sam0_data *data = dev->data;
    if (data->ctx.rx_len != 0) {
        segment_len = data->ctx.rx_len;
        if (data->ctx.tx_len != 0) {
            segment_len = MIN(segment_len, data->ctx.tx_len);
    segment_len = data->ctx.tx_len;
    data->dma_segment_len = segment_len;
In spi_sam0_dma_advance_buffers():
    struct spi_sam0_data *data = dev->data;
    if (data->dma_segment_len == 0) {
        return -EINVAL;
    if (data->ctx.rx_len) {
        retval = spi_sam0_dma_rx_load(dev, data->ctx.rx_buf,
                                      data->dma_segment_len);
    retval = spi_sam0_dma_rx_load(dev, NULL, data->dma_segment_len);
    if (data->ctx.tx_len) {
        retval = spi_sam0_dma_tx_load(dev, data->ctx.tx_buf,
                                      data->dma_segment_len);
    retval = spi_sam0_dma_tx_load(dev, NULL, data->dma_segment_len);
In spi_sam0_dma_rx_done():
    const struct device *dev = data->dev;
    const struct spi_sam0_config *cfg = dev->config;
    spi_context_update_tx(&data->ctx, 1, data->dma_segment_len);
    spi_context_update_rx(&data->ctx, 1, data->dma_segment_len);
    spi_context_cs_control(&data->ctx, false);
    spi_context_complete(&data->ctx, dev, 0);
    dma_stop(cfg->dma_dev, cfg->tx_dma_channel);
    dma_stop(cfg->dma_dev, cfg->rx_dma_channel);
    spi_context_cs_control(&data->ctx, false);
    spi_context_complete(&data->ctx, dev, retval);
In spi_sam0_transceive_async():
    const struct spi_sam0_config *cfg = dev->config;
    struct spi_sam0_data *data = dev->data;
    if (cfg->tx_dma_channel == 0xFF || cfg->rx_dma_channel == 0xFF) {
        return -ENOTSUP;
    spi_context_lock(&data->ctx, true, cb, userdata, config);
    spi_context_cs_control(&data->ctx, true);
    spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
    dma_stop(cfg->dma_dev, cfg->tx_dma_channel);
    dma_stop(cfg->dma_dev, cfg->rx_dma_channel);
    spi_context_cs_control(&data->ctx, false);
    spi_context_release(&data->ctx, retval);
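From the application side this asynchronous path is reached through Zephyr's CONFIG_SPI_ASYNC front ends such as spi_transceive_signal() or spi_transceive_cb(); the sketch below assumes the former and reuses the placeholder names (example_cfg, tx_set, rx_set) from the earlier sketches.

#include <zephyr/kernel.h>
#include <zephyr/drivers/spi.h>

/* Hypothetical asynchronous transfer; "spi_dev" is a device handle bound
 * to this driver, and example_cfg/tx_set/rx_set come from the sketches
 * above.  Requires CONFIG_SPI_ASYNC.
 */
static struct k_poll_signal done_sig = K_POLL_SIGNAL_INITIALIZER(done_sig);

static int start_async_transfer(const struct device *spi_dev)
{
        /* The driver rejects this with -ENOTSUP when the instance has no
         * "tx"/"rx" DMA channels configured (the 0xFF check above).
         */
        return spi_transceive_signal(spi_dev, &example_cfg, &tx_set, &rx_set,
                                     &done_sig);
}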
In spi_sam0_release():
    struct spi_sam0_data *data = dev->data;
    spi_context_unlock_unconditionally(&data->ctx);
In spi_sam0_init():
    const struct spi_sam0_config *cfg = dev->config;
    struct spi_sam0_data *data = dev->data;
    SercomSpi *regs = cfg->regs;
    GCLK->PCHCTRL[cfg->gclk_core_id].reg = GCLK_PCHCTRL_GEN_GCLK0 |
    *cfg->mclk |= cfg->mclk_mask;
    GCLK->CLKCTRL.reg = cfg->gclk_clkctrl_id | GCLK_CLKCTRL_GEN_GCLK0 |
    PM->APBCMASK.reg |= cfg->pm_apbcmask;
    regs->INTENCLR.reg = SERCOM_SPI_INTENCLR_MASK;
    err = pinctrl_apply_state(cfg->pcfg, PINCTRL_STATE_DEFAULT);
    if (!device_is_ready(cfg->dma_dev)) {
        return -ENODEV;
    data->dev = dev;
    err = spi_context_cs_configure_all(&data->ctx);
    spi_context_unlock_unconditionally(&data->ctx);
In the per-instance devicetree config initializer (TX DMA channel plumbing):
    .dma_dev = DEVICE_DT_GET(ATMEL_SAM0_DT_INST_DMA_CTLR(n, tx)),    \
    .tx_dma_request = ATMEL_SAM0_DT_INST_DMA_TRIGSRC(n, tx),         \
    .tx_dma_channel = ATMEL_SAM0_DT_INST_DMA_CHANNEL(n, tx),         \