Lines Matching +full:spi +full:- +full:dev
 * SPDX-License-Identifier: Apache-2.0

#include <zephyr/drivers/spi.h>
#include <zephyr/drivers/spi/rtio.h>
#include <zephyr/dt-bindings/memory-attr/memory-attr-arm.h>
#include <zephyr/linker/linker-defs.h>
static void spi_stm32_pm_policy_state_lock_get(const struct device *dev)
	struct spi_stm32_data *data = dev->data;

	if (!data->pm_policy_state_on) {
		data->pm_policy_state_on = true;
		pm_device_runtime_get(dev);

static void spi_stm32_pm_policy_state_lock_put(const struct device *dev)
	struct spi_stm32_data *data = dev->data;

	if (data->pm_policy_state_on) {
		data->pm_policy_state_on = false;
		pm_device_runtime_put(dev);
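/* Editor's sketch (assumption, not among the matched lines): helpers like the
 * two above typically pair runtime PM with a pm_policy state lock (from
 * <zephyr/pm/policy.h>) so the SoC cannot enter a low-power state that would
 * stop the SPI kernel clock mid-transfer:
 */
	pm_policy_state_lock_get(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);
	/* ... and symmetrically on the put side ... */
	pm_policy_state_lock_put(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);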
 * the linker to avoid potential DMA cache-coherency problems.

/* arg holds SPI DMA data */
		spi_dma_data->status_flags |= SPI_STM32_DMA_ERROR_FLAG;

	if (channel == spi_dma_data->dma_tx.channel) {
		spi_dma_data->status_flags |= SPI_STM32_DMA_TX_DONE_FLAG;
	} else if (channel == spi_dma_data->dma_rx.channel) {
		spi_dma_data->status_flags |= SPI_STM32_DMA_RX_DONE_FLAG;
		spi_dma_data->status_flags |= SPI_STM32_DMA_ERROR_FLAG;

	k_sem_give(&spi_dma_data->status_sem);
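/* Editor's sketch (assumption): the dma_callback() fragments above fit
 * Zephyr's dma_callback_t signature. The DMA core calls it with a negative
 * status on error; the driver records per-channel flags and wakes the
 * waiting thread with k_sem_give():
 */
static void example_dma_callback(const struct device *dma_dev, void *arg,
				 uint32_t channel, int status)
{
	struct spi_stm32_data *spi_dma_data = arg;	/* user_data from dma_config() */

	if (status < 0) {
		spi_dma_data->status_flags |= SPI_STM32_DMA_ERROR_FLAG;
	}
	k_sem_give(&spi_dma_data->status_sem);
}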
static int spi_stm32_dma_tx_load(const struct device *dev, const uint8_t *buf,
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	struct stream *stream = &data->dma_tx;

	blk_cfg = &stream->dma_blk_cfg;
	blk_cfg->block_size = len;
		blk_cfg->source_address = (uint32_t)&dummy_rx_tx_buffer;
		blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		blk_cfg->source_address = (uint32_t)buf;
		if (data->dma_tx.src_addr_increment) {
			blk_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
			blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

	blk_cfg->dest_address = ll_func_dma_get_reg_addr(cfg->spi, SPI_STM32_DMA_TX);
	if (data->dma_tx.dst_addr_increment) {
		blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

	blk_cfg->fifo_mode_control = data->dma_tx.fifo_threshold;
	stream->dma_cfg.head_block = blk_cfg;
	stream->dma_cfg.user_data = data;
	/* pass our client origin to the dma: data->dma_tx.dma_channel */
	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.channel,
			 &stream->dma_cfg);

	return dma_start(data->dma_tx.dma_dev, data->dma_tx.channel);
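/* Editor's sketch (assumption): the TX stream setup above is a standard
 * Zephyr memory-to-peripheral block transfer. Here tx_buf, len and
 * spi_dr_addr are placeholders for the source buffer, its length and the
 * SPI data-register address returned by ll_func_dma_get_reg_addr():
 */
	struct dma_block_config blk = {
		.block_size = len,
		.source_address = (uint32_t)tx_buf,	/* RAM, incrementing */
		.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
		.dest_address = spi_dr_addr,		/* fixed peripheral register */
		.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
	};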
static int spi_stm32_dma_rx_load(const struct device *dev, uint8_t *buf,
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	struct stream *stream = &data->dma_rx;

	blk_cfg = &stream->dma_blk_cfg;
	blk_cfg->block_size = len;
		blk_cfg->dest_address = (uint32_t)&dummy_rx_tx_buffer;
		blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		blk_cfg->dest_address = (uint32_t)buf;
		if (data->dma_rx.dst_addr_increment) {
			blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
			blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

	blk_cfg->source_address = ll_func_dma_get_reg_addr(cfg->spi, SPI_STM32_DMA_RX);
	if (data->dma_rx.src_addr_increment) {
		blk_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

	blk_cfg->fifo_mode_control = data->dma_rx.fifo_threshold;
	stream->dma_cfg.head_block = blk_cfg;
	stream->dma_cfg.user_data = data;
	/* pass our client origin to the dma: data->dma_rx.channel */
	ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,
			 &stream->dma_cfg);

	return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);

static int spi_dma_move_buffers(const struct device *dev, size_t len)
	struct spi_stm32_data *data = dev->data;

	dma_segment_len = len * data->dma_rx.dma_cfg.dest_data_size;
	ret = spi_stm32_dma_rx_load(dev, data->ctx.rx_buf, dma_segment_len);

	dma_segment_len = len * data->dma_tx.dma_cfg.source_data_size;
	ret = spi_stm32_dma_tx_load(dev, data->ctx.tx_buf, dma_segment_len);
static void spi_stm32_send_next_frame(SPI_TypeDef *spi,
	const uint8_t frame_size = SPI_WORD_SIZE_GET(data->ctx.config->operation);

	if (spi_context_tx_buf_on(&data->ctx)) {
		tx_frame = UNALIGNED_GET((uint8_t *)(data->ctx.tx_buf));
	LL_SPI_TransmitData8(spi, tx_frame);
	spi_context_update_tx(&data->ctx, 1, 1);
	if (spi_context_tx_buf_on(&data->ctx)) {
		tx_frame = UNALIGNED_GET((uint16_t *)(data->ctx.tx_buf));
	LL_SPI_TransmitData16(spi, tx_frame);
	spi_context_update_tx(&data->ctx, 2, 1);

static void spi_stm32_read_next_frame(SPI_TypeDef *spi,
	const uint8_t frame_size = SPI_WORD_SIZE_GET(data->ctx.config->operation);

	rx_frame = LL_SPI_ReceiveData8(spi);
	if (spi_context_rx_buf_on(&data->ctx)) {
		UNALIGNED_PUT(rx_frame, (uint8_t *)data->ctx.rx_buf);
	spi_context_update_rx(&data->ctx, 1, 1);
	rx_frame = LL_SPI_ReceiveData16(spi);
	if (spi_context_rx_buf_on(&data->ctx)) {
		UNALIGNED_PUT(rx_frame, (uint16_t *)data->ctx.rx_buf);
	spi_context_update_rx(&data->ctx, 2, 1);
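/* Editor's note (sketch): UNALIGNED_GET()/UNALIGNED_PUT() are Zephyr helpers
 * for reads and writes through possibly misaligned pointers; they matter
 * above because a 16-bit frame may start at an odd offset in the caller's
 * byte buffer (rx_buf and dst below are hypothetical names):
 */
	uint16_t frame = UNALIGNED_GET((uint16_t *)rx_buf);	/* safe misaligned read */
	UNALIGNED_PUT(frame, (uint16_t *)dst);			/* safe misaligned write */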
	return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx);

static int spi_stm32_get_err(SPI_TypeDef *spi)
	uint32_t sr = LL_SPI_ReadReg(spi, SR);

	if (LL_SPI_IsActiveFlag_OVR(spi)) {
		LL_SPI_ClearFlag_OVR(spi);
	return -EIO;
static void spi_stm32_shift_fifo(SPI_TypeDef *spi, struct spi_stm32_data *data)
	if (ll_func_rx_is_not_empty(spi)) {
		spi_stm32_read_next_frame(spi, data);
	if (ll_func_tx_is_not_full(spi)) {
		spi_stm32_send_next_frame(spi, data);

/* Shift a SPI frame as master. */
	if (cfg->fifo_enabled) {
		spi_stm32_shift_fifo(cfg->spi, data);
		while (!ll_func_tx_is_not_full(cfg->spi)) {
		spi_stm32_send_next_frame(cfg->spi, data);
		while (!ll_func_rx_is_not_empty(cfg->spi)) {
		spi_stm32_read_next_frame(cfg->spi, data);

/* Shift a SPI frame as slave. */
static void spi_stm32_shift_s(SPI_TypeDef *spi, struct spi_stm32_data *data)
	if (ll_func_tx_is_not_full(spi) && spi_context_tx_on(&data->ctx)) {
		if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
			tx_frame = UNALIGNED_GET((uint8_t *)(data->ctx.tx_buf));
			LL_SPI_TransmitData8(spi, tx_frame);
			spi_context_update_tx(&data->ctx, 1, 1);
			tx_frame = UNALIGNED_GET((uint16_t *)(data->ctx.tx_buf));
			LL_SPI_TransmitData16(spi, tx_frame);
			spi_context_update_tx(&data->ctx, 2, 1);
		ll_func_disable_int_tx_empty(spi);
	if (ll_func_rx_is_not_empty(spi) &&
	    spi_context_rx_buf_on(&data->ctx)) {
		if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
			rx_frame = LL_SPI_ReceiveData8(spi);
			UNALIGNED_PUT(rx_frame, (uint8_t *)data->ctx.rx_buf);
			spi_context_update_rx(&data->ctx, 1, 1);
			rx_frame = LL_SPI_ReceiveData16(spi);
			UNALIGNED_PUT(rx_frame, (uint16_t *)data->ctx.rx_buf);
			spi_context_update_rx(&data->ctx, 2, 1);

/*
 * Without a FIFO, we can only shift out one frame's worth of SPI
 * TODO: support 16-bit data frames.
 */
	uint16_t operation = data->ctx.config->operation;

		spi_stm32_shift_s(cfg->spi, data);
	return spi_stm32_get_err(cfg->spi);
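/* Editor's sketch (assumption): without interrupts, the shift helpers above
 * are driven by a polling loop of roughly this shape until both directions
 * run dry or an error is reported:
 */
	do {
		ret = spi_stm32_shift_frames(cfg, data);
	} while (!ret && spi_stm32_transfer_ongoing(data));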
static void spi_stm32_cs_control(const struct device *dev, bool on)
	struct spi_stm32_data *data = dev->data;

	spi_context_cs_control(&data->ctx, on);
	const struct spi_stm32_config *cfg = dev->config;

	if (cfg->use_subghzspi_nss) {

static void spi_stm32_complete(const struct device *dev, int status)
	const struct spi_stm32_config *cfg = dev->config;
	SPI_TypeDef *spi = cfg->spi;
	struct spi_stm32_data *data = dev->data;

	ll_func_disable_int_tx_empty(spi);
	ll_func_disable_int_rx_not_empty(spi);
	ll_func_disable_int_errors(spi);
	if (cfg->fifo_enabled) {
		LL_SPI_DisableIT_EOT(spi);
	while (ll_func_rx_is_not_empty(spi)) {
		(void) LL_SPI_ReceiveData8(spi);
	if (LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
		while (ll_func_spi_is_busy(spi)) {
	spi_stm32_cs_control(dev, false);
	if (LL_SPI_IsActiveFlag_MODF(spi)) {
		LL_SPI_ClearFlag_MODF(spi);
	if (cfg->fifo_enabled) {
		LL_SPI_ClearFlag_TXTF(spi);
		LL_SPI_ClearFlag_OVR(spi);
		LL_SPI_ClearFlag_EOT(spi);
		LL_SPI_SetTransferSize(spi, 0);
	if (!(data->ctx.config->operation & SPI_HOLD_ON_CS)) {
		ll_func_disable_spi(spi);
	spi_context_complete(&data->ctx, dev, status);
	spi_stm32_pm_policy_state_lock_put(dev);
static void spi_stm32_isr(const struct device *dev)
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	SPI_TypeDef *spi = cfg->spi;

	/* Some spurious interrupts are triggered when SPI is not enabled; ignore them.
	 * Do it only when fifo is enabled to leave non-fifo functionality untouched for now.
	 */
	if (cfg->fifo_enabled) {
		if (!LL_SPI_IsEnabled(spi)) {
	err = spi_stm32_get_err(spi);
		spi_stm32_complete(dev, err);
		spi_stm32_complete(dev, err);
static int spi_stm32_configure(const struct device *dev,
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	SPI_TypeDef *spi = cfg->spi;

	if (spi_context_configured(&data->ctx, config)) {
	if ((SPI_WORD_SIZE_GET(config->operation) != 8)
	    && (SPI_WORD_SIZE_GET(config->operation) != 16)) {
		return -ENOTSUP;
	if ((config->operation & SPI_FRAME_FORMAT_TI) == SPI_FRAME_FORMAT_TI) {
		LL_SPI_SetStandard(spi, LL_SPI_PROTOCOL_TI);
		return -ENOTSUP;
	LL_SPI_SetStandard(spi, LL_SPI_PROTOCOL_MOTOROLA);
	if (IS_ENABLED(STM32_SPI_DOMAIN_CLOCK_SUPPORT) && (cfg->pclk_len > 1)) {
				(clock_control_subsys_t) &cfg->pclken[1], &clock) < 0) {
			return -EIO;
				(clock_control_subsys_t) &cfg->pclken[0], &clock) < 0) {
			return -EIO;
		if (clk <= config->frequency) {
			config->frequency,
		return -EINVAL;
	LL_SPI_Disable(spi);
	LL_SPI_SetBaudRatePrescaler(spi, scaler[br - 1]);
	if (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) {
		LL_SPI_SetClockPolarity(spi, LL_SPI_POLARITY_HIGH);
		LL_SPI_SetClockPolarity(spi, LL_SPI_POLARITY_LOW);
	if (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) {
		LL_SPI_SetClockPhase(spi, LL_SPI_PHASE_2EDGE);
		LL_SPI_SetClockPhase(spi, LL_SPI_PHASE_1EDGE);
	LL_SPI_SetTransferDirection(spi, LL_SPI_FULL_DUPLEX);
	if (config->operation & SPI_TRANSFER_LSB) {
		LL_SPI_SetTransferBitOrder(spi, LL_SPI_LSB_FIRST);
		LL_SPI_SetTransferBitOrder(spi, LL_SPI_MSB_FIRST);
	LL_SPI_DisableCRC(spi);
	if (SPI_OP_MODE_GET(config->operation) == SPI_OP_MODE_MASTER) {
		if (LL_SPI_GetNSSPolarity(spi) == LL_SPI_NSS_POLARITY_LOW)
			LL_SPI_SetInternalSSLevel(spi, LL_SPI_SS_LEVEL_HIGH);
		LL_SPI_SetNSSMode(spi, LL_SPI_NSS_SOFT);
	if (config->operation & SPI_OP_MODE_SLAVE) {
		LL_SPI_SetNSSMode(spi, LL_SPI_NSS_HARD_INPUT);
		LL_SPI_SetNSSMode(spi, LL_SPI_NSS_HARD_OUTPUT);
	if (config->operation & SPI_OP_MODE_SLAVE) {
		LL_SPI_SetMode(spi, LL_SPI_MODE_SLAVE);
		LL_SPI_SetMode(spi, LL_SPI_MODE_MASTER);
	if (SPI_WORD_SIZE_GET(config->operation) == 8) {
		LL_SPI_SetDataWidth(spi, LL_SPI_DATAWIDTH_8BIT);
		LL_SPI_SetDataWidth(spi, LL_SPI_DATAWIDTH_16BIT);
	LL_SPI_SetMasterSSIdleness(spi, cfg->mssi_clocks);
	LL_SPI_SetInterDataIdleness(spi, (cfg->midi_clocks << SPI_CFG2_MIDI_Pos));
	ll_func_set_fifo_threshold_8bit(spi);
	data->ctx.config = config;
		(SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) ? 1 : 0,
		(SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) ? 1 : 0,
		(SPI_MODE_GET(config->operation) & SPI_MODE_LOOP) ? 1 : 0,
		config->slave);
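/* Editor's sketch (assumption): the prescaler search above picks the smallest
 * power-of-two divider whose resulting clock does not exceed the requested
 * frequency, then programs scaler[br - 1]:
 */
	for (br = 1; br <= ARRAY_SIZE(scaler); ++br) {
		uint32_t clk_div = clk >> br;	/* candidate = clk / 2^br */

		if (clk_div <= config->frequency) {
			break;
		}
	}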
static int spi_stm32_release(const struct device *dev,
	struct spi_stm32_data *data = dev->data;
	const struct spi_stm32_config *cfg = dev->config;

	spi_context_unlock_unconditionally(&data->ctx);
	ll_func_disable_spi(cfg->spi);

	for (size_t i = 0; i < bufs->count; i++) {
		num_bytes += bufs->buffers[i].len;
	uint8_t bytes_per_frame = SPI_WORD_SIZE_GET(config->operation) / 8;
		return -EINVAL;
		return -EMSGSIZE;
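/* Editor's sketch (assumption): the byte-to-frame conversion above reduces to
 * an exact division, rejecting buffer sets that do not hold whole frames:
 */
	if ((num_bytes % bytes_per_frame) != 0) {
		return -EINVAL;		/* partial frame in the buffer set */
	}
	return num_bytes / bytes_per_frame;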
static int transceive(const struct device *dev,
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	SPI_TypeDef *spi = cfg->spi;

		return -ENOTSUP;
	spi_context_lock(&data->ctx, asynchronous, cb, userdata, config);
	spi_stm32_pm_policy_state_lock_get(dev);
	ret = spi_stm32_configure(dev, config);
	if (SPI_WORD_SIZE_GET(config->operation) == 8) {
		spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
		spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 2);
	if (cfg->fifo_enabled && SPI_OP_MODE_GET(config->operation) == SPI_OP_MODE_MASTER) {
		LL_SPI_SetTransferSize(spi, (uint32_t)total_frames);
	LL_SPI_Enable(spi);
	/* if the device is the SPI master, start the transfer with
	 * LL_SPI_StartMasterTransfer(spi)
	 */
	if (LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
		LL_SPI_StartMasterTransfer(spi);
		while (!LL_SPI_IsActiveMasterTransfer(spi)) {
	spi_stm32_cs_control(dev, true);
	if (cfg->fifo_enabled) {
		LL_SPI_EnableIT_EOT(spi);
	ll_func_enable_int_errors(spi);
	ll_func_enable_int_rx_not_empty(spi);
	ll_func_enable_int_tx_empty(spi);
	ret = spi_context_wait_for_completion(&data->ctx);
	spi_stm32_complete(dev, ret);
	if (spi_context_is_slave(&data->ctx) && !ret) {
		ret = data->ctx.recv_frames;
	spi_context_release(&data->ctx, ret);
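/* Editor's sketch (assumption, application side): a transfer reaches
 * transceive() through Zephyr's public SPI API roughly like this:
 */
static const struct spi_config example_spi_cfg = {
	.frequency = 1000000U,
	.operation = SPI_OP_MODE_MASTER | SPI_WORD_SET(8) | SPI_TRANSFER_MSB,
};

static int example_transfer(const struct device *spi_dev)
{
	uint8_t tx_data[2] = { 0x9F, 0x00 };
	uint8_t rx_data[2];
	const struct spi_buf tx_buf = { .buf = tx_data, .len = sizeof(tx_data) };
	const struct spi_buf rx_buf = { .buf = rx_data, .len = sizeof(rx_data) };
	const struct spi_buf_set tx = { .buffers = &tx_buf, .count = 1 };
	const struct spi_buf_set rx = { .buffers = &rx_buf, .count = 1 };

	return spi_transceive(spi_dev, &example_spi_cfg, &tx, &rx);
}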
static int wait_dma_rx_tx_done(const struct device *dev)
	struct spi_stm32_data *data = dev->data;
	int res = -1;

	if (IS_ENABLED(CONFIG_SPI_SLAVE) && spi_context_is_slave(&data->ctx)) {
	res = k_sem_take(&data->status_sem, timeout);
	if (data->status_flags & SPI_STM32_DMA_ERROR_FLAG) {
		return -EIO;
	if (data->status_flags & SPI_STM32_DMA_DONE_FLAG) {
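/* Editor's note (assumption): SPI_STM32_DMA_DONE_FLAG is presumably the OR of
 * the two per-direction bits set by the callback, so the wait above only
 * succeeds once both the TX and RX streams have completed:
 */
#define SPI_STM32_DMA_DONE_FLAG \
	(SPI_STM32_DMA_TX_DONE_FLAG | SPI_STM32_DMA_RX_DONE_FLAG)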
	       ((buf + len_bytes - 1) <= ((uintptr_t)_nocache_ram_end));

	return buf->buf == NULL;

	for (size_t i = 0; i < bufs->count; i++) {
		const struct spi_buf *buf = &bufs->buffers[i];

		if (!is_dummy_buffer(buf) &&
		    !buf_in_nocache((uintptr_t)buf->buf, buf->len)) {
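/* Editor's sketch (assumption): on SoCs with a data cache, DMA buffers can be
 * forced into the non-cacheable region checked above with Zephyr's __nocache
 * attribute (requires CONFIG_NOCACHE_MEMORY):
 */
static uint8_t example_dma_safe_buf[64] __nocache;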
static int transceive_dma(const struct device *dev,
	const struct spi_stm32_config *cfg = dev->config;
	struct spi_stm32_data *data = dev->data;
	SPI_TypeDef *spi = cfg->spi;

		return -ENOTSUP;
		return -EFAULT;
	spi_context_lock(&data->ctx, asynchronous, cb, userdata, config);
	spi_stm32_pm_policy_state_lock_get(dev);
	k_sem_reset(&data->status_sem);
	ret = spi_stm32_configure(dev, config);
	if (SPI_WORD_SIZE_GET(config->operation) == 8) {
		spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1);
		spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 2);
	/* set request before enabling (else SPI CFG1 reg is write protected) */
	LL_SPI_EnableDMAReq_RX(spi);
	LL_SPI_EnableDMAReq_TX(spi);
	LL_SPI_Enable(spi);
	if (LL_SPI_GetMode(spi) == LL_SPI_MODE_MASTER) {
		LL_SPI_StartMasterTransfer(spi);
	LL_SPI_Enable(spi);
	spi_stm32_cs_control(dev, true);
	while (data->ctx.rx_len > 0 || data->ctx.tx_len > 0) {
		if (data->ctx.rx_len == 0) {
			dma_len = data->ctx.tx_len;
		} else if (data->ctx.tx_len == 0) {
			dma_len = data->ctx.rx_len;
			dma_len = MIN(data->ctx.tx_len, data->ctx.rx_len);
		data->status_flags = 0;
		ret = spi_dma_move_buffers(dev, dma_len);
		LL_SPI_EnableDMAReq_RX(spi);
		LL_SPI_EnableDMAReq_TX(spi);
		ret = wait_dma_rx_tx_done(dev);
		while (LL_SPI_GetTxFIFOLevel(spi) > 0) {
		WAIT_FOR(ll_func_spi_dma_busy(spi) != 0,
		/* wait until the SPI is no longer busy (its TX fifo is really empty) */
		while (ll_func_spi_dma_busy(spi) == 0) {
		LL_SPI_DisableDMAReq_TX(spi);
		LL_SPI_DisableDMAReq_RX(spi);
		SPI_WORD_SIZE_GET(config->operation));
		spi_context_update_tx(&data->ctx, frame_size_bytes, dma_len);
		spi_context_update_rx(&data->ctx, frame_size_bytes, dma_len);
	/* spi_stm32_complete() relies on the SPI status register, so call it before disabling */
	spi_stm32_complete(dev, ret);
	/* disable spi instance after completion */
	LL_SPI_Disable(spi);
	/* on some MCUs the configuration register is only write-unprotected while SPI is disabled */
	LL_SPI_DisableDMAReq_TX(spi);
	LL_SPI_DisableDMAReq_RX(spi);
	err = dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel);
	err = dma_stop(data->dma_tx.dma_dev, data->dma_tx.channel);
	if (spi_context_is_slave(&data->ctx) && !ret) {
		ret = data->ctx.recv_frames;
	spi_context_release(&data->ctx, ret);
	spi_stm32_pm_policy_state_lock_put(dev);
static int spi_stm32_transceive(const struct device *dev,
	struct spi_stm32_data *data = dev->data;

	if ((data->dma_tx.dma_dev != NULL)
	    && (data->dma_rx.dma_dev != NULL)) {
		return transceive_dma(dev, config, tx_bufs, rx_bufs,
	return transceive(dev, config, tx_bufs, rx_bufs, false, NULL, NULL);

static int spi_stm32_transceive_async(const struct device *dev,
	return transceive(dev, config, tx_bufs, rx_bufs, true, cb, userdata);
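/* Editor's sketch (assumption, requires CONFIG_SPI_ASYNC): the async entry
 * point above is reached via spi_transceive_cb(), which returns immediately
 * and reports completion through a callback:
 */
static void example_spi_done(const struct device *dev, int result, void *userdata)
{
	/* result is 0 (or frames received, for slaves) on success, <0 on error */
}

/* spi_transceive_cb(spi_dev, &example_spi_cfg, &tx, &rx, example_spi_done, NULL); */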
static DEVICE_API(spi, api_funcs) = {

static inline bool spi_stm32_is_subghzspi(const struct device *dev)
	const struct spi_stm32_config *cfg = dev->config;

	return cfg->use_subghzspi_nss;
	ARG_UNUSED(dev);

static int spi_stm32_init(const struct device *dev)
	struct spi_stm32_data *data __attribute__((unused)) = dev->data;
	const struct spi_stm32_config *cfg = dev->config;

		return -ENODEV;
		(clock_control_subsys_t) &cfg->pclken[0]);
		LOG_ERR("Could not enable SPI clock");
	if (IS_ENABLED(STM32_SPI_DOMAIN_CLOCK_SUPPORT) && (cfg->pclk_len > 1)) {
			(clock_control_subsys_t) &cfg->pclken[1],
			LOG_ERR("Could not select SPI domain clock");
	if (!spi_stm32_is_subghzspi(dev)) {
		err = pinctrl_apply_state(cfg->pcfg, PINCTRL_STATE_DEFAULT);
			LOG_ERR("SPI pinctrl setup failed (%d)", err);
	cfg->irq_config(dev);
	if ((data->dma_rx.dma_dev != NULL) &&
	    !device_is_ready(data->dma_rx.dma_dev)) {
		LOG_ERR("%s device not ready", data->dma_rx.dma_dev->name);
		return -ENODEV;
	if ((data->dma_tx.dma_dev != NULL) &&
	    !device_is_ready(data->dma_tx.dma_dev)) {
		LOG_ERR("%s device not ready", data->dma_tx.dma_dev->name);
		return -ENODEV;
	LOG_DBG("SPI with DMA transfer");
	err = spi_context_cs_configure_all(&data->ctx);
	spi_context_unlock_unconditionally(&data->ctx);
	return pm_device_runtime_enable(dev);
static int spi_stm32_pm_action(const struct device *dev,
	const struct spi_stm32_config *config = dev->config;

	if (!spi_stm32_is_subghzspi(dev)) {
		err = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	err = clock_control_on(clk, (clock_control_subsys_t)&config->pclken[0]);
		LOG_ERR("Could not enable SPI clock");
	err = clock_control_off(clk, (clock_control_subsys_t)&config->pclken[0]);
		LOG_ERR("Could not disable SPI clock");
	if (!spi_stm32_is_subghzspi(dev)) {
		err = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_SLEEP);
		if ((err < 0) && (err != -ENOENT)) {
			/*
			 * If -ENOENT is returned, no pins were defined for sleep mode:
			 * "SPI pinctrl sleep state not available"
			 */
	return -ENOTSUP;

static void spi_stm32_irq_config_func_##id(const struct device *dev)
static void spi_stm32_irq_config_func_##id(const struct device *dev) \
	.spi = (SPI_TypeDef *) DT_INST_REG_ADDR(id), \
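/* Editor's sketch (assumption): each devicetree instance is then registered
 * with something like the following, wiring init, PM, data, config and the
 * API table together (the _##id symbol names are hypothetical):
 */
DEVICE_DT_INST_DEFINE(id, spi_stm32_init, PM_DEVICE_DT_INST_GET(id),
		      &spi_stm32_dev_data_##id, &spi_stm32_cfg_##id,
		      POST_KERNEL, CONFIG_SPI_INIT_PRIORITY, &api_funcs);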