Lines Matching +full:periph +full:- +full:clock +full:- +full:config

4  * SPDX-License-Identifier: Apache-2.0
35 #include <zephyr/dt-bindings/memory-attr/memory-attr-arm.h>
39 #include <zephyr/linker/linker-defs.h>
79 struct spi_stm32_data *data = dev->data; in spi_stm32_pm_policy_state_lock_get()
81 if (!data->pm_policy_state_on) { in spi_stm32_pm_policy_state_lock_get()
82 data->pm_policy_state_on = true; in spi_stm32_pm_policy_state_lock_get()
95 struct spi_stm32_data *data = dev->data; in spi_stm32_pm_policy_state_lock_put()
97 if (data->pm_policy_state_on) { in spi_stm32_pm_policy_state_lock_put()
98 data->pm_policy_state_on = false; in spi_stm32_pm_policy_state_lock_put()
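The two fragments above toggle a power-management lock around SPI activity. A minimal standalone sketch of that pattern using Zephyr's PM policy API; the chosen state/substate pair is an assumption for illustration, and the example_ names are placeholders:

#include <zephyr/pm/policy.h>
#include <zephyr/pm/state.h>

/* Tracks whether this module currently holds the lock (placeholder state). */
static bool example_pm_lock_held;

static void example_pm_lock_get(void)
{
	if (!example_pm_lock_held) {
		example_pm_lock_held = true;
		/* Forbid entering SUSPEND_TO_IDLE while a transfer is in flight. */
		pm_policy_state_lock_get(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);
	}
}

static void example_pm_lock_put(void)
{
	if (example_pm_lock_held) {
		example_pm_lock_held = false;
		pm_policy_state_lock_put(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);
	}
}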
119 * the linker to avoid potential DMA cache-coherency problems.
138 spi_dma_data->status_flags |= SPI_STM32_DMA_ERROR_FLAG; in dma_callback()
141 if (channel == spi_dma_data->dma_tx.channel) { in dma_callback()
143 spi_dma_data->status_flags |= SPI_STM32_DMA_TX_DONE_FLAG; in dma_callback()
144 } else if (channel == spi_dma_data->dma_rx.channel) { in dma_callback()
146 spi_dma_data->status_flags |= SPI_STM32_DMA_RX_DONE_FLAG; in dma_callback()
149 spi_dma_data->status_flags |= SPI_STM32_DMA_ERROR_FLAG; in dma_callback()
153 k_sem_give(&spi_dma_data->status_sem); in dma_callback()
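dma_callback() above records per-channel completion or errors in status_flags and wakes the waiting thread with a semaphore. A self-contained sketch of the same pattern against Zephyr's dma_callback_t signature; all example_ names are placeholders, not driver symbols:

#include <zephyr/kernel.h>
#include <zephyr/drivers/dma.h>

#define EXAMPLE_DMA_ERROR_FLAG   BIT(0)
#define EXAMPLE_DMA_TX_DONE_FLAG BIT(1)
#define EXAMPLE_DMA_RX_DONE_FLAG BIT(2)

struct example_dma_ctx {
	struct k_sem status_sem;
	uint32_t status_flags;
	uint32_t tx_channel;
	uint32_t rx_channel;
};

/* Matches dma_callback_t: a negative status reports a transfer error. */
static void example_dma_callback(const struct device *dma_dev, void *user_data,
				 uint32_t channel, int status)
{
	struct example_dma_ctx *ctx = user_data;

	if (status < 0) {
		ctx->status_flags |= EXAMPLE_DMA_ERROR_FLAG;
	} else if (channel == ctx->tx_channel) {
		ctx->status_flags |= EXAMPLE_DMA_TX_DONE_FLAG;
	} else if (channel == ctx->rx_channel) {
		ctx->status_flags |= EXAMPLE_DMA_RX_DONE_FLAG;
	} else {
		/* Callback for a channel we did not start: treat as an error. */
		ctx->status_flags |= EXAMPLE_DMA_ERROR_FLAG;
	}

	k_sem_give(&ctx->status_sem);
}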
159 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_dma_tx_load()
160 struct spi_stm32_data *data = dev->data; in spi_stm32_dma_tx_load()
165 struct stream *stream = &data->dma_tx; in spi_stm32_dma_tx_load()
167 blk_cfg = &stream->dma_blk_cfg; in spi_stm32_dma_tx_load()
171 blk_cfg->block_size = len; in spi_stm32_dma_tx_load()
173 /* tx direction has memory as source and periph as dest. */ in spi_stm32_dma_tx_load()
180 blk_cfg->source_address = (uint32_t)&dummy_rx_tx_buffer; in spi_stm32_dma_tx_load()
181 blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_tx_load()
183 blk_cfg->source_address = (uint32_t)buf; in spi_stm32_dma_tx_load()
184 if (data->dma_tx.src_addr_increment) { in spi_stm32_dma_tx_load()
185 blk_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_stm32_dma_tx_load()
187 blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_tx_load()
191 blk_cfg->dest_address = ll_func_dma_get_reg_addr(cfg->spi, SPI_STM32_DMA_TX); in spi_stm32_dma_tx_load()
193 if (data->dma_tx.dst_addr_increment) { in spi_stm32_dma_tx_load()
194 blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_stm32_dma_tx_load()
196 blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_tx_load()
200 blk_cfg->fifo_mode_control = data->dma_tx.fifo_threshold; in spi_stm32_dma_tx_load()
203 stream->dma_cfg.head_block = blk_cfg; in spi_stm32_dma_tx_load()
205 stream->dma_cfg.user_data = data; in spi_stm32_dma_tx_load()
206 /* pass our client origin to the dma: data->dma_tx.dma_channel */ in spi_stm32_dma_tx_load()
207 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.channel, in spi_stm32_dma_tx_load()
208 &stream->dma_cfg); in spi_stm32_dma_tx_load()
215 return dma_start(data->dma_tx.dma_dev, data->dma_tx.channel); in spi_stm32_dma_tx_load()
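spi_stm32_dma_tx_load() fills a dma_block_config (memory as source, the SPI data register as destination), hands it to dma_config(), then starts the channel. A simplified sketch of that sequence with the generic DMA API; the device, channel and register address parameters are placeholders that the real driver derives from devicetree, and a real driver keeps these structures in its instance data rather than on the stack:

#include <zephyr/drivers/dma.h>

static int example_dma_tx_load(const struct device *dma_dev, uint32_t channel,
			       uint32_t periph_reg, const uint8_t *buf, size_t len,
			       dma_callback_t cb, void *user_data)
{
	/* One block: read from memory with increment, write to a fixed register. */
	struct dma_block_config blk = {
		.source_address = (uint32_t)buf,
		.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
		.dest_address = periph_reg,
		.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
		.block_size = len,
	};
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_PERIPHERAL,
		.source_data_size = 1,          /* bytes per source access */
		.dest_data_size = 1,            /* bytes per peripheral access */
		.block_count = 1,
		.head_block = &blk,
		.dma_callback = cb,
		.user_data = user_data,
	};
	int ret = dma_config(dma_dev, channel, &cfg);

	if (ret != 0) {
		return ret;
	}

	return dma_start(dma_dev, channel);
}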
221 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_dma_rx_load()
222 struct spi_stm32_data *data = dev->data; in spi_stm32_dma_rx_load()
227 struct stream *stream = &data->dma_rx; in spi_stm32_dma_rx_load()
229 blk_cfg = &stream->dma_blk_cfg; in spi_stm32_dma_rx_load()
233 blk_cfg->block_size = len; in spi_stm32_dma_rx_load()
236 /* rx direction has periph as source and mem as dest. */ in spi_stm32_dma_rx_load()
239 blk_cfg->dest_address = (uint32_t)&dummy_rx_tx_buffer; in spi_stm32_dma_rx_load()
240 blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_rx_load()
242 blk_cfg->dest_address = (uint32_t)buf; in spi_stm32_dma_rx_load()
243 if (data->dma_rx.dst_addr_increment) { in spi_stm32_dma_rx_load()
244 blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_stm32_dma_rx_load()
246 blk_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_rx_load()
250 blk_cfg->source_address = ll_func_dma_get_reg_addr(cfg->spi, SPI_STM32_DMA_RX); in spi_stm32_dma_rx_load()
251 if (data->dma_rx.src_addr_increment) { in spi_stm32_dma_rx_load()
252 blk_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_stm32_dma_rx_load()
254 blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_stm32_dma_rx_load()
258 blk_cfg->fifo_mode_control = data->dma_rx.fifo_threshold; in spi_stm32_dma_rx_load()
261 stream->dma_cfg.head_block = blk_cfg; in spi_stm32_dma_rx_load()
262 stream->dma_cfg.user_data = data; in spi_stm32_dma_rx_load()
265 /* pass our client origin to the dma: data->dma_rx.channel */ in spi_stm32_dma_rx_load()
266 ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel, in spi_stm32_dma_rx_load()
267 &stream->dma_cfg); in spi_stm32_dma_rx_load()
274 return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel); in spi_stm32_dma_rx_load()
279 struct spi_stm32_data *data = dev->data; in spi_dma_move_buffers()
283 dma_segment_len = len * data->dma_rx.dma_cfg.dest_data_size; in spi_dma_move_buffers()
284 ret = spi_stm32_dma_rx_load(dev, data->ctx.rx_buf, dma_segment_len); in spi_dma_move_buffers()
290 dma_segment_len = len * data->dma_tx.dma_cfg.source_data_size; in spi_dma_move_buffers()
291 ret = spi_stm32_dma_tx_load(dev, data->ctx.tx_buf, dma_segment_len); in spi_dma_move_buffers()
304 const uint8_t frame_size = SPI_WORD_SIZE_GET(data->ctx.config->operation); in spi_stm32_send_next_frame()
308 if (spi_context_tx_buf_on(&data->ctx)) { in spi_stm32_send_next_frame()
309 tx_frame = UNALIGNED_GET((uint8_t *)(data->ctx.tx_buf)); in spi_stm32_send_next_frame()
312 spi_context_update_tx(&data->ctx, 1, 1); in spi_stm32_send_next_frame()
314 if (spi_context_tx_buf_on(&data->ctx)) { in spi_stm32_send_next_frame()
315 tx_frame = UNALIGNED_GET((uint16_t *)(data->ctx.tx_buf)); in spi_stm32_send_next_frame()
318 spi_context_update_tx(&data->ctx, 2, 1); in spi_stm32_send_next_frame()
325 const uint8_t frame_size = SPI_WORD_SIZE_GET(data->ctx.config->operation); in spi_stm32_read_next_frame()
330 if (spi_context_rx_buf_on(&data->ctx)) { in spi_stm32_read_next_frame()
331 UNALIGNED_PUT(rx_frame, (uint8_t *)data->ctx.rx_buf); in spi_stm32_read_next_frame()
333 spi_context_update_rx(&data->ctx, 1, 1); in spi_stm32_read_next_frame()
336 if (spi_context_rx_buf_on(&data->ctx)) { in spi_stm32_read_next_frame()
337 UNALIGNED_PUT(rx_frame, (uint16_t *)data->ctx.rx_buf); in spi_stm32_read_next_frame()
339 spi_context_update_rx(&data->ctx, 2, 1); in spi_stm32_read_next_frame()
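The send/read helpers above choose 8- or 16-bit buffer accesses from the configured word size and advance the context by one or two bytes per frame. A small sketch of that frame packing with UNALIGNED_GET/UNALIGNED_PUT; the helper names are illustrative only:

#include <zephyr/kernel.h>

/* Fetch one TX frame from an arbitrarily aligned buffer. */
static uint16_t example_get_tx_frame(const uint8_t *tx_buf, uint8_t word_size)
{
	if (word_size == 8U) {
		return UNALIGNED_GET((const uint8_t *)tx_buf);
	}
	return UNALIGNED_GET((const uint16_t *)tx_buf);
}

/* Store one received frame back into an arbitrarily aligned buffer. */
static void example_put_rx_frame(uint8_t *rx_buf, uint16_t rx_frame, uint8_t word_size)
{
	if (word_size == 8U) {
		UNALIGNED_PUT((uint8_t)rx_frame, (uint8_t *)rx_buf);
	} else {
		UNALIGNED_PUT(rx_frame, (uint16_t *)rx_buf);
	}
}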
345 return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx); in spi_stm32_transfer_ongoing()
361 return -EIO; in spi_stm32_get_err()
382 if (cfg->fifo_enabled) { in spi_stm32_shift_m()
383 spi_stm32_shift_fifo(cfg->spi, data); in spi_stm32_shift_m()
385 while (!ll_func_tx_is_not_full(cfg->spi)) { in spi_stm32_shift_m()
389 spi_stm32_send_next_frame(cfg->spi, data); in spi_stm32_shift_m()
391 while (!ll_func_rx_is_not_empty(cfg->spi)) { in spi_stm32_shift_m()
395 spi_stm32_read_next_frame(cfg->spi, data); in spi_stm32_shift_m()
402 if (ll_func_tx_is_not_full(spi) && spi_context_tx_on(&data->ctx)) { in spi_stm32_shift_s()
405 if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) { in spi_stm32_shift_s()
406 tx_frame = UNALIGNED_GET((uint8_t *)(data->ctx.tx_buf)); in spi_stm32_shift_s()
408 spi_context_update_tx(&data->ctx, 1, 1); in spi_stm32_shift_s()
410 tx_frame = UNALIGNED_GET((uint16_t *)(data->ctx.tx_buf)); in spi_stm32_shift_s()
412 spi_context_update_tx(&data->ctx, 2, 1); in spi_stm32_shift_s()
419 spi_context_rx_buf_on(&data->ctx)) { in spi_stm32_shift_s()
422 if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) { in spi_stm32_shift_s()
424 UNALIGNED_PUT(rx_frame, (uint8_t *)data->ctx.rx_buf); in spi_stm32_shift_s()
425 spi_context_update_rx(&data->ctx, 1, 1); in spi_stm32_shift_s()
428 UNALIGNED_PUT(rx_frame, (uint16_t *)data->ctx.rx_buf); in spi_stm32_shift_s()
429 spi_context_update_rx(&data->ctx, 2, 1); in spi_stm32_shift_s()
438 * TODO: support 16-bit data frames.
443 uint16_t operation = data->ctx.config->operation; in spi_stm32_shift_frames()
448 spi_stm32_shift_s(cfg->spi, data); in spi_stm32_shift_frames()
451 return spi_stm32_get_err(cfg->spi); in spi_stm32_shift_frames()
456 struct spi_stm32_data *data = dev->data; in spi_stm32_cs_control()
458 spi_context_cs_control(&data->ctx, on); in spi_stm32_cs_control()
461 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_cs_control()
463 if (cfg->use_subghzspi_nss) { in spi_stm32_cs_control()
475 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_complete()
476 SPI_TypeDef *spi = cfg->spi; in spi_stm32_complete()
477 struct spi_stm32_data *data = dev->data; in spi_stm32_complete()
485 if (cfg->fifo_enabled) { in spi_stm32_complete()
514 if (cfg->fifo_enabled) { in spi_stm32_complete()
522 if (!(data->ctx.config->operation & SPI_HOLD_ON_CS)) { in spi_stm32_complete()
527 spi_context_complete(&data->ctx, dev, status); in spi_stm32_complete()
536 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_isr()
537 struct spi_stm32_data *data = dev->data; in spi_stm32_isr()
538 SPI_TypeDef *spi = cfg->spi; in spi_stm32_isr()
542 * Do it only when fifo is enabled to leave non-fifo functionality untouched for now in spi_stm32_isr()
544 if (cfg->fifo_enabled) { in spi_stm32_isr()
567 const struct spi_config *config) in spi_stm32_configure() argument
569 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_configure()
570 struct spi_stm32_data *data = dev->data; in spi_stm32_configure()
581 SPI_TypeDef *spi = cfg->spi; in spi_stm32_configure()
582 uint32_t clock; in spi_stm32_configure() local
585 if (spi_context_configured(&data->ctx, config)) { in spi_stm32_configure()
590 if ((SPI_WORD_SIZE_GET(config->operation) != 8) in spi_stm32_configure()
591 && (SPI_WORD_SIZE_GET(config->operation) != 16)) { in spi_stm32_configure()
592 return -ENOTSUP; in spi_stm32_configure()
596 if ((config->operation & SPI_FRAME_FORMAT_TI) == SPI_FRAME_FORMAT_TI) { in spi_stm32_configure()
602 return -ENOTSUP; in spi_stm32_configure()
610 if (IS_ENABLED(STM32_SPI_DOMAIN_CLOCK_SUPPORT) && (cfg->pclk_len > 1)) { in spi_stm32_configure()
612 (clock_control_subsys_t) &cfg->pclken[1], &clock) < 0) { in spi_stm32_configure()
614 return -EIO; in spi_stm32_configure()
618 (clock_control_subsys_t) &cfg->pclken[0], &clock) < 0) { in spi_stm32_configure()
620 return -EIO; in spi_stm32_configure()
625 uint32_t clk = clock >> br; in spi_stm32_configure()
627 if (clk <= config->frequency) { in spi_stm32_configure()
634 config->frequency, in spi_stm32_configure()
635 clock >> 1, in spi_stm32_configure()
636 clock >> ARRAY_SIZE(scaler)); in spi_stm32_configure()
637 return -EINVAL; in spi_stm32_configure()
641 LL_SPI_SetBaudRatePrescaler(spi, scaler[br - 1]); in spi_stm32_configure()
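The loop above searches for the smallest power-of-two divider of the SPI kernel clock whose result does not exceed the requested bus frequency, then programs the matching prescaler. A worked sketch of just that search, assuming the eight prescaler steps (divide by 2 through 256) of STM32 SPI:

#include <stdint.h>
#include <errno.h>

static int example_pick_prescaler(uint32_t periph_clock_hz, uint32_t req_hz,
				  uint8_t *out_div_pow2)
{
	for (uint8_t br = 1U; br <= 8U; br++) {
		if ((periph_clock_hz >> br) <= req_hz) {
			*out_div_pow2 = br;     /* effective divider is 1 << br */
			return 0;
		}
	}

	/* Requested frequency is lower than periph_clock_hz / 256. */
	return -EINVAL;
}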
643 if (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) { in spi_stm32_configure()
649 if (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) { in spi_stm32_configure()
657 if (config->operation & SPI_TRANSFER_LSB) { in spi_stm32_configure()
665 if (spi_cs_is_gpio(config) || !IS_ENABLED(CONFIG_SPI_STM32_USE_HW_SS)) { in spi_stm32_configure()
667 if (SPI_OP_MODE_GET(config->operation) == SPI_OP_MODE_MASTER) { in spi_stm32_configure()
674 if (config->operation & SPI_OP_MODE_SLAVE) { in spi_stm32_configure()
681 if (config->operation & SPI_OP_MODE_SLAVE) { in spi_stm32_configure()
687 if (SPI_WORD_SIZE_GET(config->operation) == 8) { in spi_stm32_configure()
694 LL_SPI_SetMasterSSIdleness(spi, cfg->mssi_clocks); in spi_stm32_configure()
695 LL_SPI_SetInterDataIdleness(spi, (cfg->midi_clocks << SPI_CFG2_MIDI_Pos)); in spi_stm32_configure()
703 data->ctx.config = config; in spi_stm32_configure()
705 LOG_DBG("Installed config %p: freq %uHz (div = %u)," in spi_stm32_configure()
707 config, clock >> br, 1 << br, in spi_stm32_configure()
708 (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) ? 1 : 0, in spi_stm32_configure()
709 (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA) ? 1 : 0, in spi_stm32_configure()
710 (SPI_MODE_GET(config->operation) & SPI_MODE_LOOP) ? 1 : 0, in spi_stm32_configure()
711 config->slave); in spi_stm32_configure()
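Everything spi_stm32_configure() decodes (word size, CPOL/CPHA, bit order, role) arrives packed in the operation field of struct spi_config. An illustration of how a caller might build such a configuration; the frequency and mode chosen here are arbitrary example values:

#include <zephyr/drivers/spi.h>

/* 8-bit words, mode 3 (CPOL=1, CPHA=1), MSB first, controller role. */
static const struct spi_config example_spi_cfg = {
	.frequency = 1000000U,
	.operation = SPI_OP_MODE_MASTER | SPI_WORD_SET(8) |
		     SPI_MODE_CPOL | SPI_MODE_CPHA | SPI_TRANSFER_MSB,
	.slave = 0,
};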
717 const struct spi_config *config) in spi_stm32_release() argument
719 struct spi_stm32_data *data = dev->data; in spi_stm32_release()
720 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_release()
722 spi_context_unlock_unconditionally(&data->ctx); in spi_stm32_release()
723 ll_func_disable_spi(cfg->spi); in spi_stm32_release()
729 static int32_t spi_stm32_count_bufset_frames(const struct spi_config *config, in spi_stm32_count_bufset_frames() argument
738 for (size_t i = 0; i < bufs->count; i++) { in spi_stm32_count_bufset_frames()
739 num_bytes += bufs->buffers[i].len; in spi_stm32_count_bufset_frames()
742 uint8_t bytes_per_frame = SPI_WORD_SIZE_GET(config->operation) / 8; in spi_stm32_count_bufset_frames()
745 return -EINVAL; in spi_stm32_count_bufset_frames()
750 static int32_t spi_stm32_count_total_frames(const struct spi_config *config, in spi_stm32_count_total_frames() argument
754 int tx_frames = spi_stm32_count_bufset_frames(config, tx_bufs); in spi_stm32_count_total_frames()
760 int rx_frames = spi_stm32_count_bufset_frames(config, rx_bufs); in spi_stm32_count_total_frames()
767 return -EMSGSIZE; in spi_stm32_count_total_frames()
775 const struct spi_config *config, in transceive() argument
782 const struct spi_stm32_config *cfg = dev->config; in transceive()
783 struct spi_stm32_data *data = dev->data; in transceive()
784 SPI_TypeDef *spi = cfg->spi; in transceive()
793 return -ENOTSUP; in transceive()
797 spi_context_lock(&data->ctx, asynchronous, cb, userdata, config); in transceive()
801 ret = spi_stm32_configure(dev, config); in transceive()
807 if (SPI_WORD_SIZE_GET(config->operation) == 8) { in transceive()
808 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1); in transceive()
810 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 2); in transceive()
814 if (cfg->fifo_enabled && SPI_OP_MODE_GET(config->operation) == SPI_OP_MODE_MASTER) { in transceive()
816 config, tx_bufs, rx_bufs); in transceive()
845 * system clock frequency (see errata sheet ES0392). in transceive()
856 if (cfg->fifo_enabled) { in transceive()
869 ret = spi_context_wait_for_completion(&data->ctx); in transceive()
878 if (spi_context_is_slave(&data->ctx) && !ret) { in transceive()
879 ret = data->ctx.recv_frames; in transceive()
886 spi_context_release(&data->ctx, ret); in transceive()
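transceive() is reached through the public SPI API. A short usage sketch showing how an application would exercise this path; the device pointer and buffer contents are placeholders:

#include <zephyr/drivers/spi.h>

static int example_transfer(const struct device *spi_dev,
			    const struct spi_config *cfg)
{
	uint8_t tx_data[4] = {0x9F, 0x00, 0x00, 0x00};
	uint8_t rx_data[4];

	const struct spi_buf tx_buf = {.buf = tx_data, .len = sizeof(tx_data)};
	const struct spi_buf rx_buf = {.buf = rx_data, .len = sizeof(rx_data)};
	const struct spi_buf_set tx = {.buffers = &tx_buf, .count = 1};
	const struct spi_buf_set rx = {.buffers = &rx_buf, .count = 1};

	/* Full-duplex transfer: 4 bytes out, 4 bytes in. */
	return spi_transceive(spi_dev, cfg, &tx, &rx);
}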
895 struct spi_stm32_data *data = dev->data; in wait_dma_rx_tx_done()
896 int res = -1; in wait_dma_rx_tx_done()
903 if (IS_ENABLED(CONFIG_SPI_SLAVE) && spi_context_is_slave(&data->ctx)) { in wait_dma_rx_tx_done()
910 res = k_sem_take(&data->status_sem, timeout); in wait_dma_rx_tx_done()
915 if (data->status_flags & SPI_STM32_DMA_ERROR_FLAG) { in wait_dma_rx_tx_done()
916 return -EIO; in wait_dma_rx_tx_done()
919 if (data->status_flags & SPI_STM32_DMA_DONE_FLAG) { in wait_dma_rx_tx_done()
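wait_dma_rx_tx_done() blocks on the semaphore given by the DMA callback and converts the accumulated flags into a return code. A sketch of that wait loop, reusing the example_dma_ctx type and flag macros from the callback sketch earlier (so not fully standalone):

#include <zephyr/kernel.h>
#include <errno.h>

static int example_wait_dma_done(struct example_dma_ctx *ctx, k_timeout_t timeout)
{
	for (;;) {
		if (k_sem_take(&ctx->status_sem, timeout) != 0) {
			return -ETIMEDOUT;
		}
		if (ctx->status_flags & EXAMPLE_DMA_ERROR_FLAG) {
			return -EIO;
		}
		/* Done only once both directions have completed. */
		if ((ctx->status_flags &
		     (EXAMPLE_DMA_TX_DONE_FLAG | EXAMPLE_DMA_RX_DONE_FLAG)) ==
		    (EXAMPLE_DMA_TX_DONE_FLAG | EXAMPLE_DMA_RX_DONE_FLAG)) {
			return 0;
		}
	}
}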
935 ((buf + len_bytes - 1) <= ((uintptr_t)_nocache_ram_end)); in buf_in_nocache()
950 return buf->buf == NULL; in is_dummy_buffer()
955 for (size_t i = 0; i < bufs->count; i++) { in spi_buf_set_in_nocache()
956 const struct spi_buf *buf = &bufs->buffers[i]; in spi_buf_set_in_nocache()
959 !buf_in_nocache((uintptr_t)buf->buf, buf->len)) { in spi_buf_set_in_nocache()
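buf_in_nocache()/spi_buf_set_in_nocache() reject DMA transfers whose buffers sit in cacheable RAM. In an application built with CONFIG_NOCACHE_MEMORY=y, suitable buffers can be declared as in the sketch below; the names, sizes and alignment are illustrative:

#include <zephyr/kernel.h>
#include <zephyr/linker/section_tags.h>

/* Placed in the non-cacheable region so the DMA sees coherent data. */
static __nocache __aligned(32) uint8_t example_dma_tx_buf[64];
static __nocache __aligned(32) uint8_t example_dma_rx_buf[64];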
968 const struct spi_config *config, in transceive_dma() argument
975 const struct spi_stm32_config *cfg = dev->config; in transceive_dma()
976 struct spi_stm32_data *data = dev->data; in transceive_dma()
977 SPI_TypeDef *spi = cfg->spi; in transceive_dma()
986 return -ENOTSUP; in transceive_dma()
992 return -EFAULT; in transceive_dma()
996 spi_context_lock(&data->ctx, asynchronous, cb, userdata, config); in transceive_dma()
1000 k_sem_reset(&data->status_sem); in transceive_dma()
1002 ret = spi_stm32_configure(dev, config); in transceive_dma()
1008 if (SPI_WORD_SIZE_GET(config->operation) == 8) { in transceive_dma()
1009 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 1); in transceive_dma()
1011 spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, 2); in transceive_dma()
1030 while (data->ctx.rx_len > 0 || data->ctx.tx_len > 0) { in transceive_dma()
1033 if (data->ctx.rx_len == 0) { in transceive_dma()
1034 dma_len = data->ctx.tx_len; in transceive_dma()
1035 } else if (data->ctx.tx_len == 0) { in transceive_dma()
1036 dma_len = data->ctx.rx_len; in transceive_dma()
1038 dma_len = MIN(data->ctx.tx_len, data->ctx.rx_len); in transceive_dma()
1041 data->status_flags = 0; in transceive_dma()
1082 SPI_WORD_SIZE_GET(config->operation)); in transceive_dma()
1084 spi_context_update_tx(&data->ctx, frame_size_bytes, dma_len); in transceive_dma()
1085 spi_context_update_rx(&data->ctx, frame_size_bytes, dma_len); in transceive_dma()
1092 /* On some MCUs the configuration register is only writable while the SPI is disabled */ in transceive_dma()
1096 err = dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel); in transceive_dma()
1100 err = dma_stop(data->dma_tx.dma_dev, data->dma_tx.channel); in transceive_dma()
1106 if (spi_context_is_slave(&data->ctx) && !ret) { in transceive_dma()
1107 ret = data->ctx.recv_frames; in transceive_dma()
1112 spi_context_release(&data->ctx, ret); in transceive_dma()
1121 const struct spi_config *config, in spi_stm32_transceive() argument
1126 struct spi_stm32_data *data = dev->data; in spi_stm32_transceive()
1128 if ((data->dma_tx.dma_dev != NULL) in spi_stm32_transceive()
1129 && (data->dma_rx.dma_dev != NULL)) { in spi_stm32_transceive()
1130 return transceive_dma(dev, config, tx_bufs, rx_bufs, in spi_stm32_transceive()
1134 return transceive(dev, config, tx_bufs, rx_bufs, false, NULL, NULL); in spi_stm32_transceive()
1139 const struct spi_config *config, in spi_stm32_transceive_async() argument
1145 return transceive(dev, config, tx_bufs, rx_bufs, true, cb, userdata); in spi_stm32_transceive_async()
1163 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_is_subghzspi()
1165 return cfg->use_subghzspi_nss; in spi_stm32_is_subghzspi()
1174 struct spi_stm32_data *data __attribute__((unused)) = dev->data; in spi_stm32_init()
1175 const struct spi_stm32_config *cfg = dev->config; in spi_stm32_init()
1179 LOG_ERR("clock control device not ready"); in spi_stm32_init()
1180 return -ENODEV; in spi_stm32_init()
1184 (clock_control_subsys_t) &cfg->pclken[0]); in spi_stm32_init()
1186 LOG_ERR("Could not enable SPI clock"); in spi_stm32_init()
1190 if (IS_ENABLED(STM32_SPI_DOMAIN_CLOCK_SUPPORT) && (cfg->pclk_len > 1)) { in spi_stm32_init()
1192 (clock_control_subsys_t) &cfg->pclken[1], in spi_stm32_init()
1195 LOG_ERR("Could not select SPI domain clock"); in spi_stm32_init()
1202 err = pinctrl_apply_state(cfg->pcfg, PINCTRL_STATE_DEFAULT); in spi_stm32_init()
1210 cfg->irq_config(dev); in spi_stm32_init()
1214 if ((data->dma_rx.dma_dev != NULL) && in spi_stm32_init()
1215 !device_is_ready(data->dma_rx.dma_dev)) { in spi_stm32_init()
1216 LOG_ERR("%s device not ready", data->dma_rx.dma_dev->name); in spi_stm32_init()
1217 return -ENODEV; in spi_stm32_init()
1220 if ((data->dma_tx.dma_dev != NULL) && in spi_stm32_init()
1221 !device_is_ready(data->dma_tx.dma_dev)) { in spi_stm32_init()
1222 LOG_ERR("%s device not ready", data->dma_tx.dma_dev->name); in spi_stm32_init()
1223 return -ENODEV; in spi_stm32_init()
1230 err = spi_context_cs_configure_all(&data->ctx); in spi_stm32_init()
1235 spi_context_unlock_unconditionally(&data->ctx); in spi_stm32_init()
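spi_stm32_init() first checks that the clock controller is ready and switches on the peripheral clock before touching the SPI block. A minimal sketch of that bring-up step with the generic clock_control API; the controller device and subsystem handle are placeholders that the real driver takes from the peripheral's devicetree clocks property:

#include <zephyr/device.h>
#include <zephyr/drivers/clock_control.h>
#include <errno.h>

static int example_enable_periph_clock(const struct device *clk_ctrl,
				       clock_control_subsys_t subsys)
{
	if (!device_is_ready(clk_ctrl)) {
		return -ENODEV;
	}

	/* Gate the peripheral clock on before any register access. */
	return clock_control_on(clk_ctrl, subsys);
}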
1244 const struct spi_stm32_config *config = dev->config; in spi_stm32_pm_action() local
1253 err = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT); in spi_stm32_pm_action()
1259 /* enable clock */ in spi_stm32_pm_action()
1260 err = clock_control_on(clk, (clock_control_subsys_t)&config->pclken[0]); in spi_stm32_pm_action()
1262 LOG_ERR("Could not enable SPI clock"); in spi_stm32_pm_action()
1267 /* Stop device clock. */ in spi_stm32_pm_action()
1268 err = clock_control_off(clk, (clock_control_subsys_t)&config->pclken[0]); in spi_stm32_pm_action()
1270 LOG_ERR("Could not disable SPI clock"); in spi_stm32_pm_action()
1276 err = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_SLEEP); in spi_stm32_pm_action()
1277 if ((err < 0) && (err != -ENOENT)) { in spi_stm32_pm_action()
1279 * If returning -ENOENT, no pins were defined for sleep mode: in spi_stm32_pm_action()
1291 return -ENOTSUP; in spi_stm32_pm_action()