Lines Matching +full:dma +full:-ch +full:-sph (Zephyr drivers/spi/spi_pl022.c; each matched line carries its source line number and its enclosing function)

4  * SPDX-License-Identifier: Apache-2.0
22 #include <zephyr/drivers/dma.h>
71 #define SSP_CR0_MASK_SPH SSP_MASK(CR0, SPH)
206 * DMA Control Register
213 /* Receive DMA Enable bit */
215 /* Transmit DMA Enable bit */
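The two SSPDMACR bits above gate the SSP's DMA request lines. As a point of reference, a minimal sketch of what the driver's `SSP_DMACR_MASK_*` values resolve to, assuming the Zephyr `BIT()` helper and the bit positions given in the PL022 TRM (ARM DDI 0194: bit 0 RXDMAE, bit 1 TXDMAE); the driver builds them through its own `SSP_MASK()` macro instead:

```c
#include <zephyr/sys/util.h>

/* SSPDMACR bit layout per the PL022 TRM (sketch, not the driver's macros) */
#define SSP_DMACR_MASK_RXDMAE BIT(0) /* receive FIFO DMA request enable */
#define SSP_DMACR_MASK_TXDMAE BIT(1) /* transmit FIFO DMA request enable */
```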
293 const struct spi_pl022_dma_config dma[NUM_OF_DIRECTION]; member
303 struct spi_pl022_dma_data dma[NUM_OF_DIRECTION]; member
333 for (postdiv = SCR_MAX + 1; postdiv > SCR_MIN + 1; --postdiv) { in spi_pl022_calc_postdiv()
334 if (pclk / (prescale * (postdiv - 1)) > baud) { in spi_pl022_calc_postdiv()
338 return postdiv - 1; in spi_pl022_calc_postdiv()
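The PL022 derives its bit rate as SSPCLK / (CPSDVSR * (1 + SCR)); the driver models this as pclk / (prescale * postdiv) and walks postdiv down until the requested baud would be exceeded. A self-contained sketch with a worked example; the `calc_prescale()` body and the CPSDVSR/SCR bounds are reconstructions from the TRM, not lines matched above:

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* PL022 TRM limits: CPSDVSR is an even value in 2..254, SCR in 0..255 */
#define CPSR_MIN 2
#define CPSR_MAX 254
#define SCR_MIN  0
#define SCR_MAX  255

/* Smallest even prescale for which some postdiv can still reach baud */
static uint32_t calc_prescale(uint32_t pclk, uint32_t baud)
{
	uint32_t prescale;

	for (prescale = CPSR_MIN; prescale < CPSR_MAX; prescale += 2) {
		if (pclk < (prescale + 2) * (SCR_MAX + 1) * baud) {
			break;
		}
	}
	return prescale;
}

/* Same walk-down as the matched loop above; returns the SCR value */
static uint32_t calc_postdiv(uint32_t pclk, uint32_t baud, uint32_t prescale)
{
	uint32_t postdiv;

	for (postdiv = SCR_MAX + 1; postdiv > SCR_MIN + 1; --postdiv) {
		if (pclk / (prescale * (postdiv - 1)) > baud) {
			break;
		}
	}
	return postdiv - 1;
}

int main(void)
{
	const uint32_t pclk = 16000000U; /* example SSPCLK */
	const uint32_t baud = 1000000U;  /* requested bit rate */
	uint32_t cpsdvsr = calc_prescale(pclk, baud);
	uint32_t scr = calc_postdiv(pclk, baud, cpsdvsr);

	/* Prints: CPSDVSR=2 SCR=7 -> 1000000 bps */
	printf("CPSDVSR=%" PRIu32 " SCR=%" PRIu32 " -> %" PRIu32 " bps\n",
	       cpsdvsr, scr, pclk / (cpsdvsr * (scr + 1)));
	return 0;
}
```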
344 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_configure()
345 struct spi_pl022_data *data = dev->data; in spi_pl022_configure()
346 const uint16_t op = spicfg->operation; in spi_pl022_configure()
354 if (spi_context_configured(&data->ctx, spicfg)) { in spi_pl022_configure()
359 ret = clock_control_get_rate(cfg->clk_dev, cfg->clk_id, &pclk); in spi_pl022_configure()
361 return -EINVAL; in spi_pl022_configure()
365 if (spicfg->frequency > MAX_FREQ_CONTROLLER_MODE(pclk)) { in spi_pl022_configure()
368 return -ENOTSUP; in spi_pl022_configure()
372 LOG_ERR("LSB-first not supported"); in spi_pl022_configure()
373 return -ENOTSUP; in spi_pl022_configure()
376 /* Half-duplex mode has not been implemented */ in spi_pl022_configure()
378 LOG_ERR("Half-duplex not supported"); in spi_pl022_configure()
379 return -ENOTSUP; in spi_pl022_configure()
385 return -ENOTSUP; in spi_pl022_configure()
391 return -ENOTSUP; in spi_pl022_configure()
396 prescale = spi_pl022_calc_prescale(pclk, spicfg->frequency); in spi_pl022_configure()
397 postdiv = spi_pl022_calc_postdiv(pclk, spicfg->frequency, prescale); in spi_pl022_configure()
401 cr0 |= (SPI_WORD_SIZE_GET(op) - 1); in spi_pl022_configure()
409 SSP_WRITE_REG(SSP_CPSR(cfg->reg), prescale); in spi_pl022_configure()
410 SSP_WRITE_REG(SSP_CR0(cfg->reg), cr0); in spi_pl022_configure()
411 SSP_WRITE_REG(SSP_CR1(cfg->reg), cr1); in spi_pl022_configure()
414 if (!cfg->dma_enabled) { in spi_pl022_configure()
415 SSP_WRITE_REG(SSP_IMSC(cfg->reg), in spi_pl022_configure()
420 data->ctx.config = spicfg; in spi_pl022_configure()
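From the caller's side, the configure path above means only controller mode with MSB-first, full-duplex framing is accepted, with the word size taken from the operation field. A minimal usage sketch against the standard Zephyr SPI API; the `spi0` node label is a placeholder for your board's PL022 instance:

```c
#include <errno.h>
#include <zephyr/device.h>
#include <zephyr/drivers/spi.h>

static const struct device *spi_dev = DEVICE_DT_GET(DT_NODELABEL(spi0));

/* LSB-first and half-duplex would be rejected with -ENOTSUP */
static const struct spi_config spi_cfg = {
	.frequency = 1000000,
	.operation = SPI_OP_MODE_MASTER | SPI_WORD_SET(8) | SPI_TRANSFER_MSB,
};

int sample_transfer(void)
{
	uint8_t tx_data[4] = {0xde, 0xad, 0xbe, 0xef};
	uint8_t rx_data[4];
	const struct spi_buf tx_buf = {.buf = tx_data, .len = sizeof(tx_data)};
	const struct spi_buf rx_buf = {.buf = rx_data, .len = sizeof(rx_data)};
	const struct spi_buf_set tx = {.buffers = &tx_buf, .count = 1};
	const struct spi_buf_set rx = {.buffers = &rx_buf, .count = 1};

	if (!device_is_ready(spi_dev)) {
		return -ENODEV;
	}
	return spi_transceive(spi_dev, &spi_cfg, &tx, &rx);
}
```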
427 return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx); in spi_pl022_transfer_ongoing()
436 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_dma_enabled_num()
438 return cfg->dma_enabled ? 2 : 0; in spi_pl022_dma_enabled_num()
443 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_dma_setup()
444 struct spi_pl022_data *data = dev->data; in spi_pl022_dma_setup()
445 struct dma_config *dma_cfg = &data->dma[dir].config; in spi_pl022_dma_setup()
446 struct dma_block_config *block_cfg = &data->dma[dir].block; in spi_pl022_dma_setup()
447 const struct spi_pl022_dma_config *dma = &cfg->dma[dir]; in spi_pl022_dma_setup() local
453 dma_cfg->source_burst_length = 1; in spi_pl022_dma_setup()
454 dma_cfg->dest_burst_length = 1; in spi_pl022_dma_setup()
455 dma_cfg->user_data = (void *)dev; in spi_pl022_dma_setup()
456 dma_cfg->block_count = 1U; in spi_pl022_dma_setup()
457 dma_cfg->head_block = block_cfg; in spi_pl022_dma_setup()
458 dma_cfg->dma_slot = cfg->dma[dir].slot; in spi_pl022_dma_setup()
459 dma_cfg->channel_direction = dir == TX ? MEMORY_TO_PERIPHERAL : PERIPHERAL_TO_MEMORY; in spi_pl022_dma_setup()
461 if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) { in spi_pl022_dma_setup()
462 dma_cfg->source_data_size = 1; in spi_pl022_dma_setup()
463 dma_cfg->dest_data_size = 1; in spi_pl022_dma_setup()
465 dma_cfg->source_data_size = 2; in spi_pl022_dma_setup()
466 dma_cfg->dest_data_size = 2; in spi_pl022_dma_setup()
469 block_cfg->block_size = spi_context_max_continuous_chunk(&data->ctx); in spi_pl022_dma_setup()
472 dma_cfg->dma_callback = spi_pl022_dma_callback; in spi_pl022_dma_setup()
473 block_cfg->dest_address = SSP_DR(cfg->reg); in spi_pl022_dma_setup()
474 block_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_pl022_dma_setup()
475 if (spi_context_tx_buf_on(&data->ctx)) { in spi_pl022_dma_setup()
476 block_cfg->source_address = (uint32_t)data->ctx.tx_buf; in spi_pl022_dma_setup()
477 block_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_pl022_dma_setup()
479 block_cfg->source_address = (uint32_t)&dummy_tx; in spi_pl022_dma_setup()
480 block_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_pl022_dma_setup()
485 dma_cfg->dma_callback = spi_pl022_dma_callback; in spi_pl022_dma_setup()
486 block_cfg->source_address = SSP_DR(cfg->reg); in spi_pl022_dma_setup()
487 block_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_pl022_dma_setup()
489 if (spi_context_rx_buf_on(&data->ctx)) { in spi_pl022_dma_setup()
490 block_cfg->dest_address = (uint32_t)data->ctx.rx_buf; in spi_pl022_dma_setup()
491 block_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_pl022_dma_setup()
493 block_cfg->dest_address = (uint32_t)&dummy_rx; in spi_pl022_dma_setup()
494 block_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_pl022_dma_setup()
498 ret = dma_config(dma->dev, dma->channel, dma_cfg); in spi_pl022_dma_setup()
500 LOG_ERR("dma_config %p failed %d", dma->dev, ret); in spi_pl022_dma_setup()
504 data->dma[dir].callbacked = false; in spi_pl022_dma_setup()
506 ret = dma_start(dma->dev, dma->channel); in spi_pl022_dma_setup()
508 LOG_ERR("dma_start %p failed %d", dma->dev, ret); in spi_pl022_dma_setup()
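For reference, the TX direction configured above amounts to a standard Zephyr memory-to-peripheral channel: the source increments through the buffer while the destination stays parked on the SSP data register. A standalone sketch under stated assumptions (a hypothetical `ssp_dr_addr` in place of the driver's `SSP_DR(cfg->reg)`, 8-bit words, and the controller-specific `dma_slot` left at its default):

```c
#include <zephyr/drivers/dma.h>

static int setup_tx_dma(const struct device *dma_dev, uint32_t channel,
			const uint8_t *buf, size_t len, uint32_t ssp_dr_addr,
			dma_callback_t cb, void *user_data)
{
	struct dma_block_config block = {
		.source_address = (uint32_t)buf,
		.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
		.dest_address = ssp_dr_addr,             /* FIFO data register */
		.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
		.block_size = len,
	};
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_PERIPHERAL,
		.source_data_size = 1, /* 1 byte per beat; 2 for 9..16-bit words */
		.dest_data_size = 1,
		.source_burst_length = 1,
		.dest_burst_length = 1,
		.block_count = 1,
		.head_block = &block,
		.dma_callback = cb,
		.user_data = user_data,
	};
	int ret = dma_config(dma_dev, channel, &cfg);

	return ret ? ret : dma_start(dma_dev, channel);
}
```

The RX direction mirrors this with `PERIPHERAL_TO_MEMORY`, a fixed source on the data register, and an incrementing (or parked dummy) destination.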
517 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_start_dma_transceive()
520 SSP_CLEAR_REG(SSP_DMACR(cfg->reg), SSP_DMACR_MASK_RXDMAE | SSP_DMACR_MASK_TXDMAE); in spi_pl022_start_dma_transceive()
529 SSP_WRITE_REG(SSP_DMACR(cfg->reg), SSP_DMACR_MASK_RXDMAE | SSP_DMACR_MASK_TXDMAE); in spi_pl022_start_dma_transceive()
534 dma_stop(cfg->dma[i].dev, cfg->dma[i].channel); in spi_pl022_start_dma_transceive()
542 struct spi_pl022_data *data = dev->data; in spi_pl022_chunk_transfer_finished()
543 struct spi_pl022_dma_data *dma = data->dma; in spi_pl022_chunk_transfer_finished() local
544 const size_t chunk_len = spi_context_max_continuous_chunk(&data->ctx); in spi_pl022_chunk_transfer_finished()
546 return (MIN(dma[TX].count, dma[RX].count) >= chunk_len); in spi_pl022_chunk_transfer_finished()
551 struct spi_pl022_data *data = dev->data; in spi_pl022_complete()
552 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_complete()
555 dma_stop(cfg->dma[i].dev, cfg->dma[i].channel); in spi_pl022_complete()
558 spi_context_complete(&data->ctx, dev, status); in spi_pl022_complete()
565 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_dma_callback()
566 struct spi_pl022_data *data = dev->data; in spi_pl022_dma_callback()
573 key = k_spin_lock(&data->lock); in spi_pl022_dma_callback()
575 LOG_ERR("dma:%p ch:%d callback returned error %d", dma_dev, channel, status); in spi_pl022_dma_callback()
578 k_spin_unlock(&data->lock, key); in spi_pl022_dma_callback()
582 key = k_spin_lock(&data->lock); in spi_pl022_dma_callback()
584 chunk_len = spi_context_max_continuous_chunk(&data->ctx); in spi_pl022_dma_callback()
585 for (size_t i = 0; i < ARRAY_SIZE(cfg->dma); i++) { in spi_pl022_dma_callback()
586 if (dma_dev == cfg->dma[i].dev && channel == cfg->dma[i].channel) { in spi_pl022_dma_callback()
587 data->dma[i].count += chunk_len; in spi_pl022_dma_callback()
588 data->dma[i].callbacked = true; in spi_pl022_dma_callback()
592 * This chunk's transfer is complete once both dma[TX].count and in spi_pl022_dma_callback()
593 * dma[RX].count have reached chunk_len. in spi_pl022_dma_callback()
597 if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) { in spi_pl022_dma_callback()
598 spi_context_update_tx(&data->ctx, 1, chunk_len); in spi_pl022_dma_callback()
599 spi_context_update_rx(&data->ctx, 1, chunk_len); in spi_pl022_dma_callback()
601 spi_context_update_tx(&data->ctx, 2, chunk_len); in spi_pl022_dma_callback()
602 spi_context_update_rx(&data->ctx, 2, chunk_len); in spi_pl022_dma_callback()
609 data->dma[TX].count = 0; in spi_pl022_dma_callback()
610 data->dma[RX].count = 0; in spi_pl022_dma_callback()
617 if (!complete && data->dma[TX].callbacked && data->dma[RX].callbacked) { in spi_pl022_dma_callback()
628 k_spin_unlock(&data->lock, key); in spi_pl022_dma_callback()
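The callback above is the standard Zephyr `dma_callback_t`; the driver registers one per direction and treats a chunk as done only when both have fired and both byte counts cover the chunk. A minimal sketch of that locking pattern with placeholder names:

```c
#include <zephyr/drivers/dma.h>
#include <zephyr/spinlock.h>

static struct k_spinlock demo_lock;
static size_t demo_count[2]; /* [0] = TX, [1] = RX */

static void demo_dma_callback(const struct device *dma_dev, void *user_data,
			      uint32_t channel, int status)
{
	k_spinlock_key_t key = k_spin_lock(&demo_lock);

	if (status < 0) {
		/* stop both channels and complete the transfer with -EIO */
		k_spin_unlock(&demo_lock, key);
		return;
	}

	/* tally this direction's progress; a real driver would match
	 * (dma_dev, channel) against its per-direction config first
	 */
	demo_count[channel & 1] += 1;

	k_spin_unlock(&demo_lock, key);
}
```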
631 #endif /* DMA */
637 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_async_xfer()
638 struct spi_pl022_data *data = dev->data; in spi_pl022_async_xfer()
639 struct spi_context *ctx = &data->ctx; in spi_pl022_async_xfer()
645 while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg) && (data->rx_count < chunk_len)) { in spi_pl022_async_xfer()
646 txrx = SSP_READ_REG(SSP_DR(cfg->reg)); in spi_pl022_async_xfer()
649 if (ctx->rx_buf) { in spi_pl022_async_xfer()
650 *(((uint8_t *)ctx->rx_buf) + data->rx_count) = (uint8_t)txrx; in spi_pl022_async_xfer()
652 data->rx_count++; in spi_pl022_async_xfer()
660 if (MIN(data->tx_count, data->rx_count) >= chunk_len && chunk_len > 0) { in spi_pl022_async_xfer()
665 data->tx_count = 0; in spi_pl022_async_xfer()
666 data->rx_count = 0; in spi_pl022_async_xfer()
677 if ((data->tx_count < chunk_len) && SSP_TX_FIFO_NOT_FULL(cfg->reg)) { in spi_pl022_async_xfer()
681 if (ctx->tx_buf) { in spi_pl022_async_xfer()
682 txrx = *(((uint8_t *)ctx->tx_buf) + data->tx_count); in spi_pl022_async_xfer()
684 SSP_WRITE_REG(SSP_DR(cfg->reg), txrx); in spi_pl022_async_xfer()
685 data->tx_count++; in spi_pl022_async_xfer()
694 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_start_async_xfer()
695 struct spi_pl022_data *data = dev->data; in spi_pl022_start_async_xfer()
698 while (!SSP_TX_FIFO_EMPTY(cfg->reg)) { in spi_pl022_start_async_xfer()
702 while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) { in spi_pl022_start_async_xfer()
703 SSP_READ_REG(SSP_DR(cfg->reg)); in spi_pl022_start_async_xfer()
706 data->tx_count = 0; in spi_pl022_start_async_xfer()
707 data->rx_count = 0; in spi_pl022_start_async_xfer()
709 SSP_WRITE_REG(SSP_ICR(cfg->reg), SSP_ICR_MASK_RORIC | SSP_ICR_MASK_RTIC); in spi_pl022_start_async_xfer()
716 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_isr()
717 struct spi_pl022_data *data = dev->data; in spi_pl022_isr()
718 struct spi_context *ctx = &data->ctx; in spi_pl022_isr()
719 uint32_t mis = SSP_READ_REG(SSP_MIS(cfg->reg)); in spi_pl022_isr()
722 SSP_WRITE_REG(SSP_IMSC(cfg->reg), 0); in spi_pl022_isr()
723 spi_context_complete(ctx, dev, -EIO); in spi_pl022_isr()
728 SSP_WRITE_REG(SSP_ICR(cfg->reg), SSP_ICR_MASK_RORIC | SSP_ICR_MASK_RTIC); in spi_pl022_isr()
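The `mis` read above distinguishes a receive-overrun error (mask all interrupts, complete with -EIO) from normal FIFO service. Such an ISR is wired up at init through the `cfg->irq_config` hook seen later; a sketch with placeholder IRQ number and priority, since `IRQ_CONNECT()` needs compile-time constants that a real driver takes from devicetree:

```c
#include <zephyr/device.h>
#include <zephyr/irq.h>
#include <zephyr/kernel.h>

#define DEMO_SPI_IRQN 7 /* placeholder: DT_INST_IRQN(idx) in a real driver */
#define DEMO_SPI_PRIO 3

static void demo_isr(const void *arg)
{
	ARG_UNUSED(arg); /* a real driver passes its device instance here */
	/* read MIS, service the FIFOs, then clear RORIC/RTIC in ICR */
}

static void demo_irq_config(const struct device *dev)
{
	ARG_UNUSED(dev);
	IRQ_CONNECT(DEMO_SPI_IRQN, DEMO_SPI_PRIO, demo_isr, NULL, 0);
	irq_enable(DEMO_SPI_IRQN);
}
```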
735 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_xfer()
736 struct spi_pl022_data *data = dev->data; in spi_pl022_xfer()
737 const size_t chunk_len = spi_context_max_continuous_chunk(&data->ctx); in spi_pl022_xfer()
738 const void *txbuf = data->ctx.tx_buf; in spi_pl022_xfer()
739 void *rxbuf = data->ctx.rx_buf; in spi_pl022_xfer()
743 data->tx_count = 0; in spi_pl022_xfer()
744 data->rx_count = 0; in spi_pl022_xfer()
747 while (!SSP_TX_FIFO_EMPTY(cfg->reg)) { in spi_pl022_xfer()
751 while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) { in spi_pl022_xfer()
752 SSP_READ_REG(SSP_DR(cfg->reg)); in spi_pl022_xfer()
755 while (data->rx_count < chunk_len || data->tx_count < chunk_len) { in spi_pl022_xfer()
757 while (SSP_TX_FIFO_NOT_FULL(cfg->reg) && data->tx_count < chunk_len && in spi_pl022_xfer()
763 txrx = ((uint8_t *)txbuf)[data->tx_count]; in spi_pl022_xfer()
765 SSP_WRITE_REG(SSP_DR(cfg->reg), txrx); in spi_pl022_xfer()
766 data->tx_count++; in spi_pl022_xfer()
769 while (data->rx_count < chunk_len && fifo_cnt > 0) { in spi_pl022_xfer()
770 if (!SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) { in spi_pl022_xfer()
774 txrx = SSP_READ_REG(SSP_DR(cfg->reg)); in spi_pl022_xfer()
778 ((uint8_t *)rxbuf)[data->rx_count] = (uint8_t)txrx; in spi_pl022_xfer()
780 data->rx_count++; in spi_pl022_xfer()
781 fifo_cnt--; in spi_pl022_xfer()
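The polled loop above never lets RX fall behind TX by more than the FIFO depth: it pushes words while the TX FIFO has room, counts them in `fifo_cnt`, then pops exactly that many back out. A hardware-free toy model of the same bookkeeping (the 8-entry depth is the PL022's FIFO size; a simulated loopback FIFO stands in for the SSP):

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define FIFO_DEPTH 8 /* PL022 TX/RX FIFOs are 8 entries deep */

static uint8_t fifo[FIFO_DEPTH]; /* simulated loopback FIFO */
static size_t fifo_level;

static void polled_xfer(const uint8_t *tx, uint8_t *rx, size_t len)
{
	size_t tx_count = 0, rx_count = 0;

	while (rx_count < len || tx_count < len) {
		size_t fifo_cnt = 0;

		/* fill phase: stop at FIFO_DEPTH or end of chunk */
		while (fifo_level < FIFO_DEPTH && tx_count < len) {
			fifo[fifo_level++] = tx ? tx[tx_count] : 0;
			tx_count++;
			fifo_cnt++;
		}
		/* drain phase: read back exactly what was just written */
		while (rx_count < len && fifo_cnt > 0) {
			uint8_t v = fifo[0];

			memmove(fifo, fifo + 1, --fifo_level);
			if (rx) {
				rx[rx_count] = v;
			}
			rx_count++;
			fifo_cnt--;
		}
	}
}

int main(void)
{
	const uint8_t tx[5] = {1, 2, 3, 4, 5};
	uint8_t rx[5] = {0};

	polled_xfer(tx, rx, sizeof(tx));
	printf("%d %d\n", rx[0], rx[4]); /* loopback: echoes 1 and 5 */
	return 0;
}
```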
795 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_transceive_impl()
796 struct spi_pl022_data *data = dev->data; in spi_pl022_transceive_impl()
797 struct spi_context *ctx = &data->ctx; in spi_pl022_transceive_impl()
800 spi_context_lock(&data->ctx, cb != NULL, cb, userdata, config); in spi_pl022_transceive_impl()
811 if (cfg->dma_enabled) { in spi_pl022_transceive_impl()
813 for (size_t i = 0; i < ARRAY_SIZE(data->dma); i++) { in spi_pl022_transceive_impl()
816 dma_stop(cfg->dma[i].dev, cfg->dma[i].channel); in spi_pl022_transceive_impl()
819 dma_get_status(cfg->dma[i].dev, in spi_pl022_transceive_impl()
820 cfg->dma[i].channel, &stat); in spi_pl022_transceive_impl()
823 data->dma[i].count = 0; in spi_pl022_transceive_impl()
843 spi_context_update_tx(ctx, 1, data->tx_count); in spi_pl022_transceive_impl()
844 spi_context_update_rx(ctx, 1, data->rx_count); in spi_pl022_transceive_impl()
848 spi_context_complete(&data->ctx, dev, ret); in spi_pl022_transceive_impl()
856 spi_context_release(&data->ctx, ret); in spi_pl022_transceive_impl()
888 struct spi_pl022_data *data = dev->data; in spi_pl022_release()
890 spi_context_unlock_unconditionally(&data->ctx); in spi_pl022_release()
914 const struct spi_pl022_cfg *cfg = dev->config; in spi_pl022_init()
915 struct spi_pl022_data *data = dev->data; in spi_pl022_init()
919 if (cfg->clk_dev) { in spi_pl022_init()
920 ret = clock_control_on(cfg->clk_dev, cfg->clk_id); in spi_pl022_init()
929 if (cfg->reset.dev) { in spi_pl022_init()
930 ret = reset_line_toggle_dt(&cfg->reset); in spi_pl022_init()
938 ret = pinctrl_apply_state(cfg->pincfg, PINCTRL_STATE_DEFAULT); in spi_pl022_init()
945 if (cfg->dma_enabled) { in spi_pl022_init()
948 uint32_t ch_filter = BIT(cfg->dma[i].channel); in spi_pl022_init()
950 if (!device_is_ready(cfg->dma[i].dev)) { in spi_pl022_init()
951 LOG_ERR("DMA %s not ready", cfg->dma[i].dev->name); in spi_pl022_init()
952 return -ENODEV; in spi_pl022_init()
955 ret = dma_request_channel(cfg->dma[i].dev, &ch_filter); in spi_pl022_init()
964 cfg->irq_config(dev); in spi_pl022_init()
974 ret = spi_context_cs_configure_all(&data->ctx); in spi_pl022_init()
981 spi_context_unlock_unconditionally(&data->ctx); in spi_pl022_init()
1029 IF_ENABLED(CONFIG_SPI_PL022_DMA, (.dma = DMAS_DECL(idx),)) COND_CODE_1( \
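The instantiation line above relies on Zephyr's preprocessor utilities to make the DMA fields vanish entirely when `CONFIG_SPI_PL022_DMA` is off. A minimal illustration of the two macros involved; `DEMO_HAS_DMA` stands in for the Kconfig symbol:

```c
#include <stdint.h>
#include <zephyr/sys/util_macro.h>

#define DEMO_HAS_DMA 1 /* stands in for CONFIG_SPI_PL022_DMA */

struct demo_cfg {
	uint32_t reg;
	/* IF_ENABLED() emits its code bundle only when the flag is 1 */
	IF_ENABLED(DEMO_HAS_DMA, (uint32_t dma_slot;))
};

static const struct demo_cfg demo = {
	.reg = 0x40010000,
	IF_ENABLED(DEMO_HAS_DMA, (.dma_slot = 3,))
};

/* COND_CODE_1(flag, (code if 1), (code otherwise)) picks between two
 * bundles at preprocessing time, e.g. IRQ-based versus DMA-based init.
 */
```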