1 /*
2  * SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 // The HAL layer for SPI Slave HD
8 
9 #include <string.h>
10 #include "esp_types.h"
11 #include "esp_attr.h"
12 #include "esp_err.h"
13 #include "sdkconfig.h"
14 #include "soc/spi_periph.h"
15 #include "soc/lldesc.h"
16 #include "soc/soc_caps.h"
17 #include "hal/spi_slave_hd_hal.h"
18 #include "hal/assert.h"
19 
20 //This GDMA related part will be introduced by GDMA dedicated APIs in the future. Here we temporarily use macros.
21 #if SOC_GDMA_SUPPORTED
22 #include "soc/gdma_struct.h"
23 #include "hal/gdma_ll.h"
24 #define spi_dma_ll_tx_restart(dev, chan)                           gdma_ll_tx_restart(&GDMA, chan)
25 #define spi_dma_ll_rx_restart(dev, chan)                           gdma_ll_rx_restart(&GDMA, chan)
26 #define spi_dma_ll_rx_reset(dev, chan)                             gdma_ll_rx_reset_channel(&GDMA, chan)
27 #define spi_dma_ll_tx_reset(dev, chan)                             gdma_ll_tx_reset_channel(&GDMA, chan)
28 #define spi_dma_ll_rx_enable_burst_data(dev, chan, enable)         gdma_ll_rx_enable_data_burst(&GDMA, chan, enable)
29 #define spi_dma_ll_tx_enable_burst_data(dev, chan, enable)         gdma_ll_tx_enable_data_burst(&GDMA, chan, enable)
30 #define spi_dma_ll_rx_enable_burst_desc(dev, chan, enable)         gdma_ll_rx_enable_descriptor_burst(&GDMA, chan, enable)
31 #define spi_dma_ll_tx_enable_burst_desc(dev, chan, enable)         gdma_ll_tx_enable_descriptor_burst(&GDMA, chan, enable)
32 #define spi_dma_ll_enable_out_auto_wrback(dev, chan, enable)       gdma_ll_tx_enable_auto_write_back(&GDMA, chan, enable)
33 #define spi_dma_ll_set_out_eof_generation(dev, chan, enable)       gdma_ll_tx_set_eof_mode(&GDMA, chan, enable)
34 #define spi_dma_ll_get_out_eof_desc_addr(dev, chan)                gdma_ll_tx_get_eof_desc_addr(&GDMA, chan)
35 #define spi_dma_ll_get_in_suc_eof_desc_addr(dev, chan)             gdma_ll_rx_get_success_eof_desc_addr(&GDMA, chan)
36 #define spi_dma_ll_rx_start(dev, chan, addr) do {\
37             gdma_ll_rx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
38             gdma_ll_rx_start(&GDMA, chan);\
39         } while (0)
40 #define spi_dma_ll_tx_start(dev, chan, addr) do {\
41             gdma_ll_tx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
42             gdma_ll_tx_start(&GDMA, chan);\
43         } while (0)
44 #endif
45 
//Configure both DMA channels for burst transfers, and enable TX auto write-back
//plus out-EOF generation. Called once from spi_slave_hd_hal_init() when DMA is enabled.
static void s_spi_slave_hd_hal_dma_init_config(const spi_slave_hd_hal_context_t *hal)
{
    //Burst mode for data and descriptor fetches, on both RX and TX channels
    spi_dma_ll_rx_enable_burst_data(hal->dma_in, hal->rx_dma_chan, 1);
    spi_dma_ll_tx_enable_burst_data(hal->dma_out, hal->tx_dma_chan, 1);
    spi_dma_ll_rx_enable_burst_desc(hal->dma_in, hal->rx_dma_chan, 1);
    spi_dma_ll_tx_enable_burst_desc(hal->dma_out, hal->tx_dma_chan, 1);
    //Let the DMA write the TX descriptor back when its buffer has been sent,
    //and generate the out-EOF accordingly (used to track finished TX descriptors)
    spi_dma_ll_enable_out_auto_wrback(hal->dma_out, hal->tx_dma_chan, 1);
    spi_dma_ll_set_out_eof_generation(hal->dma_out, hal->tx_dma_chan, 1);
}
55 
//Initialize the SPI Slave HD HAL context and configure the peripheral from hal_config:
//DMA (optional), bit order, command/address/dummy bit lengths, interrupts and segment mode.
void spi_slave_hd_hal_init(spi_slave_hd_hal_context_t *hal, const spi_slave_hd_hal_config_t *hal_config)
{
    spi_dev_t *hw = SPI_LL_GET_HW(hal_config->host_id);
    hal->dev = hw;
    hal->dma_in = hal_config->dma_in;
    hal->dma_out = hal_config->dma_out;
    hal->dma_enabled = hal_config->dma_enabled;
    hal->tx_dma_chan = hal_config->tx_dma_chan;
    hal->rx_dma_chan = hal_config->rx_dma_chan;
    hal->append_mode = hal_config->append_mode;
    hal->rx_cur_desc = hal->dmadesc_rx;
    hal->tx_cur_desc = hal->dmadesc_tx;
    //The dummy heads act as sentinel nodes linked in front of the descriptor arrays, so
    //tx/rx_dma_head always points at the node *before* the next finished descriptor
    //(see spi_slave_hd_hal_get_tx/rx_finished_trans)
    STAILQ_NEXT(&hal->tx_dummy_head.desc, qe) = &hal->dmadesc_tx->desc;
    hal->tx_dma_head = &hal->tx_dummy_head;
    STAILQ_NEXT(&hal->rx_dummy_head.desc, qe) = &hal->dmadesc_rx->desc;
    hal->rx_dma_head = &hal->rx_dummy_head;

    //Configure slave
    if (hal_config->dma_enabled) {
        s_spi_slave_hd_hal_dma_init_config(hal);
    }

    spi_ll_slave_hd_init(hw);
    spi_ll_set_addr_bitlen(hw, hal_config->address_bits);
    spi_ll_set_command_bitlen(hw, hal_config->command_bits);
    spi_ll_set_dummy(hw, hal_config->dummy_bits);
    spi_ll_set_rx_lsbfirst(hw, hal_config->rx_lsbfirst);
    spi_ll_set_tx_lsbfirst(hw, hal_config->tx_lsbfirst);
    spi_ll_slave_set_mode(hw, hal_config->mode, (hal_config->dma_enabled));

    //Start from a clean interrupt state: everything disabled and cleared
    spi_ll_disable_intr(hw, UINT32_MAX);
    spi_ll_clear_intr(hw, UINT32_MAX);
    if (!hal_config->append_mode) {
        //Segment mode: CMD7 (recv done) / CMD8 (send done) drive the events.
        //Try to set their status bits by software, then read them back to probe
        //whether they are software-writable on this chip.
        spi_ll_set_intr(hw, SPI_LL_INTR_CMD7 | SPI_LL_INTR_CMD8);

        bool workaround_required = false;
        if (!spi_ll_get_intr(hw, SPI_LL_INTR_CMD7)) {
            hal->intr_not_triggered |= SPI_EV_RECV;
            workaround_required = true;
        }
        if (!spi_ll_get_intr(hw, SPI_LL_INTR_CMD8)) {
            hal->intr_not_triggered |= SPI_EV_SEND;
            workaround_required = true;
        }

        if (workaround_required) {
            //Workaround if the previous interrupts are not writable:
            //use trans_done as a software-triggerable substitute
            spi_ll_set_intr(hw, SPI_LL_INTR_TRANS_DONE);
        }
    } else {
        //Append mode: RECV via CMD7; on non-GDMA targets SEND is reported by DMA out-EOF
#if SOC_GDMA_SUPPORTED
        spi_ll_enable_intr(hw, SPI_LL_INTR_CMD7);
#else
        spi_ll_clear_intr(hw, SPI_LL_INTR_OUT_EOF | SPI_LL_INTR_CMD7);
        spi_ll_enable_intr(hw, SPI_LL_INTR_OUT_EOF | SPI_LL_INTR_CMD7);
#endif //SOC_GDMA_SUPPORTED
    }

    //Update the transaction length counter on all four access types (buffer/DMA, read/write)
    spi_ll_slave_hd_set_len_cond(hw,    SPI_LL_TRANS_LEN_COND_WRBUF |
                                        SPI_LL_TRANS_LEN_COND_WRDMA |
                                        SPI_LL_TRANS_LEN_COND_RDBUF |
                                        SPI_LL_TRANS_LEN_COND_RDDMA);

    spi_ll_slave_set_seg_mode(hal->dev, true);
}
121 
spi_salve_hd_hal_get_max_bus_size(spi_slave_hd_hal_context_t * hal)122 uint32_t spi_salve_hd_hal_get_max_bus_size(spi_slave_hd_hal_context_t *hal)
123 {
124     return hal->dma_desc_num * LLDESC_MAX_NUM_PER_DESC;
125 }
126 
//Compute (and record in the context) how many DMA descriptors are needed to cover
//bus_size bytes, and return the total memory required for those descriptors.
uint32_t spi_slave_hd_hal_get_total_desc_size(spi_slave_hd_hal_context_t *hal, uint32_t bus_size)
{
    //Round up to the number of descriptors required for bus_size bytes
    int desc_needed = (bus_size + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
    if (desc_needed == 0) {
        desc_needed = 1; //default to 4k when max is not given
    }
    hal->dma_desc_num = desc_needed;

    return desc_needed * sizeof(spi_slave_hd_hal_desc_append_t);
}
138 
//Arm a segment-mode DMA RX transaction to receive up to ``len`` bytes into ``out_buf``.
void spi_slave_hd_hal_rxdma(spi_slave_hd_hal_context_t *hal, uint8_t *out_buf, size_t len)
{
    //Build the RX descriptor chain (isrx=true: RX lengths get word-aligned)
    lldesc_setup_link(&hal->dmadesc_rx->desc, out_buf, len, true);

    //Reset the SPI RX fifo, the DMA channel and the SPI state machine before re-arming
    spi_ll_dma_rx_fifo_reset(hal->dev);
    spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
    spi_ll_slave_reset(hal->dev);
    spi_ll_infifo_full_clr(hal->dev);
    //Clear any stale "recv done" status from a previous transaction
    spi_ll_clear_intr(hal->dev, SPI_LL_INTR_CMD7);

    //Enable DMA receive and hand the descriptor chain to the DMA engine
    spi_ll_dma_rx_enable(hal->dev, 1);
    spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, &hal->dmadesc_rx->desc);
}
152 
//Arm a segment-mode DMA TX transaction to send ``len`` bytes from ``data``.
void spi_slave_hd_hal_txdma(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len)
{
    //Build the TX descriptor chain (isrx=false: exact lengths, no alignment rounding)
    lldesc_setup_link(&hal->dmadesc_tx->desc, data, len, false);

    //Reset the SPI TX fifo, the DMA channel and the SPI state machine before re-arming
    spi_ll_dma_tx_fifo_reset(hal->dev);
    spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
    spi_ll_slave_reset(hal->dev);
    spi_ll_outfifo_empty_clr(hal->dev);
    //Clear any stale "send done" status from a previous transaction
    spi_ll_clear_intr(hal->dev, SPI_LL_INTR_CMD8);

    //Enable DMA transmit and hand the descriptor chain to the DMA engine
    spi_ll_dma_tx_enable(hal->dev, 1);
    spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, &hal->dmadesc_tx->desc);
}
166 
get_event_intr(spi_slave_hd_hal_context_t * hal,spi_event_t ev)167 static spi_ll_intr_t get_event_intr(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
168 {
169     spi_ll_intr_t intr = 0;
170 #if CONFIG_IDF_TARGET_ESP32S2
171     if ((ev & SPI_EV_SEND) && hal->append_mode) intr |= SPI_LL_INTR_OUT_EOF;
172 #endif
173     if ((ev & SPI_EV_SEND) && !hal->append_mode) intr |= SPI_LL_INTR_CMD8;
174     if (ev & SPI_EV_RECV)          intr |= SPI_LL_INTR_CMD7;
175     if (ev & SPI_EV_BUF_TX)        intr |= SPI_LL_INTR_RDBUF;
176     if (ev & SPI_EV_BUF_RX)        intr |= SPI_LL_INTR_WRBUF;
177     if (ev & SPI_EV_CMD9)          intr |= SPI_LL_INTR_CMD9;
178     if (ev & SPI_EV_CMDA)          intr |= SPI_LL_INTR_CMDA;
179     if (ev & SPI_EV_TRANS)         intr |= SPI_LL_INTR_TRANS_DONE;
180     return intr;
181 }
182 
spi_slave_hd_hal_check_clear_event(spi_slave_hd_hal_context_t * hal,spi_event_t ev)183 bool spi_slave_hd_hal_check_clear_event(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
184 {
185     spi_ll_intr_t intr = get_event_intr(hal, ev);
186     if (spi_ll_get_intr(hal->dev, intr)) {
187         spi_ll_clear_intr(hal->dev, intr);
188         return true;
189     }
190     return false;
191 }
192 
//Check whether the interrupt(s) for ``ev`` have fired; if so, disable them and return true.
//Also maintains the not-software-writable-interrupt workaround set up in hal_init().
bool spi_slave_hd_hal_check_disable_event(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
{
    //The trans_done interrupt is used for the workaround when some interrupt is not writable
    spi_ll_intr_t intr = get_event_intr(hal, ev);

    // Workaround for these interrupts not writable
    uint32_t missing_intr = hal->intr_not_triggered & ev;
    if (missing_intr) {
        //Once the real CMD7/CMD8 status shows up, that event no longer needs the workaround
        if ((missing_intr & SPI_EV_RECV) && spi_ll_get_intr(hal->dev, SPI_LL_INTR_CMD7)) {
            hal->intr_not_triggered &= ~SPI_EV_RECV;
        }
        if ((missing_intr & SPI_EV_SEND) && spi_ll_get_intr(hal->dev, SPI_LL_INTR_CMD8)) {
            hal->intr_not_triggered &= ~SPI_EV_SEND;
        }
        //Shut off the trans_done interrupt that served as the software trigger
        if (spi_ll_get_intr(hal->dev, SPI_LL_INTR_TRANS_DONE)) {
            spi_ll_disable_intr(hal->dev, SPI_LL_INTR_TRANS_DONE);
        }
    }

    if (spi_ll_get_intr(hal->dev, intr)) {
        spi_ll_disable_intr(hal->dev, intr);
        return true;
    }
    return false;
}
218 
//Enable the interrupt source(s) corresponding to the given event mask.
void spi_slave_hd_hal_enable_event_intr(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
{
    spi_ll_enable_intr(hal->dev, get_event_intr(hal, ev));
}
224 
//Enable (and thereby software-invoke) the interrupt(s) for ``ev``, including the
//trans_done substitute for events whose own interrupt bits are not software-writable.
void spi_slave_hd_hal_invoke_event_intr(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
{
    spi_ll_intr_t intr_mask = get_event_intr(hal, ev);

    // Workaround for these interrupts not writable
    const spi_event_t workaround_ev = hal->intr_not_triggered & ev & (SPI_EV_RECV | SPI_EV_SEND);
    if (workaround_ev) {
        intr_mask |= SPI_LL_INTR_TRANS_DONE;
    }

    spi_ll_enable_intr(hal->dev, intr_mask);
}
236 
//Read ``len`` bytes starting at ``addr`` from the shared register buffer into ``out_data``.
void spi_slave_hd_hal_read_buffer(spi_slave_hd_hal_context_t *hal, int addr, uint8_t *out_data, size_t len)
{
    spi_dev_t *hw = hal->dev;
    spi_ll_read_buffer_byte(hw, addr, out_data, len);
}
241 
//Write ``len`` bytes from ``data`` into the shared register buffer starting at ``addr``.
void spi_slave_hd_hal_write_buffer(spi_slave_hd_hal_context_t *hal, int addr, uint8_t *data, size_t len)
{
    spi_dev_t *hw = hal->dev;
    spi_ll_write_buffer_byte(hw, addr, data, len);
}
246 
//Return the address field of the last transaction seen by the slave.
int spi_slave_hd_hal_get_last_addr(spi_slave_hd_hal_context_t *hal)
{
    spi_dev_t *hw = hal->dev;
    return spi_ll_slave_hd_get_last_addr(hw);
}
251 
//Return the received length of the last transaction, in bytes.
int spi_slave_hd_hal_get_rxlen(spi_slave_hd_hal_context_t *hal)
{
    spi_dev_t *hw = hal->dev;
    return spi_ll_slave_get_rx_byte_len(hw);
}
257 
//Return the number of bytes actually received by the segment-mode RX descriptor chain.
int spi_slave_hd_hal_rxdma_seg_get_len(spi_slave_hd_hal_context_t *hal)
{
    return lldesc_get_received_len(&hal->dmadesc_rx->desc, NULL);
}
263 
//Fetch the caller context (``arg``) of the next finished TX descriptor in append mode.
//Returns false when tx_dma_head already sits on the descriptor recorded by the DMA
//out-EOF register, i.e. no new descriptor has finished since the last call.
bool spi_slave_hd_hal_get_tx_finished_trans(spi_slave_hd_hal_context_t *hal, void **out_trans)
{
    if ((uint32_t)&hal->tx_dma_head->desc == spi_dma_ll_get_out_eof_desc_addr(hal->dma_out, hal->tx_dma_chan)) {
        return false;
    }

    //Advance the head to the next (finished) descriptor and hand back its context
    hal->tx_dma_head = (spi_slave_hd_hal_desc_append_t *)STAILQ_NEXT(&hal->tx_dma_head->desc, qe);
    *out_trans = hal->tx_dma_head->arg;
    //The descriptor can now be reused by spi_slave_hd_hal_txdma_append()
    hal->tx_recycled_desc_cnt++;

    return true;
}
276 
//Fetch the caller context and received length of the next finished RX descriptor in
//append mode. Returns false when rx_dma_head already sits on the descriptor recorded
//by the DMA in-suc-EOF register, i.e. nothing new has finished since the last call.
bool spi_slave_hd_hal_get_rx_finished_trans(spi_slave_hd_hal_context_t *hal, void **out_trans, size_t *out_len)
{
    if ((uint32_t)&hal->rx_dma_head->desc == spi_dma_ll_get_in_suc_eof_desc_addr(hal->dma_in, hal->rx_dma_chan)) {
        return false;
    }

    //Advance the head to the next (finished) descriptor and hand back its context/length
    hal->rx_dma_head = (spi_slave_hd_hal_desc_append_t *)STAILQ_NEXT(&hal->rx_dma_head->desc, qe);
    *out_trans = hal->rx_dma_head->arg;
    *out_len = hal->rx_dma_head->desc.length;
    //The descriptor can now be reused by spi_slave_hd_hal_rxdma_append()
    hal->rx_recycled_desc_cnt++;

    return true;
}
290 
//Fill a chain of DMA descriptors (each carrying the caller context ``arg``) to cover
//``len`` bytes at ``data``. Requires len > 0 (the tail fix-up below indexes n-1).
//NOTE(review): the assert limits len to a single descriptor even though the loop
//already supports chaining — longer transactions are pending IDF-2660.
static void spi_slave_hd_hal_link_append_desc(spi_slave_hd_hal_desc_append_t *dmadesc, const void *data, int len, bool isrx, void *arg)
{
    HAL_ASSERT(len <= LLDESC_MAX_NUM_PER_DESC);     //TODO: Add support for transaction with length larger than 4092, IDF-2660
    int n = 0;
    while (len) {
        int dmachunklen = len;
        if (dmachunklen > LLDESC_MAX_NUM_PER_DESC) {
            dmachunklen = LLDESC_MAX_NUM_PER_DESC;
        }
        if (isrx) {
            //Receive needs DMA length rounded to next 32-bit boundary
            dmadesc[n].desc.size = (dmachunklen + 3) & (~3);
            dmadesc[n].desc.length = (dmachunklen + 3) & (~3);
        } else {
            dmadesc[n].desc.size = dmachunklen;
            dmadesc[n].desc.length = dmachunklen;
        }
        dmadesc[n].desc.buf = (uint8_t *)data;
        dmadesc[n].desc.eof = 0;
        dmadesc[n].desc.sosf = 0;
        dmadesc[n].desc.owner = 1;  //hand ownership of the descriptor to the DMA
        //Provisionally link to the next array slot; the last link is fixed up after the loop
        dmadesc[n].desc.qe.stqe_next = &dmadesc[n + 1].desc;
        dmadesc[n].arg = arg;
        len -= dmachunklen;
        data += dmachunklen;
        n++;
    }
    dmadesc[n - 1].desc.eof = 1; //Mark last DMA desc as end of stream.
    dmadesc[n - 1].desc.qe.stqe_next = NULL;
}
321 
spi_slave_hd_hal_txdma_append(spi_slave_hd_hal_context_t * hal,uint8_t * data,size_t len,void * arg)322 esp_err_t spi_slave_hd_hal_txdma_append(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len, void *arg)
323 {
324     //Check if there are enough available DMA descriptors for software to use
325     int num_required = (len + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
326     int not_recycled_desc_num = hal->tx_used_desc_cnt - hal->tx_recycled_desc_cnt;
327     int available_desc_num = hal->dma_desc_num - not_recycled_desc_num;
328     if (num_required > available_desc_num) {
329         return ESP_ERR_INVALID_STATE;
330     }
331 
332     spi_slave_hd_hal_link_append_desc(hal->tx_cur_desc, data, len, false, arg);
333 
334     if (!hal->tx_dma_started) {
335         hal->tx_dma_started = true;
336         //start a link
337         hal->tx_dma_tail = hal->tx_cur_desc;
338         spi_ll_dma_tx_fifo_reset(hal->dma_out);
339         spi_ll_outfifo_empty_clr(hal->dev);
340         spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
341         spi_ll_dma_tx_enable(hal->dev, 1);
342         spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, &hal->tx_cur_desc->desc);
343     } else {
344         //there is already a consecutive link
345         STAILQ_NEXT(&hal->tx_dma_tail->desc, qe) = &hal->tx_cur_desc->desc;
346         hal->tx_dma_tail = hal->tx_cur_desc;
347         spi_dma_ll_tx_restart(hal->dma_out, hal->tx_dma_chan);
348     }
349 
350     //Move the current descriptor pointer according to the number of the linked descriptors
351     for (int i = 0; i < num_required; i++) {
352         hal->tx_used_desc_cnt++;
353         hal->tx_cur_desc++;
354         if (hal->tx_cur_desc == hal->dmadesc_tx + hal->dma_desc_num) {
355             hal->tx_cur_desc = hal->dmadesc_tx;
356         }
357     }
358 
359     return ESP_OK;
360 }
361 
spi_slave_hd_hal_rxdma_append(spi_slave_hd_hal_context_t * hal,uint8_t * data,size_t len,void * arg)362 esp_err_t spi_slave_hd_hal_rxdma_append(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len, void *arg)
363 {
364     //Check if there are enough available dma descriptors for software to use
365     int num_required = (len + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
366     int not_recycled_desc_num = hal->rx_used_desc_cnt - hal->rx_recycled_desc_cnt;
367     int available_desc_num = hal->dma_desc_num - not_recycled_desc_num;
368     if (num_required > available_desc_num) {
369         return ESP_ERR_INVALID_STATE;
370     }
371 
372     spi_slave_hd_hal_link_append_desc(hal->rx_cur_desc, data, len, false, arg);
373 
374     if (!hal->rx_dma_started) {
375         hal->rx_dma_started = true;
376         //start a link
377         hal->rx_dma_tail = hal->rx_cur_desc;
378         spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
379         spi_ll_dma_rx_fifo_reset(hal->dma_in);
380         spi_ll_infifo_full_clr(hal->dev);
381         spi_ll_dma_rx_enable(hal->dev, 1);
382         spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, &hal->rx_cur_desc->desc);
383     } else {
384         //there is already a consecutive link
385         STAILQ_NEXT(&hal->rx_dma_tail->desc, qe) = &hal->rx_cur_desc->desc;
386         hal->rx_dma_tail = hal->rx_cur_desc;
387         spi_dma_ll_rx_restart(hal->dma_in, hal->rx_dma_chan);
388     }
389 
390     //Move the current descriptor pointer according to the number of the linked descriptors
391     for (int i = 0; i < num_required; i++) {
392         hal->rx_used_desc_cnt++;
393         hal->rx_cur_desc++;
394         if (hal->rx_cur_desc == hal->dmadesc_rx + hal->dma_desc_num) {
395             hal->rx_cur_desc = hal->dmadesc_rx;
396         }
397     }
398 
399     return ESP_OK;
400 }
401