1 // Copyright 2015-2020 Espressif Systems (Shanghai) PTE LTD
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //     http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 // The HAL layer for SPI Slave HD
16 
17 #include <string.h>
18 #include "esp_types.h"
19 #include "esp_attr.h"
20 #include "esp_err.h"
21 #include "sdkconfig.h"
22 #include "soc/spi_periph.h"
23 #include "soc/lldesc.h"
24 #include "soc/soc_caps.h"
25 #include "hal/spi_slave_hd_hal.h"
26 #include "hal/assert.h"
27 
28 //This GDMA related part will be introduced by GDMA dedicated APIs in the future. Here we temporarily use macros.
29 #if SOC_GDMA_SUPPORTED
30 #include "soc/gdma_struct.h"
31 #include "hal/gdma_ll.h"
32 
33 #define spi_dma_ll_rx_reset(dev, chan)                             gdma_ll_rx_reset_channel(&GDMA, chan)
34 #define spi_dma_ll_tx_reset(dev, chan)                             gdma_ll_tx_reset_channel(&GDMA, chan)
35 #define spi_dma_ll_rx_enable_burst_data(dev, chan, enable)         gdma_ll_rx_enable_data_burst(&GDMA, chan, enable)
36 #define spi_dma_ll_tx_enable_burst_data(dev, chan, enable)         gdma_ll_tx_enable_data_burst(&GDMA, chan, enable)
37 #define spi_dma_ll_rx_enable_burst_desc(dev, chan, enable)         gdma_ll_rx_enable_descriptor_burst(&GDMA, chan, enable)
38 #define spi_dma_ll_tx_enable_burst_desc(dev, chan, enable)         gdma_ll_tx_enable_descriptor_burst(&GDMA, chan, enable)
39 #define spi_dma_ll_enable_out_auto_wrback(dev, chan, enable)       gdma_ll_tx_enable_auto_write_back(&GDMA, chan, enable)
40 #define spi_dma_ll_set_out_eof_generation(dev, chan, enable)       gdma_ll_tx_set_eof_mode(&GDMA, chan, enable)
41 #define spi_dma_ll_get_out_eof_desc_addr(dev, chan)                gdma_ll_tx_get_eof_desc_addr(&GDMA, chan)
42 #define spi_dma_ll_get_in_suc_eof_desc_addr(dev, chan)             gdma_ll_rx_get_success_eof_desc_addr(&GDMA, chan)
43 #define spi_dma_ll_rx_start(dev, chan, addr) do {\
44             gdma_ll_rx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
45             gdma_ll_rx_start(&GDMA, chan);\
46         } while (0)
47 #define spi_dma_ll_tx_start(dev, chan, addr) do {\
48             gdma_ll_tx_set_desc_addr(&GDMA, chan, (uint32_t)addr);\
49             gdma_ll_tx_start(&GDMA, chan);\
50         } while (0)
51 #endif
52 
s_spi_slave_hd_hal_dma_init_config(const spi_slave_hd_hal_context_t * hal)53 static void s_spi_slave_hd_hal_dma_init_config(const spi_slave_hd_hal_context_t *hal)
54 {
55     spi_dma_ll_rx_enable_burst_data(hal->dma_in, hal->rx_dma_chan, 1);
56     spi_dma_ll_tx_enable_burst_data(hal->dma_out, hal->tx_dma_chan, 1);
57     spi_dma_ll_rx_enable_burst_desc(hal->dma_in, hal->rx_dma_chan, 1);
58     spi_dma_ll_tx_enable_burst_desc(hal->dma_out, hal->tx_dma_chan, 1);
59     spi_dma_ll_enable_out_auto_wrback(hal->dma_out, hal->tx_dma_chan, 1);
60     spi_dma_ll_set_out_eof_generation(hal->dma_out, hal->tx_dma_chan, 1);
61 }
62 
//Initialize the slave HD HAL context and configure the SPI peripheral.
//Copies the configuration into the HAL context, configures the DMA channels, sets up the
//command/address/dummy phases, bit order and SPI mode, and enables the interrupt sources
//used by the driver (segment mode vs append mode).
void spi_slave_hd_hal_init(spi_slave_hd_hal_context_t *hal, const spi_slave_hd_hal_config_t *hal_config)
{
    spi_dev_t* hw = SPI_LL_GET_HW(hal_config->host_id);
    hal->dev = hw;
    hal->dma_in = hal_config->dma_in;
    hal->dma_out = hal_config->dma_out;
    hal->dma_enabled = hal_config->dma_enabled;
    hal->tx_dma_chan = hal_config->tx_dma_chan;
    hal->rx_dma_chan = hal_config->rx_dma_chan;
    hal->append_mode = hal_config->append_mode;
    hal->rx_cur_desc = hal->dmadesc_rx;
    hal->tx_cur_desc = hal->dmadesc_tx;
    //Link the dummy heads to the first real descriptor: the recycling walkers
    //(spi_slave_hd_hal_get_tx/rx_finished_trans) always trail one entry behind the
    //descriptor that last finished, so they start from these dummies.
    STAILQ_NEXT(&hal->tx_dummy_head.desc, qe) = &hal->dmadesc_tx->desc;
    hal->tx_dma_head = &hal->tx_dummy_head;
    STAILQ_NEXT(&hal->rx_dummy_head.desc, qe) = &hal->dmadesc_rx->desc;
    hal->rx_dma_head = &hal->rx_dummy_head;

    //Configure slave
    s_spi_slave_hd_hal_dma_init_config(hal);

    spi_ll_slave_hd_init(hw);
    spi_ll_set_addr_bitlen(hw, hal_config->address_bits);
    spi_ll_set_command_bitlen(hw, hal_config->command_bits);
    spi_ll_set_dummy(hw, hal_config->dummy_bits);
    spi_ll_set_rx_lsbfirst(hw, hal_config->rx_lsbfirst);
    spi_ll_set_tx_lsbfirst(hw, hal_config->tx_lsbfirst);
    spi_ll_slave_set_mode(hw, hal_config->mode, (hal_config->dma_enabled));

    //Start from a clean interrupt state
    spi_ll_disable_intr(hw, UINT32_MAX);
    spi_ll_clear_intr(hw, UINT32_MAX);
    if (!hal_config->append_mode) {
        //Segment mode: CMD7 = receive done, CMD8 = send done
        spi_ll_set_intr(hw, SPI_LL_INTR_CMD7 | SPI_LL_INTR_CMD8);

        //Set the CMD7/CMD8 bits and read them back: if a bit did not stick, that
        //interrupt is not writable by software on this chip and the driver must fall
        //back to the trans_done interrupt for the corresponding event.
        bool workaround_required = false;
        if (!spi_ll_get_intr(hw, SPI_LL_INTR_CMD7)) {
            hal->intr_not_triggered |= SPI_EV_RECV;
            workaround_required = true;
        }
        if (!spi_ll_get_intr(hw, SPI_LL_INTR_CMD8)) {
            hal->intr_not_triggered |= SPI_EV_SEND;
            workaround_required = true;
        }

        if (workaround_required) {
            //Workaround if the previous interrupts are not writable
            spi_ll_set_intr(hw, SPI_LL_INTR_TRANS_DONE);
        }
    }
#if CONFIG_IDF_TARGET_ESP32S2
    //Append mode is only supported on ESP32S2 now
    else {
        //Append mode: out_eof = send done, CMD7 = receive done
        spi_ll_enable_intr(hw, SPI_LL_INTR_OUT_EOF | SPI_LL_INTR_CMD7);
    }
#endif

    //Update the transaction length counter on every buffer/DMA read and write
    spi_ll_slave_hd_set_len_cond(hw,    SPI_LL_TRANS_LEN_COND_WRBUF |
                                        SPI_LL_TRANS_LEN_COND_WRDMA |
                                        SPI_LL_TRANS_LEN_COND_RDBUF |
                                        SPI_LL_TRANS_LEN_COND_RDDMA);

    spi_ll_slave_set_seg_mode(hal->dev, true);
}
125 
//Return the maximum transaction size (in bytes) the configured descriptor chain can carry.
//NOTE: the "salve" typo is part of the public API name and is kept for compatibility.
uint32_t spi_salve_hd_hal_get_max_bus_size(spi_slave_hd_hal_context_t *hal)
{
    return hal->dma_desc_num * LLDESC_MAX_NUM_PER_DESC;
}
130 
//Compute (and remember in the context) how many DMA descriptors are needed to cover
//bus_size bytes, and return the total memory required for those descriptors.
uint32_t spi_slave_hd_hal_get_total_desc_size(spi_slave_hd_hal_context_t *hal, uint32_t bus_size)
{
    //Each descriptor covers up to LLDESC_MAX_NUM_PER_DESC bytes; round up
    int desc_needed = (bus_size + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
    if (desc_needed == 0) {
        desc_needed = 1; //default to 4k when max is not given
    }
    hal->dma_desc_num = desc_needed;

    return hal->dma_desc_num * sizeof(spi_slave_hd_hal_desc_append_t);
}
142 
//Start a segment-mode DMA receive of up to len bytes into out_buf.
//The FIFO, DMA channel and slave state are reset before starting so stale data from a
//previous transaction cannot leak into this one.
void spi_slave_hd_hal_rxdma(spi_slave_hd_hal_context_t *hal, uint8_t *out_buf, size_t len)
{
    //true: set the link up as an RX descriptor chain
    lldesc_setup_link(&hal->dmadesc_rx->desc, out_buf, len, true);

    //Reset peripheral RX FIFO, DMA channel and slave state before (re)starting
    spi_ll_dma_rx_fifo_reset(hal->dev);
    spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
    spi_ll_slave_reset(hal->dev);
    spi_ll_infifo_full_clr(hal->dev);
    spi_ll_clear_intr(hal->dev, SPI_LL_INTR_CMD7);  //CMD7: "receive done" interrupt

    spi_ll_dma_rx_enable(hal->dev, 1);
    spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, &hal->dmadesc_rx->desc);
}
156 
//Start a segment-mode DMA send of len bytes from data.
//The FIFO, DMA channel and slave state are reset before starting so leftovers from a
//previous transaction cannot be sent out.
void spi_slave_hd_hal_txdma(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len)
{
    //false: set the link up as a TX descriptor chain
    lldesc_setup_link(&hal->dmadesc_tx->desc, data, len, false);

    //Reset peripheral TX FIFO, DMA channel and slave state before (re)starting
    spi_ll_dma_tx_fifo_reset(hal->dev);
    spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
    spi_ll_slave_reset(hal->dev);
    spi_ll_outfifo_empty_clr(hal->dev);
    spi_ll_clear_intr(hal->dev, SPI_LL_INTR_CMD8);  //CMD8: "send done" interrupt

    spi_ll_dma_tx_enable(hal->dev, 1);
    spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, &hal->dmadesc_tx->desc);
}
170 
get_event_intr(spi_slave_hd_hal_context_t * hal,spi_event_t ev)171 static spi_ll_intr_t get_event_intr(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
172 {
173     spi_ll_intr_t intr = 0;
174 #if CONFIG_IDF_TARGET_ESP32S2
175 //Append mode is only supported on ESP32S2 now
176     if ((ev & SPI_EV_SEND) && hal->append_mode) intr |= SPI_LL_INTR_OUT_EOF;
177 #endif
178     if ((ev & SPI_EV_SEND) && !hal->append_mode) intr |= SPI_LL_INTR_CMD8;
179     if (ev & SPI_EV_RECV)          intr |= SPI_LL_INTR_CMD7;
180     if (ev & SPI_EV_BUF_TX)        intr |= SPI_LL_INTR_RDBUF;
181     if (ev & SPI_EV_BUF_RX)        intr |= SPI_LL_INTR_WRBUF;
182     if (ev & SPI_EV_CMD9)          intr |= SPI_LL_INTR_CMD9;
183     if (ev & SPI_EV_CMDA)          intr |= SPI_LL_INTR_CMDA;
184     if (ev & SPI_EV_TRANS)         intr |= SPI_LL_INTR_TRANS_DONE;
185     return intr;
186 }
187 
spi_slave_hd_hal_check_clear_event(spi_slave_hd_hal_context_t * hal,spi_event_t ev)188 bool spi_slave_hd_hal_check_clear_event(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
189 {
190     spi_ll_intr_t intr = get_event_intr(hal, ev);
191     if (spi_ll_get_intr(hal->dev, intr)) {
192         spi_ll_clear_intr(hal->dev, intr);
193         return true;
194     }
195     return false;
196 }
197 
//Check whether the interrupt(s) corresponding to ev have triggered; if so, disable them
//and return true. Also maintains the not-writable-interrupt workaround state that was
//detected in spi_slave_hd_hal_init().
bool spi_slave_hd_hal_check_disable_event(spi_slave_hd_hal_context_t *hal, spi_event_t ev)
{
    //The trans_done interrupt is used for the workaround when some interrupt is not writable
    spi_ll_intr_t intr = get_event_intr(hal, ev);

    // Workaround for these interrupts not writable
    uint32_t missing_intr = hal->intr_not_triggered & ev;
    if (missing_intr) {
        //Once the real CMD7/CMD8 status is observed, the workaround is no longer needed
        //for that event, so drop it from intr_not_triggered
        if ((missing_intr & SPI_EV_RECV) && spi_ll_get_intr(hal->dev, SPI_LL_INTR_CMD7)) {
            hal->intr_not_triggered &= ~SPI_EV_RECV;
        }
        if ((missing_intr & SPI_EV_SEND) && spi_ll_get_intr(hal->dev, SPI_LL_INTR_CMD8)) {
            hal->intr_not_triggered &= ~SPI_EV_SEND;
        }
        //Disable the stand-in trans_done interrupt once it has fired
        if (spi_ll_get_intr(hal->dev, SPI_LL_INTR_TRANS_DONE)) {
            spi_ll_disable_intr(hal->dev, SPI_LL_INTR_TRANS_DONE);
        }
    }

    if (spi_ll_get_intr(hal->dev, intr)) {
        spi_ll_disable_intr(hal->dev, intr);
        return true;
    }
    return false;
}
223 
//Enable the LL interrupt source(s) corresponding to the given event(s).
void spi_slave_hd_hal_enable_event_intr(spi_slave_hd_hal_context_t* hal, spi_event_t ev)
{
    spi_ll_enable_intr(hal->dev, get_event_intr(hal, ev));
}
229 
//Enable the interrupt(s) for the given event(s), applying the stand-in trans_done
//interrupt for events whose own interrupt bit was detected as not writable at init.
void spi_slave_hd_hal_invoke_event_intr(spi_slave_hd_hal_context_t* hal, spi_event_t ev)
{
    spi_ll_intr_t intr_bits = get_event_intr(hal, ev);

    // Workaround for these interrupts not writable
    if (hal->intr_not_triggered & ev & (SPI_EV_RECV | SPI_EV_SEND)) {
        intr_bits |= SPI_LL_INTR_TRANS_DONE;
    }

    spi_ll_enable_intr(hal->dev, intr_bits);
}
241 
//Read len bytes from the peripheral's shared registers, starting at byte address addr,
//into out_data.
void spi_slave_hd_hal_read_buffer(spi_slave_hd_hal_context_t *hal, int addr, uint8_t *out_data, size_t len)
{
    spi_ll_read_buffer_byte(hal->dev, addr, out_data, len);
}
246 
//Write len bytes from data into the peripheral's shared registers, starting at byte
//address addr.
void spi_slave_hd_hal_write_buffer(spi_slave_hd_hal_context_t *hal, int addr, uint8_t *data, size_t len)
{
    spi_ll_write_buffer_byte(hal->dev, addr, data, len);
}
251 
//Return the address field of the last transaction seen by the peripheral.
int spi_slave_hd_hal_get_last_addr(spi_slave_hd_hal_context_t *hal)
{
    return spi_ll_slave_hd_get_last_addr(hal->dev);
}
256 
//Return the number of bytes received in the last transaction.
int spi_slave_hd_hal_get_rxlen(spi_slave_hd_hal_context_t *hal)
{
    //The value is in bytes (not bits)
    return spi_ll_slave_get_rx_byte_len(hal->dev);
}
262 
//Return the number of bytes the DMA actually received into the RX descriptor chain
//set up by spi_slave_hd_hal_rxdma().
int spi_slave_hd_hal_rxdma_seg_get_len(spi_slave_hd_hal_context_t *hal)
{
    return lldesc_get_received_len(&hal->dmadesc_rx->desc, NULL);
}
268 
//Recycle one finished TX (append mode) descriptor: returns true and its stored context
//pointer (arg) through out_trans, or false when no further descriptor has finished.
//hal->tx_dma_head trails the descriptor chain one entry behind the completion point.
bool spi_slave_hd_hal_get_tx_finished_trans(spi_slave_hd_hal_context_t *hal, void **out_trans)
{
    //If the head has caught up with the descriptor the DMA reports as its latest out-EOF,
    //nothing new has completed since the last call
    if ((uint32_t)&hal->tx_dma_head->desc == spi_dma_ll_get_out_eof_desc_addr(hal->dma_out, hal->tx_dma_chan)) {
        return false;
    }

    //Advance to the next descriptor in the chain and hand its context back to the caller
    hal->tx_dma_head = (spi_slave_hd_hal_desc_append_t *)STAILQ_NEXT(&hal->tx_dma_head->desc, qe);
    *out_trans = hal->tx_dma_head->arg;
    hal->tx_recycled_desc_cnt++;

    return true;
}
281 
//Recycle one finished RX (append mode) descriptor: returns true with its stored context
//pointer (arg) and received length, or false when no further descriptor has finished.
//hal->rx_dma_head trails the descriptor chain one entry behind the completion point.
bool spi_slave_hd_hal_get_rx_finished_trans(spi_slave_hd_hal_context_t *hal, void **out_trans, size_t *out_len)
{
    //If the head has caught up with the descriptor the DMA reports as its latest
    //in-success-EOF, nothing new has completed since the last call
    if ((uint32_t)&hal->rx_dma_head->desc == spi_dma_ll_get_in_suc_eof_desc_addr(hal->dma_in, hal->rx_dma_chan)) {
        return false;
    }

    hal->rx_dma_head = (spi_slave_hd_hal_desc_append_t *)STAILQ_NEXT(&hal->rx_dma_head->desc, qe);
    *out_trans = hal->rx_dma_head->arg;
    //NOTE(review): relies on the DMA writing the received byte count back into
    //desc.length on completion — confirm against the chip's DMA documentation
    *out_len = hal->rx_dma_head->desc.length;
    hal->rx_recycled_desc_cnt++;

    return true;
}
295 
296 #if CONFIG_IDF_TARGET_ESP32S2
297 //Append mode is only supported on ESP32S2 now
//Fill (a chain of) DMA descriptor(s) at dmadesc for a buffer of len bytes, storing arg
//on every descriptor so it can be retrieved once the transfer finishes.
//For RX descriptors the size/length fields are rounded up to the next 32-bit boundary,
//as the DMA requires word-aligned receive buffers.
static void spi_slave_hd_hal_link_append_desc(spi_slave_hd_hal_desc_append_t *dmadesc, const void *data, int len, bool isrx, void *arg)
{
    //Bugfix guard: with len == 0 the loop below would never run and the dmadesc[n - 1]
    //accesses at the end would write out of bounds (n == 0)
    HAL_ASSERT(len > 0);
    HAL_ASSERT(len <= LLDESC_MAX_NUM_PER_DESC);     //TODO: Add support for transaction with length larger than 4092, IDF-2660
    int n = 0;
    while (len) {
        //Clamp each descriptor to the maximum chunk the DMA supports
        int dmachunklen = len;
        if (dmachunklen > LLDESC_MAX_NUM_PER_DESC) {
            dmachunklen = LLDESC_MAX_NUM_PER_DESC;
        }
        if (isrx) {
            //Receive needs DMA length rounded to next 32-bit boundary
            dmadesc[n].desc.size = (dmachunklen + 3) & (~3);
            dmadesc[n].desc.length = (dmachunklen + 3) & (~3);
        } else {
            dmadesc[n].desc.size = dmachunklen;
            dmadesc[n].desc.length = dmachunklen;
        }
        dmadesc[n].desc.buf = (uint8_t *)data;
        dmadesc[n].desc.eof = 0;
        dmadesc[n].desc.sosf = 0;
        dmadesc[n].desc.owner = 1;  //descriptor is owned by the DMA until it finishes
        //Provisional link to the next array slot; the last entry is fixed up below
        dmadesc[n].desc.qe.stqe_next = &dmadesc[n + 1].desc;
        dmadesc[n].arg = arg;
        len -= dmachunklen;
        data += dmachunklen;
        n++;
    }
    dmadesc[n - 1].desc.eof = 1; //Mark last DMA desc as end of stream.
    dmadesc[n - 1].desc.qe.stqe_next = NULL;
}
328 
spi_slave_hd_hal_txdma_append(spi_slave_hd_hal_context_t * hal,uint8_t * data,size_t len,void * arg)329 esp_err_t spi_slave_hd_hal_txdma_append(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len, void *arg)
330 {
331     //Check if there are enough available DMA descriptors for software to use
332     int num_required = (len + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
333     int not_recycled_desc_num = hal->tx_used_desc_cnt - hal->tx_recycled_desc_cnt;
334     int available_desc_num = hal->dma_desc_num - not_recycled_desc_num;
335     if (num_required > available_desc_num) {
336         return ESP_ERR_INVALID_STATE;
337     }
338 
339     spi_slave_hd_hal_link_append_desc(hal->tx_cur_desc, data, len, false, arg);
340 
341     if (!hal->tx_dma_started) {
342         hal->tx_dma_started = true;
343         //start a link
344         hal->tx_dma_tail = hal->tx_cur_desc;
345         spi_ll_clear_intr(hal->dev, SPI_LL_INTR_OUT_EOF);
346         spi_ll_dma_tx_fifo_reset(hal->dma_out);
347         spi_ll_outfifo_empty_clr(hal->dev);
348         spi_dma_ll_tx_reset(hal->dma_out, hal->tx_dma_chan);
349         spi_ll_dma_tx_enable(hal->dev, 1);
350         spi_dma_ll_tx_start(hal->dma_out, hal->tx_dma_chan, &hal->tx_cur_desc->desc);
351     } else {
352         //there is already a consecutive link
353         STAILQ_NEXT(&hal->tx_dma_tail->desc, qe) = &hal->tx_cur_desc->desc;
354         hal->tx_dma_tail = hal->tx_cur_desc;
355         spi_dma_ll_tx_restart(hal->dma_out, hal->tx_dma_chan);
356     }
357 
358     //Move the current descriptor pointer according to the number of the linked descriptors
359     for (int i = 0; i < num_required; i++) {
360         hal->tx_used_desc_cnt++;
361         hal->tx_cur_desc++;
362         if (hal->tx_cur_desc == hal->dmadesc_tx + hal->dma_desc_num) {
363             hal->tx_cur_desc = hal->dmadesc_tx;
364         }
365     }
366 
367     return ESP_OK;
368 }
369 
spi_slave_hd_hal_rxdma_append(spi_slave_hd_hal_context_t * hal,uint8_t * data,size_t len,void * arg)370 esp_err_t spi_slave_hd_hal_rxdma_append(spi_slave_hd_hal_context_t *hal, uint8_t *data, size_t len, void *arg)
371 {
372     //Check if there are enough available dma descriptors for software to use
373     int num_required = (len + LLDESC_MAX_NUM_PER_DESC - 1) / LLDESC_MAX_NUM_PER_DESC;
374     int not_recycled_desc_num = hal->rx_used_desc_cnt - hal->rx_recycled_desc_cnt;
375     int available_desc_num = hal->dma_desc_num - not_recycled_desc_num;
376     if (num_required > available_desc_num) {
377         return ESP_ERR_INVALID_STATE;
378     }
379 
380     spi_slave_hd_hal_link_append_desc(hal->rx_cur_desc, data, len, false, arg);
381 
382     if (!hal->rx_dma_started) {
383         hal->rx_dma_started = true;
384         //start a link
385         hal->rx_dma_tail = hal->rx_cur_desc;
386         spi_ll_clear_intr(hal->dev, SPI_LL_INTR_CMD7);
387         spi_dma_ll_rx_reset(hal->dma_in, hal->rx_dma_chan);
388         spi_ll_dma_rx_fifo_reset(hal->dma_in);
389         spi_ll_infifo_full_clr(hal->dev);
390         spi_ll_dma_rx_enable(hal->dev, 1);
391         spi_dma_ll_rx_start(hal->dma_in, hal->rx_dma_chan, &hal->rx_cur_desc->desc);
392     } else {
393         //there is already a consecutive link
394         STAILQ_NEXT(&hal->rx_dma_tail->desc, qe) = &hal->rx_cur_desc->desc;
395         hal->rx_dma_tail = hal->rx_cur_desc;
396         spi_dma_ll_rx_restart(hal->dma_in, hal->rx_dma_chan);
397     }
398 
399     //Move the current descriptor pointer according to the number of the linked descriptors
400     for (int i = 0; i < num_required; i++) {
401         hal->rx_used_desc_cnt++;
402         hal->rx_cur_desc++;
403         if (hal->rx_cur_desc == hal->dmadesc_rx + hal->dma_desc_num) {
404             hal->rx_cur_desc = hal->dmadesc_rx;
405         }
406     }
407 
408     return ESP_OK;
409 }
410 #endif  //#if CONFIG_IDF_TARGET_ESP32S2
411