/*
 * SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <stdio.h>
#include <string.h>
#include "freertos/FreeRTOS.h"
#include "freertos/queue.h"
#include "freertos/task.h"
#include "freertos/semphr.h"
#include "esp_log.h"
#include "driver/uart.h"
#include "esp_hci_transport.h"
#include "esp_hci_internal.h"
#include "common/hci_driver_h4.h"
#include "common/hci_driver_util.h"
#include "common/hci_driver_mem.h"
#include "hci_driver_uart.h"

#include "ble_hci_trans.h"
#include "esp_private/periph_ctrl.h"
#include "esp_private/gdma.h"
#include "hal/uhci_ll.h"

/*
 * UART DMA descriptor layout
 *
 * --------------------------------------------------------------
 * | own | EoF | sub_sof | 5'b0 | length [11:0] | size [11:0] |
 * --------------------------------------------------------------
 * | buf_ptr [31:0] |
 * --------------------------------------------------------------
 * | next_desc_ptr [31:0] |
 * --------------------------------------------------------------
 */

/* Note: this bitfield starts from the LSB! */
typedef struct uhci_lldesc_s {
    volatile uint32_t size : 12,
             length: 12,
             offset: 5, /* h/w reserves these 5 bits; s/w uses them as an offset into the buffer */
             sosf : 1,  /* start of sub-frame */
             eof : 1,   /* end of frame */
             owner : 1; /* owned by hw or sw */
    volatile const uint8_t *buf; /* points to the buffer data */
    union {
        volatile uint32_t empty;
        STAILQ_ENTRY(uhci_lldesc_s) qe; /* points to the next desc */
    };
} uhci_lldesc_t;

/**
 * @brief Enumeration of HCI transport transmission states.
 */
typedef enum {
    HCI_TRANS_TX_IDLE,  ///< HCI Transport TX is in the idle state.
    HCI_TRANS_TX_START, ///< HCI Transport TX is starting a transmission.
    HCI_TRANS_TX_END,   ///< HCI Transport TX has completed a transmission.
} hci_trans_tx_state_t;

typedef struct {
    TaskHandle_t task_handler;
    hci_driver_uart_params_config_t *hci_uart_params;
    SemaphoreHandle_t process_sem;
    struct hci_h4_sm *h4_sm;
    hci_driver_forward_fn *forward_cb;
    struct os_mempool *hci_rx_data_pool;        /*!< Memory pool for the RX data cache */
    uint8_t *hci_rx_data_buffer;
    struct os_mempool *hci_rxinfo_pool;         /*!< Memory pool for the rxinfo cache */
    os_membuf_t *hci_rxinfo_buffer;
    volatile bool rxinfo_mem_exhausted;         /*!< Indicates that the rxinfo pool is exhausted */
    volatile bool is_continue_rx;               /*!< Resume RX once an RX data buffer is freed */
    volatile hci_trans_tx_state_t hci_tx_state; /*!< HCI TX state */
    struct os_mempool lldesc_mem_pool;          /*!< Memory pool for uhci_lldesc_t */
    uhci_lldesc_t *lldesc_mem;
} hci_driver_uart_dma_env_t;

#define ESP_BT_HCI_TL_STATUS_OK (0) /*!< HCI_TL TX/RX operation status OK */
/* The number of lldescs in the pool */
#define HCI_LLDESCS_POOL_NUM (CONFIG_BT_LE_HCI_LLDESCS_POOL_NUM)
/* Default block size for HCI RX data */
#define HCI_RX_DATA_BLOCK_SIZE (DEFAULT_BT_LE_ACL_BUF_SIZE + BLE_HCI_TRANS_CMD_SZ)
#define HCI_RX_DATA_POOL_NUM (CONFIG_BT_LE_HCI_TRANS_RX_MEM_NUM)
#define HCI_RX_INFO_POOL_NUM (CONFIG_BT_LE_HCI_TRANS_RX_MEM_NUM + 1)

/**
 * @brief Callback function for HCI Transport Layer send/receive operations.
 */
typedef void (* esp_bt_hci_tl_callback_t) (void *arg, uint8_t status);

struct uart_txrxchannel {
    esp_bt_hci_tl_callback_t callback;
    void *arg;
    uhci_lldesc_t *link_head;
};

struct uart_env_tag {
    struct uart_txrxchannel tx;
    struct uart_txrxchannel rx;
};

typedef struct hci_message {
    void *ptr;                      ///< Pointer to the message data.
    uint32_t length;                ///< Length of the message data.
    STAILQ_ENTRY(hci_message) next; ///< Next element in the linked list.
} hci_message_t;

static void hci_driver_uart_dma_recv_async(uint8_t *buf, uint32_t size, esp_bt_hci_tl_callback_t callback, void *arg);
int hci_driver_uart_dma_rx_start(uint8_t *rx_data, uint32_t length);
int hci_driver_uart_dma_tx_start(esp_bt_hci_tl_callback_t callback, void *arg);

static const char *TAG = "uart_dma";
static hci_driver_uart_dma_env_t s_hci_driver_uart_dma_env;
static struct hci_h4_sm s_hci_driver_uart_h4_sm;
static hci_driver_uart_params_config_t hci_driver_uart_dma_params = BT_HCI_DRIVER_UART_CONFIG_DEFAULT();

/* The list of cached HCI RX messages */
STAILQ_HEAD(g_hci_rxinfo_list, hci_message);

DRAM_ATTR struct g_hci_rxinfo_list g_hci_rxinfo_head;
static DRAM_ATTR struct uart_env_tag uart_env;
static volatile uhci_dev_t *s_uhci_hw = &UHCI0;
static DRAM_ATTR gdma_channel_handle_t s_rx_channel;
static DRAM_ATTR gdma_channel_handle_t s_tx_channel;

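/**
 * @brief Free the memory pools and buffers allocated by hci_driver_uart_dma_memory_init().
 */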
static int hci_driver_uart_dma_memory_deinit(void)
{
    if (s_hci_driver_uart_dma_env.hci_rxinfo_buffer) {
        free(s_hci_driver_uart_dma_env.hci_rxinfo_buffer);
        s_hci_driver_uart_dma_env.hci_rxinfo_buffer = NULL;
    }

    if (s_hci_driver_uart_dma_env.hci_rxinfo_pool) {
        free(s_hci_driver_uart_dma_env.hci_rxinfo_pool);
        s_hci_driver_uart_dma_env.hci_rxinfo_pool = NULL;
    }

    if (s_hci_driver_uart_dma_env.hci_rx_data_buffer) {
        free(s_hci_driver_uart_dma_env.hci_rx_data_buffer);
        s_hci_driver_uart_dma_env.hci_rx_data_buffer = NULL;
    }

    if (s_hci_driver_uart_dma_env.hci_rx_data_pool) {
        free(s_hci_driver_uart_dma_env.hci_rx_data_pool);
        s_hci_driver_uart_dma_env.hci_rx_data_pool = NULL;
    }

    if (s_hci_driver_uart_dma_env.lldesc_mem) {
        free(s_hci_driver_uart_dma_env.lldesc_mem);
        s_hci_driver_uart_dma_env.lldesc_mem = NULL;
    }

    return 0;
}

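/**
 * @brief Allocate the lldesc, RX data, and rxinfo memory pools.
 *
 * @return 0 on success; a non-zero value on failure.
 */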
static int hci_driver_uart_dma_memory_init(void)
{
    int rc = 0;

    s_hci_driver_uart_dma_env.lldesc_mem = malloc(OS_MEMPOOL_SIZE(HCI_LLDESCS_POOL_NUM,
                                                  sizeof(uhci_lldesc_t)) * sizeof(os_membuf_t));
    if (!s_hci_driver_uart_dma_env.lldesc_mem) {
        return -1;
    }

    rc = os_mempool_init(&s_hci_driver_uart_dma_env.lldesc_mem_pool, HCI_LLDESCS_POOL_NUM,
                         sizeof(uhci_lldesc_t), s_hci_driver_uart_dma_env.lldesc_mem, "hci_lldesc_pool");
    if (rc) {
        goto init_err;
    }

    s_hci_driver_uart_dma_env.hci_rx_data_pool = (struct os_mempool *)malloc(sizeof(struct os_mempool));
    if (!s_hci_driver_uart_dma_env.hci_rx_data_pool) {
        /* Set a failure code; jumping to init_err with rc == 0 would report success */
        rc = -1;
        goto init_err;
    }

    memset(s_hci_driver_uart_dma_env.hci_rx_data_pool, 0, sizeof(struct os_mempool));
    s_hci_driver_uart_dma_env.hci_rx_data_buffer = malloc(OS_MEMPOOL_SIZE(HCI_RX_DATA_POOL_NUM,
                                                          HCI_RX_DATA_BLOCK_SIZE) * sizeof(os_membuf_t));
    if (!s_hci_driver_uart_dma_env.hci_rx_data_buffer) {
        rc = -1;
        goto init_err;
    }

    memset(s_hci_driver_uart_dma_env.hci_rx_data_buffer, 0, OS_MEMPOOL_SIZE(HCI_RX_DATA_POOL_NUM,
                                                            HCI_RX_DATA_BLOCK_SIZE) * sizeof(os_membuf_t));
    rc = os_mempool_init(s_hci_driver_uart_dma_env.hci_rx_data_pool, HCI_RX_DATA_POOL_NUM,
                         HCI_RX_DATA_BLOCK_SIZE, s_hci_driver_uart_dma_env.hci_rx_data_buffer,
                         "hci_rx_data_pool");
    if (rc) {
        goto init_err;
    }

    /* Allocate the HCI rxinfo pool */
    s_hci_driver_uart_dma_env.hci_rxinfo_pool = (struct os_mempool *)malloc(sizeof(struct os_mempool));
    if (!s_hci_driver_uart_dma_env.hci_rxinfo_pool) {
        rc = -1;
        goto init_err;
    }

    memset(s_hci_driver_uart_dma_env.hci_rxinfo_pool, 0, sizeof(struct os_mempool));
    s_hci_driver_uart_dma_env.hci_rxinfo_buffer = malloc(OS_MEMPOOL_SIZE(HCI_RX_INFO_POOL_NUM,
                                                         sizeof(hci_message_t)) * sizeof(os_membuf_t));
    if (!s_hci_driver_uart_dma_env.hci_rxinfo_buffer) {
        rc = -1;
        goto init_err;
    }

    memset(s_hci_driver_uart_dma_env.hci_rxinfo_buffer, 0, OS_MEMPOOL_SIZE(HCI_RX_INFO_POOL_NUM,
                                                           sizeof(hci_message_t)) * sizeof(os_membuf_t));
    rc = os_mempool_init(s_hci_driver_uart_dma_env.hci_rxinfo_pool, HCI_RX_INFO_POOL_NUM,
                         sizeof(hci_message_t), s_hci_driver_uart_dma_env.hci_rxinfo_buffer,
                         "hci_rxinfo_pool");
    if (rc) {
        goto init_err;
    }

    return rc;

init_err:
    hci_driver_uart_dma_memory_deinit();
    return rc;
}

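/**
 * @brief GDMA RX EOF interrupt callback; invokes and clears the pending RX callback.
 */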
static IRAM_ATTR bool hci_uart_tl_rx_eof_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
{
    esp_bt_hci_tl_callback_t callback = uart_env.rx.callback;
    void *arg = uart_env.rx.arg;
    assert(dma_chan == s_rx_channel);
    assert(uart_env.rx.callback != NULL);
    // clear the callback pointer
    uart_env.rx.callback = NULL;
    uart_env.rx.arg = NULL;
    // call the handler
    callback(arg, ESP_BT_HCI_TL_STATUS_OK);
    return true;
}

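/**
 * @brief GDMA TX EOF interrupt callback; invokes and clears the pending TX callback.
 */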
static IRAM_ATTR bool hci_uart_tl_tx_eof_callback(gdma_channel_handle_t dma_chan, gdma_event_data_t *event_data, void *user_data)
{
    esp_bt_hci_tl_callback_t callback = uart_env.tx.callback;
    assert(dma_chan == s_tx_channel);
    assert(uart_env.tx.callback != NULL);
    // clear the callback pointer
    uart_env.tx.callback = NULL;
    // call the handler
    callback(uart_env.tx.arg, ESP_BT_HCI_TL_STATUS_OK);
    uart_env.tx.arg = NULL;
    return true;
}

uint8_t * IRAM_ATTR hci_driver_uart_dma_rxdata_memory_get(void)
{
    uint8_t *rx_data;
    rx_data = os_memblock_get(s_hci_driver_uart_dma_env.hci_rx_data_pool);
    return rx_data;
}

hci_message_t * IRAM_ATTR hci_driver_uart_dma_rxinfo_memory_get(void)
{
    hci_message_t *rx_info;
    rx_info = os_memblock_get(s_hci_driver_uart_dma_env.hci_rxinfo_pool);
    return rx_info;
}

void IRAM_ATTR hci_driver_uart_dma_cache_rxinfo(hci_message_t *hci_rxinfo)
{
    os_sr_t sr;

    OS_ENTER_CRITICAL(sr);
    STAILQ_INSERT_TAIL(&g_hci_rxinfo_head, hci_rxinfo, next);
    OS_EXIT_CRITICAL(sr);
}

void IRAM_ATTR hci_driver_uart_dma_continue_rx_enable(bool enable)
{
    os_sr_t sr;
    OS_ENTER_CRITICAL(sr);
    s_hci_driver_uart_dma_env.is_continue_rx = enable;
    OS_EXIT_CRITICAL(sr);
}

void IRAM_ATTR hci_driver_uart_dma_rxinfo_mem_exhausted_set(bool is_exhausted)
{
    os_sr_t sr;
    OS_ENTER_CRITICAL(sr);
    s_hci_driver_uart_dma_env.rxinfo_mem_exhausted = is_exhausted;
    OS_EXIT_CRITICAL(sr);
}

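/**
 * @brief RX completion callback; caches the received data for the process task and
 *        restarts RX, or defers the restart if no RX data buffer is available.
 */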
void IRAM_ATTR hci_driver_uart_dma_recv_callback(void *arg, uint8_t status)
{
    BaseType_t xHigherPriorityTaskWoken = pdFALSE;
    hci_message_t *hci_rxinfo;
    uint8_t *rx_data;

    if (s_hci_driver_uart_dma_env.rxinfo_mem_exhausted) {
        ESP_LOGE(TAG, "RX data will be lost; increase the rxinfo memory count");
        assert(0);
    }

    hci_rxinfo = hci_driver_uart_dma_rxinfo_memory_get();
    if (!hci_rxinfo) {
        ESP_LOGW(TAG, "rxinfo memory exhausted; setting the exhausted flag");
        hci_driver_uart_dma_rxinfo_mem_exhausted_set(true);
        xSemaphoreGiveFromISR(s_hci_driver_uart_dma_env.process_sem, &xHigherPriorityTaskWoken);
        return;
    }

    hci_rxinfo->ptr = (void *)uart_env.rx.link_head->buf;
    hci_rxinfo->length = uart_env.rx.link_head->length;
    hci_driver_uart_dma_cache_rxinfo(hci_rxinfo);
    xSemaphoreGiveFromISR(s_hci_driver_uart_dma_env.process_sem, &xHigherPriorityTaskWoken);
    rx_data = hci_driver_uart_dma_rxdata_memory_get();
    if (!rx_data) {
        hci_driver_uart_dma_continue_rx_enable(true);
    } else {
        hci_driver_uart_dma_rx_start(rx_data, HCI_RX_DATA_BLOCK_SIZE);
    }
}

void IRAM_ATTR hci_driver_uart_dma_txstate_set(hci_trans_tx_state_t tx_state)
{
    os_sr_t sr;
    OS_ENTER_CRITICAL(sr);
    s_hci_driver_uart_dma_env.hci_tx_state = tx_state;
    OS_EXIT_CRITICAL(sr);
}

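/**
 * @brief TX completion callback; returns all TX descriptors to the pool and marks TX idle.
 */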
void IRAM_ATTR hci_driver_uart_dma_send_callback(void *arg, uint8_t status)
{
    uhci_lldesc_t *lldesc_head;
    uhci_lldesc_t *lldesc_nxt;
    BaseType_t xHigherPriorityTaskWoken = pdFALSE;

    lldesc_head = uart_env.tx.link_head;
    while (lldesc_head) {
        lldesc_nxt = lldesc_head->qe.stqe_next;
        os_memblock_put(&s_hci_driver_uart_dma_env.lldesc_mem_pool, lldesc_head);
        lldesc_head = lldesc_nxt;
    }

    uart_env.tx.link_head = NULL;
    hci_driver_uart_dma_txstate_set(HCI_TRANS_TX_IDLE);
    xSemaphoreGiveFromISR(s_hci_driver_uart_dma_env.process_sem, &xHigherPriorityTaskWoken);
}

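/**
 * @brief Recycle any previous RX descriptor, then link a fresh descriptor to the given
 *        buffer and start a GDMA RX transfer.
 */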
static IRAM_ATTR void hci_driver_uart_dma_recv_async(uint8_t *buf, uint32_t size, esp_bt_hci_tl_callback_t callback, void *arg)
{
    uhci_lldesc_t *lldesc_head;
    uhci_lldesc_t *lldesc_nxt;
    assert(buf != NULL);
    assert(size != 0);
    assert(callback != NULL);
    uart_env.rx.callback = callback;
    uart_env.rx.arg = arg;
    lldesc_head = uart_env.rx.link_head;

    /* Save the next pointer before returning each descriptor to the pool */
    while (lldesc_head) {
        lldesc_nxt = lldesc_head->qe.stqe_next;
        os_memblock_put(&s_hci_driver_uart_dma_env.lldesc_mem_pool, lldesc_head);
        lldesc_head = lldesc_nxt;
    }

    uart_env.rx.link_head = NULL;
    lldesc_head = os_memblock_get(&s_hci_driver_uart_dma_env.lldesc_mem_pool);
    assert(lldesc_head);
    memset(lldesc_head, 0, sizeof(uhci_lldesc_t));
    lldesc_head->buf = buf;
    lldesc_head->size = size;
    lldesc_head->eof = 0;
    s_uhci_hw->pkt_thres.pkt_thrs = size;
    uart_env.rx.link_head = lldesc_head;
    gdma_start(s_rx_channel, (intptr_t)(uart_env.rx.link_head));
}

int IRAM_ATTR hci_driver_uart_dma_rx_start(uint8_t *rx_data, uint32_t length)
{
    hci_driver_uart_dma_recv_async(rx_data, length, hci_driver_uart_dma_recv_callback, NULL);
    return 0;
}

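/**
 * @brief Dequeue pending TX frames, chain them into a descriptor list, and start a
 *        GDMA TX transfer.
 *
 * @return 0 if a transfer was started; -1 if there was nothing to send.
 */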
int hci_driver_uart_dma_tx_start(esp_bt_hci_tl_callback_t callback, void *arg)
{
    void *data;
    bool last_frame;
    bool head_is_set;
    uint32_t tx_len;
    uhci_lldesc_t *lldesc_data;
    uhci_lldesc_t *lldesc_head;
    uhci_lldesc_t *lldesc_tail;

    lldesc_head = NULL;
    lldesc_tail = NULL;
    head_is_set = false;
    last_frame = false;
    while (true) {
        tx_len = hci_driver_util_tx_list_dequeue(0xffffff, &data, &last_frame);
        if (!tx_len) {
            break;
        }

        lldesc_data = os_memblock_get(&s_hci_driver_uart_dma_env.lldesc_mem_pool);
        /* According to the current processing logic, it should never be NULL */
        assert(lldesc_data);
        memset(lldesc_data, 0, sizeof(uhci_lldesc_t));
        lldesc_data->length = tx_len;
        lldesc_data->buf = data;
        lldesc_data->eof = 0;
        if (!head_is_set) {
            lldesc_head = lldesc_data;
            head_is_set = true;
        } else {
            lldesc_tail->qe.stqe_next = lldesc_data;
        }

        lldesc_tail = lldesc_data;
        if (last_frame) {
            break;
        }
    }

    if (lldesc_head) {
        lldesc_tail->eof = 1;
        uart_env.tx.link_head = lldesc_head;
        uart_env.tx.callback = callback;
        uart_env.tx.arg = arg;
        /* The DMA interrupt may fire as soon as the transfer starts,
         * so set the TX state before starting it.
         */
        hci_driver_uart_dma_txstate_set(HCI_TRANS_TX_START);
        gdma_start(s_tx_channel, (intptr_t)(uart_env.tx.link_head));
        return 0;
    } else {
        return -1;
    }
}

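/**
 * @brief Enable the UHCI peripheral, allocate a pair of GDMA channels, register the
 *        EOF callbacks, and attach the UART port to UHCI.
 */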
static void hci_driver_uart_dma_install(void)
{
    periph_module_enable(PERIPH_UHCI0_MODULE);
    periph_module_reset(PERIPH_UHCI0_MODULE);
    // install the GDMA driver
    gdma_channel_alloc_config_t tx_channel_config = {
        .flags.reserve_sibling = 1,
        .direction = GDMA_CHANNEL_DIRECTION_TX,
    };

    ESP_ERROR_CHECK(gdma_new_channel(&tx_channel_config, &s_tx_channel));
    gdma_channel_alloc_config_t rx_channel_config = {
        .direction = GDMA_CHANNEL_DIRECTION_RX,
        .sibling_chan = s_tx_channel,
    };

    ESP_ERROR_CHECK(gdma_new_channel(&rx_channel_config, &s_rx_channel));
    gdma_connect(s_tx_channel, GDMA_MAKE_TRIGGER(GDMA_TRIG_PERIPH_UHCI, 0));
    gdma_connect(s_rx_channel, GDMA_MAKE_TRIGGER(GDMA_TRIG_PERIPH_UHCI, 0));
    gdma_strategy_config_t strategy_config = {
        .auto_update_desc = false,
        .owner_check = false
    };

    gdma_apply_strategy(s_tx_channel, &strategy_config);
    gdma_apply_strategy(s_rx_channel, &strategy_config);
    gdma_rx_event_callbacks_t rx_cbs = {
        .on_recv_eof = hci_uart_tl_rx_eof_callback
    };

    gdma_register_rx_event_callbacks(s_rx_channel, &rx_cbs, NULL);
    gdma_tx_event_callbacks_t tx_cbs = {
        .on_trans_eof = hci_uart_tl_tx_eof_callback
    };

    gdma_register_tx_event_callbacks(s_tx_channel, &tx_cbs, NULL);
    // configure UHCI
    uhci_ll_init((uhci_dev_t *)s_uhci_hw);
    // uhci_ll_set_eof_mode((uhci_dev_t *)s_uhci_hw, UHCI_RX_LEN_EOF);
    uhci_ll_set_eof_mode((uhci_dev_t *)s_uhci_hw, UHCI_RX_IDLE_EOF);
    // disable software flow control
    s_uhci_hw->escape_conf.val = 0;
    uhci_ll_attach_uart_port((uhci_dev_t *)s_uhci_hw, s_hci_driver_uart_dma_env.hci_uart_params->hci_uart_port);
}

static int
hci_driver_uart_dma_tx(hci_driver_data_type_t data_type, uint8_t *data, uint32_t length,
                       hci_driver_direction_t dir)
{
    /* For now, this layer is only used by the controller. */
    assert(dir == HCI_DRIVER_DIR_C2H);
    ESP_LOGD(TAG, "dma tx:");
    ESP_LOG_BUFFER_HEXDUMP(TAG, data, length, ESP_LOG_DEBUG);

    hci_driver_util_tx_list_enqueue(data_type, data, length);
    xSemaphoreGive(s_hci_driver_uart_dma_env.process_sem);
    return 0;
}

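/**
 * @brief H4 frame callback; forwards a reassembled host-to-controller packet.
 */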
static int
hci_driver_uart_dma_h4_frame_cb(uint8_t pkt_type, void *data)
{
    hci_driver_forward_fn *forward_cb;
    forward_cb = s_hci_driver_uart_dma_env.forward_cb;
    if (!forward_cb) {
        return -1;
    }
    ESP_LOGD(TAG, "h4 frame");
    return forward_cb(pkt_type, data, 0, HCI_DRIVER_DIR_H2C);
}

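/**
 * @brief Main transport task: starts pending TX transfers, handles the rxinfo-exhausted
 *        case, and parses cached RX data through the H4 state machine.
 */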
static void
hci_driver_uart_dma_process_task(void *p)
{
    hci_message_t *rxinfo_container;
    os_sr_t sr;
    int ret;
    uint8_t *rx_data;
    uint32_t rx_len;

    while (true) {
        xSemaphoreTake(s_hci_driver_uart_dma_env.process_sem, portMAX_DELAY);
        ESP_LOGD(TAG, "task run:%d", s_hci_driver_uart_dma_env.hci_tx_state);
        /* Process TX data */
        if (s_hci_driver_uart_dma_env.hci_tx_state == HCI_TRANS_TX_IDLE) {
            hci_driver_uart_dma_tx_start(hci_driver_uart_dma_send_callback, (void *)&uart_env);
        }

        if (s_hci_driver_uart_dma_env.rxinfo_mem_exhausted) {
            rx_data = (void *)uart_env.rx.link_head->buf;
            rx_len = uart_env.rx.link_head->length;
            ESP_LOGD(TAG, "rxinfo exhausted:");
            ESP_LOG_BUFFER_HEXDUMP(TAG, rx_data, rx_len, ESP_LOG_DEBUG);
            ret = hci_h4_sm_rx(s_hci_driver_uart_dma_env.h4_sm, rx_data, rx_len);
            hci_driver_uart_dma_rx_start(rx_data, HCI_RX_DATA_BLOCK_SIZE);
            hci_driver_uart_dma_rxinfo_mem_exhausted_set(false);
            if (ret < 0) {
                ESP_LOGW(TAG, "failed to parse rx data!");
                r_ble_ll_hci_ev_hw_err(ESP_HCI_SYNC_LOSS_ERR);
            }
        }

        while (!STAILQ_EMPTY(&g_hci_rxinfo_head)) {
            OS_ENTER_CRITICAL(sr);
            rxinfo_container = STAILQ_FIRST(&g_hci_rxinfo_head);
            STAILQ_REMOVE_HEAD(&g_hci_rxinfo_head, next);
            OS_EXIT_CRITICAL(sr);

            rx_data = rxinfo_container->ptr;
            rx_len = rxinfo_container->length;
            ESP_LOGD(TAG, "uart rx");
            ESP_LOG_BUFFER_HEXDUMP(TAG, rx_data, rx_len, ESP_LOG_DEBUG);
            ret = hci_h4_sm_rx(s_hci_driver_uart_dma_env.h4_sm, rx_data, rx_len);
            if (ret < 0) {
                ESP_LOGW(TAG, "failed to parse rx data!");
                r_ble_ll_hci_ev_hw_err(ESP_HCI_SYNC_LOSS_ERR);
            }

            os_memblock_put(s_hci_driver_uart_dma_env.hci_rxinfo_pool, rxinfo_container);
            /* No need to enter a critical section here */
            if (s_hci_driver_uart_dma_env.is_continue_rx) {
                /* Clear the continue-RX flag before restarting; the RX interrupt may
                 * fire as soon as RX starts. */
                hci_driver_uart_dma_continue_rx_enable(false);
                hci_driver_uart_dma_rx_start(rx_data, HCI_RX_DATA_BLOCK_SIZE);
            } else {
                os_memblock_put(s_hci_driver_uart_dma_env.hci_rx_data_pool, rx_data);
            }
        }
    }
}

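/**
 * @brief Create the HCI transport processing task.
 */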
static int
hci_driver_uart_dma_task_create(void)
{
    /* !TODO: Set the core id by menuconfig */
    xTaskCreatePinnedToCore(hci_driver_uart_dma_process_task, "hci_driver_uart_dma_process_task",
                            CONFIG_BT_LE_HCI_TRANS_TASK_STACK_SIZE, NULL,
                            ESP_TASK_BT_CONTROLLER_PRIO, &s_hci_driver_uart_dma_env.task_handler,
                            0);
    assert(s_hci_driver_uart_dma_env.task_handler);

    ESP_LOGI(TAG, "hci transport task created successfully, prio:%d, stack size: %ld",
             ESP_TASK_BT_CONTROLLER_PRIO, CONFIG_BT_LE_HCI_TRANS_TASK_STACK_SIZE);

    return 0;
}

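/**
 * @brief Tear down the transport: delete the task, UART driver, memory pools, and semaphore.
 */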
static void
hci_driver_uart_dma_deinit(void)
{
    if (s_hci_driver_uart_dma_env.task_handler) {
        vTaskDelete(s_hci_driver_uart_dma_env.task_handler);
        s_hci_driver_uart_dma_env.task_handler = NULL;
    }

    ESP_ERROR_CHECK(uart_driver_delete(s_hci_driver_uart_dma_env.hci_uart_params->hci_uart_port));
    hci_driver_uart_dma_memory_deinit();
    /* Only delete the semaphore if it was actually created */
    if (s_hci_driver_uart_dma_env.process_sem) {
        vSemaphoreDelete(s_hci_driver_uart_dma_env.process_sem);
    }

    hci_driver_util_deinit();
    memset(&s_hci_driver_uart_dma_env, 0, sizeof(hci_driver_uart_dma_env_t));
}
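/**
 * @brief Initialize the transport: H4 state machine, memory pools, UART, UHCI/GDMA,
 *        and the processing task; then start the first RX transfer.
 */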
static int
hci_driver_uart_dma_init(hci_driver_forward_fn *cb)
{
    int rc;

    memset(&s_hci_driver_uart_dma_env, 0, sizeof(hci_driver_uart_dma_env_t));

    s_hci_driver_uart_dma_env.h4_sm = &s_hci_driver_uart_h4_sm;
    hci_h4_sm_init(s_hci_driver_uart_dma_env.h4_sm, &s_hci_driver_mem_alloc, hci_driver_uart_dma_h4_frame_cb);

    rc = hci_driver_util_init();
    if (rc) {
        goto error;
    }

    s_hci_driver_uart_dma_env.process_sem = xSemaphoreCreateBinary();
    if (!s_hci_driver_uart_dma_env.process_sem) {
        /* Set a failure code; jumping to error with rc == 0 would report success */
        rc = -1;
        goto error;
    }

    rc = hci_driver_uart_dma_memory_init();
    if (rc) {
        goto error;
    }

    s_hci_driver_uart_dma_env.forward_cb = cb;
    s_hci_driver_uart_dma_env.hci_uart_params = &hci_driver_uart_dma_params;
    hci_driver_uart_config(&hci_driver_uart_dma_params);

    ESP_LOGI(TAG, "attaching uart to uhci");
    hci_driver_uart_dma_install();

    STAILQ_INIT(&g_hci_rxinfo_head);

    rc = hci_driver_uart_dma_task_create();
    if (rc) {
        goto error;
    }

    s_hci_driver_uart_dma_env.hci_tx_state = HCI_TRANS_TX_IDLE;
    s_hci_driver_uart_dma_env.rxinfo_mem_exhausted = false;
    s_hci_driver_uart_dma_env.is_continue_rx = false;
    hci_driver_uart_dma_rx_start(os_memblock_get(s_hci_driver_uart_dma_env.hci_rx_data_pool),
                                 HCI_RX_DATA_BLOCK_SIZE);
    return 0;

error:
    hci_driver_uart_dma_deinit();
    return rc;
}

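/**
 * @brief Reconfigure the UART pins used by the HCI transport.
 */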
int
hci_driver_uart_dma_reconfig_pin(int tx_pin, int rx_pin, int cts_pin, int rts_pin)
{
    hci_driver_uart_params_config_t *uart_param = s_hci_driver_uart_dma_env.hci_uart_params;
    uart_param->hci_uart_tx_pin = tx_pin;
    uart_param->hci_uart_rx_pin = rx_pin;
    uart_param->hci_uart_rts_pin = rts_pin;
    uart_param->hci_uart_cts_pin = cts_pin;
    return hci_driver_uart_config(uart_param);
}

hci_driver_ops_t hci_driver_uart_dma_ops = {
    .hci_driver_tx = hci_driver_uart_dma_tx,
    .hci_driver_init = hci_driver_uart_dma_init,
    .hci_driver_deinit = hci_driver_uart_dma_deinit,
};