1 /*
2 * Copyright (c) 2018-2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8 #include <stdbool.h>
9 #include <stddef.h>
10 #include <limits.h>
11
12 #include <zephyr/bluetooth/hci_types.h>
13 #include <zephyr/sys/byteorder.h>
14 #include <soc.h>
15
16 #include "hal/cpu.h"
17 #include "hal/ccm.h"
18 #include "hal/radio.h"
19 #include "hal/ticker.h"
20
21 #include "util/util.h"
22 #include "util/mem.h"
23 #include "util/memq.h"
24 #include "util/mfifo.h"
25
26 #include "ticker/ticker.h"
27
28 #include "pdu_vendor.h"
29 #include "pdu.h"
30
31 #include "lll.h"
32 #include "lll_vendor.h"
33 #include "lll_adv_types.h"
34 #include "lll_adv.h"
35 #include "lll_adv_pdu.h"
36 #include "lll_df_types.h"
37 #include "lll_conn.h"
38 #include "lll_chan.h"
39 #include "lll_filter.h"
40
41 #include "lll_internal.h"
42 #include "lll_tim_internal.h"
43 #include "lll_adv_internal.h"
44 #include "lll_prof_internal.h"
45
46 #include "hal/debug.h"
47
48 static int init_reset(void);
49 static int prepare_cb(struct lll_prepare_param *prepare_param);
50 static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb);
51 static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
52 static void isr_tx(void *param);
53 static void isr_rx(void *param);
54 static void isr_done(void *param);
55 static void isr_abort(void *param);
56 static void isr_cleanup(void *param);
57 static void isr_race(void *param);
58 static void chan_prepare(struct lll_adv *lll);
59 static inline int isr_rx_pdu(struct lll_adv *lll,
60 uint8_t devmatch_ok, uint8_t devmatch_id,
61 uint8_t irkmatch_ok, uint8_t irkmatch_id,
62 uint8_t rssi_ready);
63 static inline bool isr_rx_sr_check(struct lll_adv *lll, struct pdu_adv *adv,
64 struct pdu_adv *sr, uint8_t devmatch_ok,
65 uint8_t *rl_idx);
66 static inline bool isr_rx_sr_adva_check(struct pdu_adv *adv,
67 struct pdu_adv *sr);
68 #if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
69 static inline int isr_rx_sr_report(struct pdu_adv *pdu_adv_rx,
70 uint8_t rssi_ready);
71 #endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
72 static inline bool isr_rx_ci_check(struct lll_adv *lll, struct pdu_adv *adv,
73 struct pdu_adv *ci, uint8_t devmatch_ok,
74 uint8_t *rl_idx);
75 static inline bool isr_rx_ci_tgta_check(struct lll_adv *lll,
76 struct pdu_adv *adv, struct pdu_adv *ci,
77 uint8_t rl_idx);
78 static inline bool isr_rx_ci_adva_check(struct pdu_adv *adv,
79 struct pdu_adv *ci);
80
81 #if defined(CONFIG_BT_CTLR_ADV_EXT)
#define PAYLOAD_FRAG_COUNT \
	DIV_ROUND_UP(CONFIG_BT_CTLR_ADV_DATA_LEN_MAX, \
		     PDU_AC_PAYLOAD_SIZE_MAX)
85 #define BT_CTLR_ADV_AUX_SET CONFIG_BT_CTLR_ADV_AUX_SET
86 #if defined(CONFIG_BT_CTLR_ADV_PERIODIC)
87 #define BT_CTLR_ADV_SYNC_SET CONFIG_BT_CTLR_ADV_SYNC_SET
88 #else /* !CONFIG_BT_CTLR_ADV_PERIODIC */
89 #define BT_CTLR_ADV_SYNC_SET 0
90 #endif /* !CONFIG_BT_CTLR_ADV_PERIODIC */
91 #else
92 #define PAYLOAD_FRAG_COUNT 1
93 #define BT_CTLR_ADV_AUX_SET 0
94 #define BT_CTLR_ADV_SYNC_SET 0
95 #endif
96
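/* AD data PDU buffer pool dimensioning: at least one PDU buffer per
 * advertising set, plus payload fragments for the auxiliary and periodic
 * advertising chains, plus FIFO head-room so that new data can be committed
 * while previously used buffers are still owned by the LLL.
 */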
97 #define PDU_MEM_SIZE MROUND(PDU_AC_LL_HEADER_SIZE + \
98 PDU_AC_PAYLOAD_SIZE_MAX)
99 #define PDU_MEM_COUNT_MIN (BT_CTLR_ADV_SET + \
100 (BT_CTLR_ADV_SET * PAYLOAD_FRAG_COUNT) + \
101 (BT_CTLR_ADV_AUX_SET * PAYLOAD_FRAG_COUNT) + \
102 (BT_CTLR_ADV_SYNC_SET * PAYLOAD_FRAG_COUNT))
103 #define PDU_MEM_FIFO_COUNT ((BT_CTLR_ADV_SET * PAYLOAD_FRAG_COUNT * 2) + \
104 (CONFIG_BT_CTLR_ADV_DATA_BUF_MAX * \
105 PAYLOAD_FRAG_COUNT))
106 #define PDU_MEM_COUNT (PDU_MEM_COUNT_MIN + PDU_MEM_FIFO_COUNT)
107 #define PDU_POOL_SIZE (PDU_MEM_SIZE * PDU_MEM_COUNT)
108
109 /* Free AD data PDU buffer pool */
110 static struct {
111 void *free;
112 uint8_t pool[PDU_POOL_SIZE];
113 } mem_pdu;
114
115 /* FIFO to return stale AD data PDU buffers from LLL to thread context */
116 static MFIFO_DEFINE(pdu_free, sizeof(void *), PDU_MEM_FIFO_COUNT);
117
118 /* Semaphore to wakeup thread waiting for free AD data PDU buffers */
119 static struct k_sem sem_pdu_free;
120
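/* One-time initialization of the advertising LLL module */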
int lll_adv_init(void)
122 {
123 int err;
124
125 err = init_reset();
126 if (err) {
127 return err;
128 }
129
130 return 0;
131 }
132
int lll_adv_reset(void)
134 {
135 int err;
136
137 err = init_reset();
138 if (err) {
139 return err;
140 }
141
142 return 0;
143 }
144
int lll_adv_data_init(struct lll_adv_pdu *pdu)
146 {
147 struct pdu_adv *p;
148
149 p = mem_acquire(&mem_pdu.free);
150 if (!p) {
151 return -ENOMEM;
152 }
153
154 p->len = 0U;
155 pdu->pdu[0] = (void *)p;
156
157 return 0;
158 }
159
int lll_adv_data_reset(struct lll_adv_pdu *pdu)
161 {
	/* NOTE: this function is used on HCI reset to mem-zero the structure
	 *       members that were otherwise zeroed by the architecture
	 *       startup code when it zeroed the .bss section.
	 *       The pdu[0] element of the array is not initialized here, as a
	 *       subsequent call to lll_adv_data_init() will allocate a PDU
	 *       buffer and assign it.
	 */
169
170 pdu->first = 0U;
171 pdu->last = 0U;
172 pdu->pdu[1] = NULL;
173
174 return 0;
175 }
176
int lll_adv_data_release(struct lll_adv_pdu *pdu)
178 {
179 uint8_t last;
180 void *p;
181
182 last = pdu->last;
183 p = pdu->pdu[last];
184 pdu->pdu[last] = NULL;
185 mem_release(p, &mem_pdu.free);
186
187 last++;
188 if (last == DOUBLE_BUFFER_SIZE) {
189 last = 0U;
190 }
191 p = pdu->pdu[last];
192 if (p) {
193 pdu->pdu[last] = NULL;
194 mem_release(p, &mem_pdu.free);
195 }
196
197 return 0;
198 }
199
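/* Return the double buffer entry that thread context may fill with new
 * advertising data, i.e. the entry not currently consumed by the LLL. If the
 * entry has no buffer yet, reuse a stale buffer returned through the pdu_free
 * FIFO, else acquire one from the free pool, else block on sem_pdu_free until
 * the LLL releases one.
 */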
struct pdu_adv *lll_adv_pdu_alloc(struct lll_adv_pdu *pdu, uint8_t *idx)
201 {
202 uint8_t first, last;
203 struct pdu_adv *p;
204 int err;
205
206 first = pdu->first;
207 last = pdu->last;
208 if (first == last) {
209 last++;
210 if (last == DOUBLE_BUFFER_SIZE) {
211 last = 0U;
212 }
213 } else {
214 uint8_t first_latest;
215
216 pdu->last = first;
217 /* NOTE: Ensure that data is synchronized so that an ISR
218 * vectored, after pdu->last has been updated, does
219 * access the latest value.
220 */
221 cpu_dmb();
222 first_latest = pdu->first;
223 if (first_latest != first) {
224 last++;
225 if (last == DOUBLE_BUFFER_SIZE) {
226 last = 0U;
227 }
228 }
229 }
230
231 *idx = last;
232
233 p = (void *)pdu->pdu[last];
234 if (p) {
235 return p;
236 }
237
238 p = MFIFO_DEQUEUE_PEEK(pdu_free);
239 if (p) {
240 err = k_sem_take(&sem_pdu_free, K_NO_WAIT);
241 LL_ASSERT(!err);
242
243 MFIFO_DEQUEUE(pdu_free);
244 pdu->pdu[last] = (void *)p;
245
246 return p;
247 }
248
249 p = mem_acquire(&mem_pdu.free);
250 if (p) {
251 pdu->pdu[last] = (void *)p;
252
253 return p;
254 }
255
256 err = k_sem_take(&sem_pdu_free, K_FOREVER);
257 LL_ASSERT(!err);
258
259 p = MFIFO_DEQUEUE(pdu_free);
260 LL_ASSERT(p);
261
262 pdu->pdu[last] = (void *)p;
263
264 return p;
265 }
266
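/* Called in LLL context to fetch the latest committed PDU from the double
 * buffer. When thread context has committed a new PDU (first != last), swap
 * to it, set *is_modified, and return the stale buffer to thread context
 * through the pdu_free FIFO and sem_pdu_free. Returns NULL if no FIFO slot is
 * available to return the stale buffer.
 */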
struct pdu_adv *lll_adv_pdu_latest_get(struct lll_adv_pdu *pdu,
				       uint8_t *is_modified)
269 {
270 uint8_t first;
271
272 first = pdu->first;
273 if (first != pdu->last) {
274 uint8_t free_idx;
275 uint8_t pdu_idx;
276 void *p;
277
278 if (!MFIFO_ENQUEUE_IDX_GET(pdu_free, &free_idx)) {
279 return NULL;
280 }
281
282 pdu_idx = first;
283
284 first += 1U;
285 if (first == DOUBLE_BUFFER_SIZE) {
286 first = 0U;
287 }
288 pdu->first = first;
289 *is_modified = 1U;
290
291 p = pdu->pdu[pdu_idx];
292 pdu->pdu[pdu_idx] = NULL;
293
294 MFIFO_BY_IDX_ENQUEUE(pdu_free, free_idx, p);
295 k_sem_give(&sem_pdu_free);
296 }
297
298 return (void *)pdu->pdu[first];
299 }
300
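/* External prepare entry point: request the HF clock and enqueue prepare_cb
 * through the common LLL prepare pipeline.
 */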
void lll_adv_prepare(void *param)
302 {
303 struct lll_prepare_param *p = param;
304 int err;
305
306 err = lll_clk_on();
307 LL_ASSERT(!err || err == -EINPROGRESS);
308
309 err = lll_prepare(is_abort_cb, abort_cb, prepare_cb, 0, p);
310 LL_ASSERT(!err || err == -EINPROGRESS);
311 }
312
static int init_reset(void)
{
	/* Initialize AD data PDU pool, return FIFO and free-buffer semaphore */
	mem_init(mem_pdu.pool, PDU_MEM_SIZE, PDU_MEM_COUNT, &mem_pdu.free);
	MFIFO_INIT(pdu_free);
	k_sem_init(&sem_pdu_free, 0, PDU_MEM_FIFO_COUNT);

	return 0;
}
317
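/* Prepare callback run at the start of each advertising event: configure the
 * radio (PHY, access address, CRC, device filter), prepare the first
 * advertising channel and start the radio timer at the scheduled tick, with
 * optional PA GPIO lead time.
 */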
static int prepare_cb(struct lll_prepare_param *prepare_param)
319 {
320 struct lll_adv *lll = prepare_param->param;
321 uint32_t aa = sys_cpu_to_le32(PDU_AC_ACCESS_ADDR);
322 uint32_t ticks_at_event, ticks_at_start;
323 uint32_t remainder_us;
324 struct ull_hdr *ull;
325 uint32_t remainder;
326
327 DEBUG_RADIO_START_A(1);
328
329 #if defined(CONFIG_BT_PERIPHERAL)
	/* Check if stopped (on connection establishment race between LLL and
	 * ULL).
	 */
	if (unlikely(lll->conn && lll->conn->periph.initiated)) {
334 int err;
335
336 err = lll_clk_off();
337 LL_ASSERT(!err || err == -EBUSY);
338
339 lll_done(NULL);
340
341 DEBUG_RADIO_START_A(0);
342 return 0;
343 }
344 #endif /* CONFIG_BT_PERIPHERAL */
345
346 radio_reset();
347 /* TODO: other Tx Power settings */
348 radio_tx_power_set(RADIO_TXP_DEFAULT);
349
350 #if defined(CONFIG_BT_CTLR_ADV_EXT)
351 /* TODO: if coded we use S8? */
352 radio_phy_set(lll->phy_p, 1);
353 radio_pkt_configure(8, PDU_AC_LEG_PAYLOAD_SIZE_MAX, (lll->phy_p << 1));
354 #else /* !CONFIG_BT_CTLR_ADV_EXT */
355 radio_phy_set(0, 0);
356 radio_pkt_configure(8, PDU_AC_LEG_PAYLOAD_SIZE_MAX, 0);
357 #endif /* !CONFIG_BT_CTLR_ADV_EXT */
358
359 radio_aa_set((uint8_t *)&aa);
360 radio_crc_configure(((0x5bUL) | ((0x06UL) << 8) | ((0x00UL) << 16)),
361 0x555555);
362
363 lll->chan_map_curr = lll->chan_map;
364
365 chan_prepare(lll);
366
367 #if defined(CONFIG_BT_HCI_MESH_EXT)
368 _radio.mesh_adv_end_us = 0;
369 #endif /* CONFIG_BT_HCI_MESH_EXT */
370
371
372 #if defined(CONFIG_BT_CTLR_PRIVACY)
373 if (ull_filter_lll_rl_enabled()) {
374 struct lll_filter *filter =
375 ull_filter_lll_get(!!(lll->filter_policy));
376
377 radio_filter_configure(filter->enable_bitmask,
378 filter->addr_type_bitmask,
379 (uint8_t *)filter->bdaddr);
380 } else
381 #endif /* CONFIG_BT_CTLR_PRIVACY */
382
383 if (IS_ENABLED(CONFIG_BT_CTLR_FILTER_ACCEPT_LIST) && lll->filter_policy) {
384 /* Setup Radio Filter */
385 struct lll_filter *fal = ull_filter_lll_get(true);
386
387 radio_filter_configure(fal->enable_bitmask,
388 fal->addr_type_bitmask,
389 (uint8_t *)fal->bdaddr);
390 }
391
392 ticks_at_event = prepare_param->ticks_at_expire;
393 ull = HDR_LLL2ULL(lll);
394 ticks_at_event += lll_event_offset_get(ull);
395
396 ticks_at_start = ticks_at_event;
397 ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
398
399 remainder = prepare_param->remainder;
400 remainder_us = radio_tmr_start(1, ticks_at_start, remainder);
401
402 /* capture end of Tx-ed PDU, used to calculate HCTO. */
403 radio_tmr_end_capture();
404
405 #if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
406 radio_gpio_pa_setup();
407 radio_gpio_pa_lna_enable(remainder_us +
408 radio_tx_ready_delay_get(0, 0) -
409 HAL_RADIO_GPIO_PA_OFFSET);
410 #else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
411 ARG_UNUSED(remainder_us);
412 #endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
413
414 #if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
415 (EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
416 /* check if preempt to start has changed */
417 if (lll_preempt_calc(ull, (TICKER_ID_ADV_BASE +
418 ull_adv_lll_handle_get(lll)),
419 ticks_at_event)) {
420 radio_isr_set(isr_abort, lll);
421 radio_disable();
422 } else
423 #endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
424 {
425 uint32_t ret;
426
427 ret = lll_prepare_done(lll);
428 LL_ASSERT(!ret);
429 }
430
431 DEBUG_RADIO_START_A(1);
432
433 return 0;
434 }
435
436 #if defined(CONFIG_BT_PERIPHERAL)
static int resume_prepare_cb(struct lll_prepare_param *p)
438 {
439 struct ull_hdr *ull = HDR_LLL2ULL(p->param);
440
441 p->ticks_at_expire = ticker_ticks_now_get() - lll_event_offset_get(ull);
442 p->remainder = 0;
443 p->lazy = 0;
444
445 return prepare_cb(p);
446 }
447 #endif /* CONFIG_BT_PERIPHERAL */
448
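/* Decide whether the current advertising event yields to the next prepare.
 * Only high duty cycle directed advertising resists cancellation; it
 * registers resume_prepare_cb so the event is resumed after the pre-empting
 * event completes.
 */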
static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb)
450 {
451 #if defined(CONFIG_BT_PERIPHERAL)
452 struct lll_adv *lll = curr;
453 struct pdu_adv *pdu;
454 #endif /* CONFIG_BT_PERIPHERAL */
455
456 /* TODO: prio check */
457 if (next != curr) {
458 if (0) {
459 #if defined(CONFIG_BT_PERIPHERAL)
460 } else if (lll->is_hdcd) {
461 int err;
462
463 /* wrap back after the pre-empter */
464 *resume_cb = resume_prepare_cb;
465
466 /* Retain HF clk */
467 err = lll_clk_on();
468 LL_ASSERT(!err || err == -EINPROGRESS);
469
470 return -EAGAIN;
471 #endif /* CONFIG_BT_PERIPHERAL */
472 } else {
473 return -ECANCELED;
474 }
475 }
476
477 #if defined(CONFIG_BT_PERIPHERAL)
478 pdu = lll_adv_data_curr_get(lll);
479 if (pdu->type == PDU_ADV_TYPE_DIRECT_IND) {
480 return 0;
481 }
482 #endif /* CONFIG_BT_PERIPHERAL */
483
484 return -ECANCELED;
485 }
486
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
488 {
489 int err;
490
491 /* NOTE: This is not a prepare being cancelled */
492 if (!prepare_param) {
493 /* Perform event abort here.
494 * After event has been cleanly aborted, clean up resources
495 * and dispatch event done.
496 */
497 radio_isr_set(isr_abort, param);
498 radio_disable();
499 return;
500 }
501
502 /* NOTE: Else clean the top half preparations of the aborted event
503 * currently in preparation pipeline.
504 */
505 err = lll_clk_off();
506 LL_ASSERT(!err || err == -EBUSY);
507
508 lll_done(param);
509 }
510
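/* ISR after the advertising PDU has been transmitted: switch the radio to
 * receive a SCAN_REQ or CONNECT_IND within tIFS, program the header complete
 * timeout (HCTO) and, with privacy enabled, address resolution of the
 * incoming peer address.
 */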
static void isr_tx(void *param)
512 {
513 uint32_t hcto;
514
515 /* TODO: MOVE to a common interface, isr_lll_radio_status? */
516 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
517 lll_prof_latency_capture();
518 }
519
520 /* Clear radio status and events */
521 radio_status_reset();
522 radio_tmr_status_reset();
523
524 if (IS_ENABLED(HAL_RADIO_GPIO_HAVE_PA_PIN) ||
525 IS_ENABLED(HAL_RADIO_GPIO_HAVE_LNA_PIN)) {
526 radio_gpio_pa_lna_disable();
527 }
528 /* TODO: MOVE ^^ */
529
530 /* setup tIFS switching */
531 radio_tmr_tifs_set(EVENT_IFS_US);
532 radio_switch_complete_and_tx(0, 0, 0, 0);
533
534 radio_pkt_rx_set(radio_pkt_scratch_get());
535 /* assert if radio packet ptr is not set and radio started rx */
536 LL_ASSERT(!radio_is_ready());
537
538 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
539 lll_prof_cputime_capture();
540 }
541
542 radio_isr_set(isr_rx, param);
543
544 #if defined(CONFIG_BT_CTLR_PRIVACY)
545 if (ull_filter_lll_rl_enabled()) {
546 uint8_t count, *irks = ull_filter_lll_irks_get(&count);
547
548 radio_ar_configure(count, irks);
549 }
550 #endif /* CONFIG_BT_CTLR_PRIVACY */
551
552 /* +/- 2us active clock jitter, +1 us hcto compensation */
553 hcto = radio_tmr_tifs_base_get() + EVENT_IFS_US + 4 + 1;
554 hcto += radio_rx_chain_delay_get(0, 0);
555 hcto += addr_us_get(0);
556 hcto -= radio_tx_chain_delay_get(0, 0);
557 radio_tmr_hcto_configure(hcto);
558
559 /* capture end of CONNECT_IND PDU, used for calculating first
560 * peripheral event.
561 */
562 radio_tmr_end_capture();
563
564 if (IS_ENABLED(CONFIG_BT_CTLR_SCAN_REQ_RSSI) ||
565 IS_ENABLED(CONFIG_BT_CTLR_CONN_RSSI)) {
566 radio_rssi_measure();
567 }
568
569 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
570 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
571 /* PA/LNA enable is overwriting packet end used in ISR
572 * profiling, hence back it up for later use.
573 */
574 lll_prof_radio_end_backup();
575 }
576
577 radio_gpio_lna_setup();
578 radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US - 4 -
579 radio_tx_chain_delay_get(0, 0) -
580 HAL_RADIO_GPIO_LNA_OFFSET);
581 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
582
583 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
584 /* NOTE: as scratch packet is used to receive, it is safe to
585 * generate profile event using rx nodes.
586 */
587 lll_prof_send();
588 }
589 }
590
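/* ISR on reception (or rx timeout) following the transmitted PDU: collect
 * radio status and, on a valid CRC, hand the PDU to isr_rx_pdu(); otherwise
 * close the advertising channel through isr_done().
 */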
static void isr_rx(void *param)
592 {
593 uint8_t trx_done;
594 uint8_t crc_ok;
595 uint8_t devmatch_ok;
596 uint8_t devmatch_id;
597 uint8_t irkmatch_ok;
598 uint8_t irkmatch_id;
599 uint8_t rssi_ready;
600
601 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
602 lll_prof_latency_capture();
603 }
604
605 /* Read radio status and events */
606 trx_done = radio_is_done();
607 if (trx_done) {
608 crc_ok = radio_crc_is_valid();
609 devmatch_ok = radio_filter_has_match();
610 devmatch_id = radio_filter_match_get();
611 if (IS_ENABLED(CONFIG_BT_CTLR_PRIVACY)) {
612 irkmatch_ok = radio_ar_has_match();
613 irkmatch_id = radio_ar_match_get();
614 } else {
615 irkmatch_ok = 0U;
616 irkmatch_id = FILTER_IDX_NONE;
617 }
618 rssi_ready = radio_rssi_is_ready();
619 } else {
620 crc_ok = devmatch_ok = irkmatch_ok = rssi_ready = 0U;
621 devmatch_id = irkmatch_id = FILTER_IDX_NONE;
622 }
623
624 /* Clear radio status and events */
625 lll_isr_status_reset();
626
627 if (IS_ENABLED(HAL_RADIO_GPIO_HAVE_PA_PIN) ||
628 IS_ENABLED(HAL_RADIO_GPIO_HAVE_LNA_PIN)) {
629 radio_gpio_pa_lna_disable();
630 }
631
632 if (!trx_done) {
633 goto isr_rx_do_close;
634 }
635
636 if (crc_ok) {
637 int err;
638
639 err = isr_rx_pdu(param, devmatch_ok, devmatch_id, irkmatch_ok,
640 irkmatch_id, rssi_ready);
641 if (!err) {
642 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
643 lll_prof_send();
644 }
645
646 return;
647 }
648 }
649
650 isr_rx_do_close:
651 radio_isr_set(isr_done, param);
652 radio_disable();
653 }
654
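/* Close the current advertising channel: if channels remain in chan_map_curr,
 * prepare and transmit on the next one; otherwise disable the device filter,
 * optionally generate an advertising indication and clean up the event.
 */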
static void isr_done(void *param)
656 {
657 struct node_rx_pdu *node_rx;
658 struct lll_adv *lll = param;
659
660 /* TODO: MOVE to a common interface, isr_lll_radio_status? */
661 /* Clear radio status and events */
662 lll_isr_status_reset();
663
664 if (IS_ENABLED(HAL_RADIO_GPIO_HAVE_PA_PIN) ||
665 IS_ENABLED(HAL_RADIO_GPIO_HAVE_LNA_PIN)) {
666 radio_gpio_pa_lna_disable();
667 }
668 /* TODO: MOVE ^^ */
669
670 #if defined(CONFIG_BT_HCI_MESH_EXT)
671 if (_radio.advertiser.is_mesh &&
672 !_radio.mesh_adv_end_us) {
673 _radio.mesh_adv_end_us = radio_tmr_end_get();
674 }
675 #endif /* CONFIG_BT_HCI_MESH_EXT */
676
677 #if defined(CONFIG_BT_PERIPHERAL)
678 if (!lll->chan_map_curr && lll->is_hdcd) {
679 lll->chan_map_curr = lll->chan_map;
680 }
681 #endif /* CONFIG_BT_PERIPHERAL */
682
683 if (lll->chan_map_curr) {
684 uint32_t start_us;
685
686 chan_prepare(lll);
687
688 #if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
689 start_us = radio_tmr_start_now(1);
690
691 radio_gpio_pa_setup();
692 radio_gpio_pa_lna_enable(start_us +
693 radio_tx_ready_delay_get(0, 0) -
694 HAL_RADIO_GPIO_PA_OFFSET);
695 #else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
696 ARG_UNUSED(start_us);
697
698 radio_tx_enable();
699 #endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
700
701 /* capture end of Tx-ed PDU, used to calculate HCTO. */
702 radio_tmr_end_capture();
703
704 return;
705 }
706
707 radio_filter_disable();
708
709 #if defined(CONFIG_BT_PERIPHERAL)
710 if (!lll->is_hdcd)
711 #endif /* CONFIG_BT_PERIPHERAL */
712 {
713 #if defined(CONFIG_BT_HCI_MESH_EXT)
714 if (_radio.advertiser.is_mesh) {
715 uint32_t err;
716
717 err = isr_close_adv_mesh();
718 if (err) {
719 return;
720 }
721 }
722 #endif /* CONFIG_BT_HCI_MESH_EXT */
723 }
724
725 #if defined(CONFIG_BT_CTLR_ADV_INDICATION)
726 node_rx = ull_pdu_rx_alloc_peek(3);
727 if (node_rx) {
728 ull_pdu_rx_alloc();
729
730 /* TODO: add other info by defining a payload struct */
731 node_rx->hdr.type = NODE_RX_TYPE_ADV_INDICATION;
732
733 ull_rx_put_sched(node_rx->hdr.link, node_rx);
734 }
735 #else /* !CONFIG_BT_CTLR_ADV_INDICATION */
736 ARG_UNUSED(node_rx);
737 #endif /* !CONFIG_BT_CTLR_ADV_INDICATION */
738
739 isr_cleanup(param);
740 }
741
static void isr_abort(void *param)
743 {
744 /* Clear radio status and events */
745 lll_isr_status_reset();
746
747 if (IS_ENABLED(HAL_RADIO_GPIO_HAVE_PA_PIN) ||
748 IS_ENABLED(HAL_RADIO_GPIO_HAVE_LNA_PIN)) {
749 radio_gpio_pa_lna_disable();
750 }
751
752 radio_filter_disable();
753
754 isr_cleanup(param);
755 }
756
static void isr_cleanup(void *param)
758 {
759 int err;
760
761 radio_isr_set(isr_race, param);
762 radio_tmr_stop();
763
764 err = lll_clk_off();
765 LL_ASSERT(!err || err == -EBUSY);
766
767 lll_done(NULL);
768 }
769
static void isr_race(void *param)
771 {
772 /* NOTE: lll_disable could have a race with ... */
773 radio_status_reset();
774 }
775
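/* Load the latest advertising (and scan response) PDU, set up the radio
 * switching (Tx then Rx for connectable/scannable PDUs, Tx only otherwise)
 * and select the next advertising channel from chan_map_curr.
 */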
static void chan_prepare(struct lll_adv *lll)
777 {
778 struct pdu_adv *pdu;
779 struct pdu_adv *scan_pdu;
780 uint8_t chan;
781 uint8_t upd = 0U;
782
783 pdu = lll_adv_data_latest_get(lll, &upd);
784 LL_ASSERT(pdu);
785 scan_pdu = lll_adv_scan_rsp_latest_get(lll, &upd);
786 LL_ASSERT(scan_pdu);
787
788 #if defined(CONFIG_BT_CTLR_PRIVACY)
789 if (upd) {
790 /* Copy the address from the adv packet we will send into the
791 * scan response.
792 */
793 memcpy(&scan_pdu->scan_rsp.addr[0],
794 &pdu->adv_ind.addr[0], BDADDR_SIZE);
795 }
796 #else
797 ARG_UNUSED(scan_pdu);
798 ARG_UNUSED(upd);
799 #endif /* !CONFIG_BT_CTLR_PRIVACY */
800
801 radio_pkt_tx_set(pdu);
802
803 if ((pdu->type != PDU_ADV_TYPE_NONCONN_IND) &&
804 (!IS_ENABLED(CONFIG_BT_CTLR_ADV_EXT) ||
805 (pdu->type != PDU_ADV_TYPE_EXT_IND))) {
806 radio_isr_set(isr_tx, lll);
807 radio_tmr_tifs_set(EVENT_IFS_US);
808 radio_switch_complete_and_rx(0);
809 } else {
810 radio_isr_set(isr_done, lll);
811 radio_switch_complete_and_disable();
812 }
813
814 chan = find_lsb_set(lll->chan_map_curr);
815 LL_ASSERT(chan);
816
817 lll->chan_map_curr &= (lll->chan_map_curr - 1);
818
819 lll_chan_set(36 + chan);
820 }
821
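/* Validate and act on a received PDU: answer an acceptable SCAN_REQ with the
 * scan response PDU (optionally reporting it to the host), or accept a
 * CONNECT_IND by allocating a connection rx node and scheduling it to the
 * ULL. A negative return value makes the caller close the event.
 */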
static inline int isr_rx_pdu(struct lll_adv *lll,
			     uint8_t devmatch_ok, uint8_t devmatch_id,
			     uint8_t irkmatch_ok, uint8_t irkmatch_id,
			     uint8_t rssi_ready)
826 {
827 struct pdu_adv *pdu_rx, *pdu_adv;
828
829 #if defined(CONFIG_BT_CTLR_PRIVACY)
830 /* An IRK match implies address resolution enabled */
831 uint8_t rl_idx = irkmatch_ok ? ull_filter_lll_rl_irk_idx(irkmatch_id) :
832 FILTER_IDX_NONE;
833 #else
834 uint8_t rl_idx = FILTER_IDX_NONE;
835 #endif /* CONFIG_BT_CTLR_PRIVACY */
836
837 pdu_rx = (void *)radio_pkt_scratch_get();
838 pdu_adv = lll_adv_data_curr_get(lll);
839
840 if ((pdu_rx->type == PDU_ADV_TYPE_SCAN_REQ) &&
841 (pdu_rx->len == sizeof(struct pdu_adv_scan_req)) &&
842 (pdu_adv->type != PDU_ADV_TYPE_DIRECT_IND) &&
843 isr_rx_sr_check(lll, pdu_adv, pdu_rx, devmatch_ok, &rl_idx)) {
844 radio_isr_set(isr_done, lll);
845 radio_switch_complete_and_disable();
846 radio_pkt_tx_set(lll_adv_scan_rsp_curr_get(lll));
847
848 /* assert if radio packet ptr is not set and radio started tx */
849 LL_ASSERT(!radio_is_ready());
850
851 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
852 lll_prof_cputime_capture();
853 }
854
855 #if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
856 if (!IS_ENABLED(CONFIG_BT_CTLR_ADV_EXT) ||
857 0 /* TODO: extended adv. scan req notification enabled */) {
858 uint32_t err;
859
860 /* Generate the scan request event */
861 err = isr_rx_sr_report(pdu_rx, rssi_ready);
862 if (err) {
863 /* Scan Response will not be transmitted */
864 return err;
865 }
866 }
867 #endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
868
869 #if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
870 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
871 /* PA/LNA enable is overwriting packet end used in ISR
872 * profiling, hence back it up for later use.
873 */
874 lll_prof_radio_end_backup();
875 }
876
877 radio_gpio_pa_setup();
878 radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() +
879 EVENT_IFS_US -
880 radio_rx_chain_delay_get(0, 0) -
881 HAL_RADIO_GPIO_PA_OFFSET);
882 #endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */
883 return 0;
884
885 #if defined(CONFIG_BT_PERIPHERAL)
886 } else if ((pdu_rx->type == PDU_ADV_TYPE_CONNECT_IND) &&
887 (pdu_rx->len == sizeof(struct pdu_adv_connect_ind)) &&
888 isr_rx_ci_check(lll, pdu_adv, pdu_rx, devmatch_ok,
889 &rl_idx) &&
890 lll->conn) {
891 struct node_rx_ftr *ftr;
892 struct node_rx_pdu *rx;
893
894 if (IS_ENABLED(CONFIG_BT_CTLR_CHAN_SEL_2)) {
895 rx = ull_pdu_rx_alloc_peek(4);
896 } else {
897 rx = ull_pdu_rx_alloc_peek(3);
898 }
899
900 if (!rx) {
901 return -ENOBUFS;
902 }
903
904 radio_isr_set(isr_abort, lll);
905 radio_disable();
906
907 /* assert if radio started tx */
908 LL_ASSERT(!radio_is_ready());
909
910 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
911 lll_prof_cputime_capture();
912 }
913
914 #if defined(CONFIG_BT_CTLR_CONN_RSSI)
915 if (rssi_ready) {
916 lll->conn->rssi_latest = radio_rssi_get();
917 }
918 #endif /* CONFIG_BT_CTLR_CONN_RSSI */
919 /* Stop further LLL radio events */
		lll->conn->periph.initiated = 1;
921
922 rx = ull_pdu_rx_alloc();
923
924 rx->hdr.type = NODE_RX_TYPE_CONNECTION;
925 rx->hdr.handle = 0xffff;
926
927 memcpy(rx->pdu, pdu_rx, (offsetof(struct pdu_adv, connect_ind) +
928 sizeof(struct pdu_adv_connect_ind)));
929
930 ftr = &(rx->rx_ftr);
931 ftr->param = lll;
932 ftr->ticks_anchor = radio_tmr_start_get();
933 ftr->radio_end_us = radio_tmr_end_get() -
934 radio_tx_chain_delay_get(0, 0);
935
936 #if defined(CONFIG_BT_CTLR_PRIVACY)
937 ftr->rl_idx = irkmatch_ok ? rl_idx : FILTER_IDX_NONE;
938 #endif /* CONFIG_BT_CTLR_PRIVACY */
939
940 if (IS_ENABLED(CONFIG_BT_CTLR_CHAN_SEL_2)) {
941 ftr->extra = ull_pdu_rx_alloc();
942 }
943
944 ull_rx_put_sched(rx->hdr.link, rx);
945
946 return 0;
947 #endif /* CONFIG_BT_PERIPHERAL */
948 }
949
950 return -EINVAL;
951 }
952
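/* Check that a received SCAN_REQ targets our advertising address and passes
 * the scan request filter policy (and, with privacy, the resolving list).
 */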
static inline bool isr_rx_sr_check(struct lll_adv *lll, struct pdu_adv *adv,
				   struct pdu_adv *sr, uint8_t devmatch_ok,
				   uint8_t *rl_idx)
956 {
957 #if defined(CONFIG_BT_CTLR_PRIVACY)
958 return ((((lll->filter_policy & BT_LE_ADV_FP_FILTER_SCAN_REQ) == 0) &&
959 ull_filter_lll_rl_addr_allowed(sr->tx_addr,
960 sr->scan_req.scan_addr,
961 rl_idx)) ||
962 (((lll->filter_policy & BT_LE_ADV_FP_FILTER_SCAN_REQ) != 0) &&
963 (devmatch_ok || ull_filter_lll_irk_in_fal(*rl_idx)))) &&
964 isr_rx_sr_adva_check(adv, sr);
965 #else
966 return (((lll->filter_policy & BT_LE_ADV_FP_FILTER_SCAN_REQ) == 0U) ||
967 devmatch_ok) &&
968 isr_rx_sr_adva_check(adv, sr);
969 #endif /* CONFIG_BT_CTLR_PRIVACY */
970 }
971
static inline bool isr_rx_sr_adva_check(struct pdu_adv *adv,
					struct pdu_adv *sr)
974 {
975 return (adv->tx_addr == sr->rx_addr) &&
976 !memcmp(adv->adv_ind.addr, sr->scan_req.adv_addr, BDADDR_SIZE);
977 }
978
979 #if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
static inline int isr_rx_sr_report(struct pdu_adv *pdu_adv_rx,
				   uint8_t rssi_ready)
982 {
983 struct node_rx_pdu *node_rx;
984 struct pdu_adv *pdu_adv;
985 uint8_t pdu_len;
986
987 node_rx = ull_pdu_rx_alloc_peek(3);
988 if (!node_rx) {
989 return -ENOBUFS;
990 }
991 ull_pdu_rx_alloc();
992
993 /* Prepare the report (scan req) */
994 node_rx->hdr.type = NODE_RX_TYPE_SCAN_REQ;
995 node_rx->hdr.handle = 0xffff;
996
997 /* Make a copy of PDU into Rx node (as the received PDU is in the
998 * scratch buffer), and save the RSSI value.
999 */
1000 pdu_adv = (void *)node_rx->pdu;
1001 pdu_len = offsetof(struct pdu_adv, payload) + pdu_adv_rx->len;
1002 memcpy(pdu_adv, pdu_adv_rx, pdu_len);
1003
1004 node_rx->rx_ftr.rssi = (rssi_ready) ? (radio_rssi_get() & 0x7f) :
1005 0x7f;
1006
1007 ull_rx_put_sched(node_rx->hdr.link, node_rx);
1008
1009 return 0;
1010 }
1011 #endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
1012
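/* Check that a received CONNECT_IND passes the connect filter policy; for
 * directed advertising the filter policy is ignored and the advertiser and
 * target addresses are matched instead.
 */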
static inline bool isr_rx_ci_check(struct lll_adv *lll, struct pdu_adv *adv,
				   struct pdu_adv *ci, uint8_t devmatch_ok,
				   uint8_t *rl_idx)
1016 {
1017 /* LL 4.3.2: filter policy shall be ignored for directed adv */
1018 if (adv->type == PDU_ADV_TYPE_DIRECT_IND) {
1019 #if defined(CONFIG_BT_CTLR_PRIVACY)
1020 return ull_filter_lll_rl_addr_allowed(ci->tx_addr,
1021 ci->connect_ind.init_addr,
1022 rl_idx) &&
1023 #else
1024 return (1) &&
1025 #endif
1026 isr_rx_ci_adva_check(adv, ci) &&
1027 isr_rx_ci_tgta_check(lll, adv, ci, *rl_idx);
1028 }
1029
1030 #if defined(CONFIG_BT_CTLR_PRIVACY)
1031 return ((((lll->filter_policy & BT_LE_ADV_FP_FILTER_CONN_IND) == 0) &&
1032 ull_filter_lll_rl_addr_allowed(ci->tx_addr,
1033 ci->connect_ind.init_addr,
1034 rl_idx)) ||
1035 (((lll->filter_policy & BT_LE_ADV_FP_FILTER_CONN_IND) != 0) &&
1036 (devmatch_ok || ull_filter_lll_irk_in_fal(*rl_idx)))) &&
1037 isr_rx_ci_adva_check(adv, ci);
1038 #else
1039 return (((lll->filter_policy & BT_LE_ADV_FP_FILTER_CONN_IND) == 0) ||
1040 (devmatch_ok)) &&
1041 isr_rx_ci_adva_check(adv, ci);
1042 #endif /* CONFIG_BT_CTLR_PRIVACY */
1043 }
1044
static inline bool isr_rx_ci_tgta_check(struct lll_adv *lll,
					struct pdu_adv *adv, struct pdu_adv *ci,
					uint8_t rl_idx)
1048 {
1049 #if defined(CONFIG_BT_CTLR_PRIVACY)
1050 if (rl_idx != FILTER_IDX_NONE && lll->rl_idx != FILTER_IDX_NONE) {
1051 return rl_idx == lll->rl_idx;
1052 }
1053 #endif /* CONFIG_BT_CTLR_PRIVACY */
1054 return (adv->rx_addr == ci->tx_addr) &&
1055 !memcmp(adv->direct_ind.tgt_addr, ci->connect_ind.init_addr,
1056 BDADDR_SIZE);
1057 }
1058
static inline bool isr_rx_ci_adva_check(struct pdu_adv *adv,
					struct pdu_adv *ci)
1061 {
1062 return (adv->tx_addr == ci->rx_addr) &&
1063 (((adv->type == PDU_ADV_TYPE_DIRECT_IND) &&
1064 !memcmp(adv->direct_ind.adv_addr, ci->connect_ind.adv_addr,
1065 BDADDR_SIZE)) ||
1066 (!memcmp(adv->adv_ind.addr, ci->connect_ind.adv_addr,
1067 BDADDR_SIZE)));
1068 }
1069