1 /*
2 * Copyright (c) 2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8 #include <string.h>
9
10 #include <soc.h>
11 #include <zephyr/sys/byteorder.h>
12 #include <zephyr/sys/util.h>
13 #include <zephyr/bluetooth/hci_types.h>
14
15 #include "hal/cpu.h"
16 #include "hal/ccm.h"
17 #include "hal/radio.h"
18 #include "hal/ticker.h"
19
20 #include "util/util.h"
21 #include "util/mem.h"
22 #include "util/memq.h"
23
24 #include "pdu_df.h"
25 #include "lll/pdu_vendor.h"
26 #include "pdu.h"
27
28 #include "lll.h"
29 #include "lll_vendor.h"
30 #include "lll_clock.h"
31 #include "lll_chan.h"
32 #include "lll_sync_iso.h"
33
34 #include "lll_internal.h"
35 #include "lll_tim_internal.h"
36 #include "lll_prof_internal.h"
37
38 #include "ll_feat.h"
39
40 #include "hal/debug.h"
41
42 static int init_reset(void);
43 static void prepare(void *param);
44 static void create_prepare_bh(void *param);
45 static void prepare_bh(void *param);
46 static int create_prepare_cb(struct lll_prepare_param *p);
47 static int prepare_cb(struct lll_prepare_param *p);
48 static int prepare_cb_common(struct lll_prepare_param *p);
49 static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb);
50 static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
51 static void isr_rx_estab(void *param);
52 static void isr_rx(void *param);
53 static void isr_rx_done(void *param);
54 static void isr_done(void *param);
55 static void next_chan_calc(struct lll_sync_iso *lll, uint16_t event_counter,
56 uint16_t data_chan_id);
57 static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
58 uint16_t handle, struct node_rx_pdu *node_rx);
59 static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
60 uint16_t latency, uint8_t bn,
61 uint16_t handle,
62 struct node_rx_pdu *node_rx);
63 static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu);
64
65 /* FIXME: Optimize by moving to a common place, as similar variable is used for
66 * connections too.
67 */
68 static uint8_t trx_cnt;
69 static uint8_t crc_ok_anchor;
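/* NOTE: trx_cnt counts the subevents with successful reception in the
 * current BIG event, and crc_ok_anchor records the CRC status at the
 * BIG anchor point; both are reset in prepare_cb_common() and reported
 * back to ULL through the event done extra information.
 */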
70
int lll_sync_iso_init(void)
72 {
73 int err;
74
75 err = init_reset();
76 if (err) {
77 return err;
78 }
79
80 return 0;
81 }
82
int lll_sync_iso_reset(void)
84 {
85 int err;
86
87 err = init_reset();
88 if (err) {
89 return err;
90 }
91
92 return 0;
93 }
94
void lll_sync_iso_create_prepare(void *param)
96 {
97 prepare(param);
98 create_prepare_bh(param);
99 }
100
void lll_sync_iso_prepare(void *param)
102 {
103 prepare(param);
104 prepare_bh(param);
105 }
106
void lll_sync_iso_flush(uint8_t handle, struct lll_sync_iso *lll)
108 {
109 ARG_UNUSED(handle);
110 ARG_UNUSED(lll);
111 }
112
static int init_reset(void)
114 {
115 return 0;
116 }
117
static void prepare(void *param)
119 {
120 struct lll_prepare_param *p;
121 struct lll_sync_iso *lll;
122 uint16_t elapsed;
123 int err;
124
125 err = lll_hfclock_on();
126 LL_ASSERT(err >= 0);
127
128 p = param;
129
130 /* Instants elapsed */
131 elapsed = p->lazy + 1U;
132
133 lll = p->param;
134
135 /* Save the (latency + 1) for use in event */
136 lll->latency_prepare += elapsed;
137
138 /* Accumulate window widening */
139 lll->window_widening_prepare_us += lll->window_widening_periodic_us *
140 elapsed;
141 if (lll->window_widening_prepare_us > lll->window_widening_max_us) {
142 lll->window_widening_prepare_us = lll->window_widening_max_us;
143 }
144 }
145
static void create_prepare_bh(void *param)
147 {
148 int err;
149
150 /* Invoke common pipeline handling of prepare */
151 err = lll_prepare(is_abort_cb, abort_cb, create_prepare_cb, 0U,
152 param);
153 LL_ASSERT(!err || err == -EINPROGRESS);
154 }
155
static void prepare_bh(void *param)
157 {
158 int err;
159
160 /* Invoke common pipeline handling of prepare */
161 err = lll_prepare(is_abort_cb, abort_cb, prepare_cb, 0U, param);
162 LL_ASSERT(!err || err == -EINPROGRESS);
163 }
164
static int create_prepare_cb(struct lll_prepare_param *p)
166 {
167 int err;
168
169 err = prepare_cb_common(p);
170 if (err) {
171 DEBUG_RADIO_START_O(1);
172 return 0;
173 }
174
175 radio_isr_set(isr_rx_estab, p->param);
176
177 DEBUG_RADIO_START_O(1);
178 return 0;
179 }
180
static int prepare_cb(struct lll_prepare_param *p)
182 {
183 int err;
184
185 err = prepare_cb_common(p);
186 if (err) {
187 DEBUG_RADIO_START_O(1);
188 return 0;
189 }
190
191 radio_isr_set(isr_rx, p->param);
192
193 DEBUG_RADIO_START_O(1);
194 return 0;
195 }
196
static int prepare_cb_common(struct lll_prepare_param *p)
198 {
199 struct lll_sync_iso_stream *stream;
200 struct node_rx_pdu *node_rx;
201 struct lll_sync_iso *lll;
202 uint32_t ticks_at_event;
203 uint32_t ticks_at_start;
204 uint16_t stream_handle;
205 uint16_t event_counter;
206 uint8_t access_addr[4];
207 uint16_t data_chan_id;
208 uint8_t data_chan_use;
209 uint32_t remainder_us;
210 uint8_t crc_init[3];
211 struct ull_hdr *ull;
212 uint32_t remainder;
213 uint32_t hcto;
214 uint32_t ret;
215 uint8_t phy;
216
217 DEBUG_RADIO_START_O(1);
218
219 lll = p->param;
220
221 /* Deduce the latency */
222 lll->latency_event = lll->latency_prepare - 1U;
223
224 /* Calculate the current event counter value */
225 event_counter = (lll->payload_count / lll->bn) + lll->latency_event;
226
227 /* Update BIS packet counter to next value */
228 lll->payload_count += (lll->latency_prepare * lll->bn);
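/* NOTE: payload_count is now pre-incremented to the end of this event;
 * users of the current event payload numbers, e.g. the CCM counter
 * setup and the ISO rx meta data, subtract bn again to get back to the
 * first payload of this event.
 */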
229
230 /* Reset accumulated latencies */
231 lll->latency_prepare = 0U;
232
233 /* Current window widening */
234 lll->window_widening_event_us += lll->window_widening_prepare_us;
235 lll->window_widening_prepare_us = 0U;
236 if (lll->window_widening_event_us > lll->window_widening_max_us) {
237 lll->window_widening_event_us = lll->window_widening_max_us;
238 }
239
240 /* Initialize trx chain count */
241 trx_cnt = 0U;
242
243 /* Initialize anchor point CRC ok flag */
244 crc_ok_anchor = 0U;
245
246 /* Initialize to mandatory parameter values */
247 lll->bis_curr = 1U;
248 lll->ptc_curr = 0U;
249 lll->irc_curr = 1U;
250 lll->bn_curr = 1U;
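/* NOTE: The receive loop in isr_rx() walks the subevents in the order
 * bn_curr (burst), irc_curr (repetition), ptc_curr (pre-transmission)
 * for each synchronized BIS, hence the counters start at the first
 * burst of the first repetition of BIS 1.
 */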
251
252 /* Initialize control subevent flag */
253 lll->ctrl = 0U;
254
255 /* Calculate the Access Address for the BIS event */
256 util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr, access_addr);
257 data_chan_id = lll_chan_id(access_addr);
258
259 /* Calculate the radio channel to use for ISO event and hence store the
260 * channel to be used for control subevent.
261 */
262 data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
263 lll->data_chan_map,
264 lll->data_chan_count,
265 &lll->data_chan_prn_s,
266 &lll->data_chan_remap_idx);
267
268 /* Initialize stream current */
269 lll->stream_curr = 0U;
270
271 /* Skip subevents until first selected BIS */
272 stream_handle = lll->stream_handle[lll->stream_curr];
273 stream = ull_sync_iso_lll_stream_get(stream_handle);
274 if ((stream->bis_index != lll->bis_curr) &&
275 (stream->bis_index <= lll->num_bis)) {
276 /* First selected BIS */
277 lll->bis_curr = stream->bis_index;
278
279 /* Calculate the Access Address for the current BIS */
280 util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr,
281 access_addr);
282 data_chan_id = lll_chan_id(access_addr);
283
284 /* Calculate the channel id for the next BIS subevent */
285 data_chan_use = lll_chan_iso_event(event_counter,
286 data_chan_id,
287 lll->data_chan_map,
288 lll->data_chan_count,
289 &lll->data_chan_prn_s,
290 &lll->data_chan_remap_idx);
291 }
292
/* Calculate the CRC init value for the BIS event, preset with the
 * BaseCRCInit value from the BIGInfo in the most significant 2 octets
 * and the BIS_Number for the specific BIS in the least significant
 * octet.
 */
298 crc_init[0] = lll->bis_curr;
299 (void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));
300
301 /* Start setting up of Radio h/w */
302 radio_reset();
303
304 phy = lll->phy;
305 radio_phy_set(phy, PHY_FLAGS_S8);
306 radio_aa_set(access_addr);
307 radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));
308 lll_chan_set(data_chan_use);
309
310 /* By design, there shall always be one free node rx available for
311 * setting up radio for new PDU reception.
312 */
313 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
314 LL_ASSERT(node_rx);
315
316 /* Encryption */
317 if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
318 lll->enc) {
319 uint64_t payload_count;
320 uint8_t pkt_flags;
321
322 payload_count = lll->payload_count - lll->bn;
323 lll->ccm_rx.counter = payload_count;
324
325 (void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
326 mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);
327
328 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
329 phy,
330 RADIO_PKT_CONF_CTE_DISABLED);
331 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
332 (lll->max_pdu + PDU_MIC_SIZE), pkt_flags);
333 radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, phy,
334 RADIO_PKT_CONF_PDU_TYPE_BIS,
335 node_rx->pdu));
336 } else {
337 uint8_t pkt_flags;
338
339 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
340 phy,
341 RADIO_PKT_CONF_CTE_DISABLED);
342 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, lll->max_pdu,
343 pkt_flags);
344 radio_pkt_rx_set(node_rx->pdu);
345 }
346
347 radio_switch_complete_and_disable();
348
349 ticks_at_event = p->ticks_at_expire;
350 ull = HDR_LLL2ULL(lll);
351 ticks_at_event += lll_event_offset_get(ull);
352
353 ticks_at_start = ticks_at_event;
354 ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
355
356 remainder = p->remainder;
357 remainder_us = radio_tmr_start(0U, ticks_at_start, remainder);
358
359 radio_tmr_ready_save(remainder_us);
360 radio_tmr_aa_save(0U);
361 radio_tmr_aa_capture();
362
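/* NOTE: The header complete timeout for the first subevent covers the
 * expected anchor point plus-minus the event jitter, ticker resolution
 * margin and accumulated window widening (hence the doubling below),
 * plus any event level window size, radio ready delay, access address
 * duration and rx chain delay.
 */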
363 hcto = remainder_us +
364 ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
365 lll->window_widening_event_us) << 1) +
366 lll->window_size_event_us;
367 hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
368 hcto += addr_us_get(lll->phy);
369 hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
370 radio_tmr_hcto_configure(hcto);
371
372 radio_tmr_end_capture();
373 radio_rssi_measure();
374
375 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
376 radio_gpio_lna_setup();
377
378 radio_gpio_pa_lna_enable(remainder_us +
379 radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8) -
380 HAL_RADIO_GPIO_LNA_OFFSET);
381 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
382
383 #if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
384 (EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
385 uint32_t overhead;
386
387 overhead = lll_preempt_calc(ull, (TICKER_ID_SCAN_SYNC_ISO_BASE +
388 ull_sync_iso_lll_index_get(lll)), ticks_at_event);
389 /* check if preempt to start has changed */
390 if (overhead) {
391 LL_ASSERT_OVERHEAD(overhead);
392
393 radio_isr_set(lll_isr_abort, lll);
394 radio_disable();
395
396 return -ECANCELED;
397 }
398 #endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
399
400 ret = lll_prepare_done(lll);
401 LL_ASSERT(!ret);
402
403 /* Calculate ahead the next subevent channel index */
404 next_chan_calc(lll, event_counter, data_chan_id);
405
406 return 0;
407 }
408
static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb)
410 {
411 if (next != curr) {
412 struct lll_sync_iso *lll;
413
414 lll = curr;
415 if (lll->bn_curr <= lll->bn) {
416 return 0;
417 }
418 }
419
420 return -ECANCELED;
421 }
422
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
424 {
425 struct event_done_extra *e;
426 int err;
427
428 /* NOTE: This is not a prepare being cancelled */
429 if (!prepare_param) {
430 radio_isr_set(isr_done, param);
431 radio_disable();
432 return;
433 }
434
/* NOTE: Else clean up the top half preparations of the aborted event
 * currently in the preparation pipeline.
 */
438 err = lll_hfclock_off();
439 LL_ASSERT(err >= 0);
440
441 /* Extra done event, to check sync lost */
442 e = ull_event_done_extra_get();
443 LL_ASSERT(e);
444
445 e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
446 e->estab_failed = 0U;
447 e->trx_cnt = 0U;
448 e->crc_valid = 0U;
449
450 lll_done(param);
451 }
452
static void isr_rx_estab(void *param)
454 {
455 struct event_done_extra *e;
456 struct lll_sync_iso *lll;
457 uint8_t trx_done;
458 uint8_t crc_ok;
459
460 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
461 lll_prof_latency_capture();
462 }
463
464 /* Read radio status and events */
465 trx_done = radio_is_done();
466 if (trx_done) {
467 crc_ok = radio_crc_is_valid();
468 trx_cnt++;
469 } else {
470 crc_ok = 0U;
471 }
472
473 /* Clear radio rx status and events */
474 lll_isr_rx_status_reset();
475
476 /* Get reference to LLL context */
477 lll = param;
478
479 /* Check for MIC failures for encrypted Broadcast ISO streams */
480 if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) && crc_ok && lll->enc) {
481 struct node_rx_pdu *node_rx;
482 struct pdu_bis *pdu;
483
484 /* By design, there shall always be one free node rx available when setting up radio
485 * for new PDU reception.
486 */
487 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
488 LL_ASSERT(node_rx);
489
490 /* Get reference to received PDU and validate MIC for non-empty PDU */
491 pdu = (void *)node_rx->pdu;
492 if (pdu->len) {
493 bool mic_failure;
494 uint32_t done;
495
496 done = radio_ccm_is_done();
497 LL_ASSERT(done);
498
499 mic_failure = !radio_ccm_mic_is_valid();
500 if (mic_failure) {
501 lll->term_reason = BT_HCI_ERR_TERM_DUE_TO_MIC_FAIL;
502 }
503 }
504 }
505
506 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
507 lll_prof_cputime_capture();
508 }
509
510 /* Calculate and place the drift information in done event */
511 e = ull_event_done_extra_get();
512 LL_ASSERT(e);
513
514 e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_ESTAB;
515 e->estab_failed = lll->term_reason ? 1U : 0U;
516 e->trx_cnt = trx_cnt;
517 e->crc_valid = crc_ok;
518
519 if (trx_cnt) {
520 e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
521 e->drift.start_to_address_actual_us =
522 radio_tmr_aa_get() - radio_tmr_ready_get();
523 e->drift.window_widening_event_us =
524 lll->window_widening_event_us;
525
526 /* Reset window widening, as anchor point sync-ed */
527 lll->window_widening_event_us = 0U;
528 lll->window_size_event_us = 0U;
529 }
530
531 lll_isr_cleanup(param);
532
533 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
534 lll_prof_send();
535 }
536 }
537
static void isr_rx(void *param)
539 {
540 struct lll_sync_iso_stream *stream;
541 struct lll_sync_iso *lll;
542 uint8_t access_addr[4];
543 uint16_t data_chan_id;
544 uint8_t data_chan_use;
545 uint8_t crc_init[3];
546 uint8_t stream_curr;
547 uint8_t rssi_ready;
548 uint32_t start_us;
549 uint8_t new_burst;
550 uint8_t trx_done;
551 uint8_t bis_idx;
552 uint8_t skipped;
553 uint8_t crc_ok;
554 uint32_t hcto;
555 uint8_t bis;
556 uint8_t nse;
557
558 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
559 lll_prof_latency_capture();
560 }
561
562 /* Read radio status and events */
563 trx_done = radio_is_done();
564 if (!trx_done) {
565 /* Clear radio rx status and events */
566 lll_isr_rx_status_reset();
567
568 /* initialize LLL context reference */
569 lll = param;
570
571 /* BIS index */
572 bis_idx = lll->bis_curr - 1U;
573
574 /* Current stream */
575 stream_curr = lll->stream_curr;
576
577 goto isr_rx_done;
578 }
579
580 crc_ok = radio_crc_is_valid();
581 rssi_ready = radio_rssi_is_ready();
582 trx_cnt++;
583
584 /* initialize LLL context reference */
585 lll = param;
586
587 /* Save the AA captured for the first anchor point sync */
588 if (!radio_tmr_aa_restore()) {
589 const struct lll_sync_iso_stream *sync_stream;
590 uint32_t se_offset_us;
591 uint8_t se;
592
593 crc_ok_anchor = crc_ok;
594
595 sync_stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
596 se = ((lll->bis_curr - sync_stream->bis_index) *
597 ((lll->bn * lll->irc) + lll->ptc)) +
598 ((lll->irc_curr - 1U) * lll->bn) + (lll->bn_curr - 1U) +
599 lll->ptc_curr + lll->ctrl;
600 se_offset_us = lll->sub_interval * se;
601 radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
602 radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
603 }
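/* NOTE: se above is the zero-based index of the subevent just received,
 * counted from the BIG event anchor point; subtracting its offset
 * normalizes the captured timestamps back to the anchor point for use
 * in the subsequent subevent timing and drift calculations.
 */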
604
605 /* Clear radio rx status and events */
606 lll_isr_rx_status_reset();
607
608 /* BIS index */
609 bis_idx = lll->bis_curr - 1U;
610
611 /* Current stream */
612 stream_curr = lll->stream_curr;
613
614 /* Check CRC and generate ISO Data PDU */
615 if (crc_ok) {
616 struct lll_sync_iso_stream *sync_stream;
617 struct node_rx_pdu *node_rx;
618 uint32_t payload_offset;
619 uint16_t payload_index;
620 uint16_t stream_handle;
621 struct pdu_bis *pdu;
622
623 /* Check if Control Subevent being received */
624 if ((lll->bn_curr == lll->bn) &&
625 (lll->irc_curr == lll->irc) &&
626 (lll->ptc_curr == lll->ptc) &&
627 (lll->bis_curr == lll->num_bis) &&
628 lll->ctrl) {
629 lll->cssn_curr = lll->cssn_next;
630
631 /* Check the dedicated Control PDU buffer */
632 pdu = radio_pkt_big_ctrl_get();
633 if (pdu->ll_id == PDU_BIS_LLID_CTRL) {
634 isr_rx_ctrl_recv(lll, pdu);
635 }
636
637 goto isr_rx_done;
638 } else {
639 /* Check if there are 2 free rx buffers, one will be
640 * consumed to receive the current PDU, and the other
641 * is to ensure a PDU can be setup for the radio DMA to
642 * receive in the next sub_interval/iso_interval.
643 */
644 node_rx = ull_iso_pdu_rx_alloc_peek(2U);
645 if (!node_rx) {
646 goto isr_rx_done;
647 }
648 }
649
650 pdu = (void *)node_rx->pdu;
651
652 /* Check for new control PDU in control subevent */
653 if (pdu->cstf && (pdu->cssn != lll->cssn_curr)) {
654 lll->cssn_next = pdu->cssn;
655 /* TODO: check same CSSN is used in every subevent */
656 }
657
/* Check for payload buffer overflow.
 * Ensure the offset does not exceed payload_count_max, the number of
 * buffers allocated for the sliding window.
 */
662 payload_offset = (lll->latency_event * lll->bn) + (lll->bn_curr - 1U) +
663 (lll->ptc_curr * lll->pto);
664 if (payload_offset >= lll->payload_count_max) {
665 goto isr_rx_done;
666 }
667
668 /* Calculate the payload index in the sliding window */
669 payload_index = lll->payload_tail + payload_offset;
670 if (payload_index >= lll->payload_count_max) {
671 payload_index -= lll->payload_count_max;
672 }
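/* NOTE: lll->payload[][] is used as a circular buffer of
 * payload_count_max entries per stream, with payload_tail indexing the
 * oldest pending payload; repeated and pre-transmitted PDUs hence land
 * in the slot of the event they belong to.
 */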
673
674 /* Get reference to stream context */
675 stream_handle = lll->stream_handle[stream_curr];
676 sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
677
/* Store the received PDU if it is for a selected stream and has not
 * already been received (say, in a previous event as a pre-transmitted
 * PDU).
 */
681 if ((lll->bis_curr == sync_stream->bis_index) && pdu->len &&
682 !lll->payload[stream_curr][payload_index]) {
683 uint16_t handle;
684
685 if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
686 lll->enc) {
687 bool mic_failure;
688 uint32_t done;
689
690 done = radio_ccm_is_done();
691 LL_ASSERT(done);
692
693 mic_failure = !radio_ccm_mic_is_valid();
694 if (mic_failure) {
695 lll->term_reason = BT_HCI_ERR_TERM_DUE_TO_MIC_FAIL;
696
697 goto isr_rx_mic_failure;
698 }
699 }
700
701 ull_iso_pdu_rx_alloc();
702
703 handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
704 isr_rx_iso_data_valid(lll, handle, node_rx);
705
706 lll->payload[stream_curr][payload_index] = node_rx;
707 }
708 }
709
710 isr_rx_done:
711 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
712 lll_prof_cputime_capture();
713 }
714
715 uint8_t bis_idx_old = bis_idx;
716
717 new_burst = 0U;
718 skipped = 0U;
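/* NOTE: The code below picks the next subevent that still needs
 * reception: remaining bursts (bn_curr), then repetitions (irc_curr),
 * then pre-transmissions (ptc_curr) of the current BIS, then the next
 * selected BIS, and finally the control subevent. skipped counts the
 * subevents passed over so that the channel selection PRN state can be
 * advanced accordingly before the next reception.
 */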
719
720 isr_rx_find_subevent:
721 /* FIXME: Sequential or Interleaved BIS subevents decision */
722 /* NOTE: below code is for Sequential Rx only */
723
724 /* Find the next (bn_curr)th subevent to receive PDU */
725 while (lll->bn_curr < lll->bn) {
726 uint32_t payload_offset;
727 uint16_t payload_index;
728
729 /* Next burst number to check for reception required */
730 lll->bn_curr++;
731
/* Check for payload buffer overflow.
 * Ensure the offset does not exceed payload_count_max, the number of
 * buffers allocated for the sliding window.
 */
736 payload_offset = (lll->latency_event * lll->bn) + (lll->bn_curr - 1U);
737 if (payload_offset >= lll->payload_count_max) {
738 /* (bn_curr)th Rx PDU skip subevent */
739 skipped++;
740
741 continue;
742 }
743
744 /* Find the index of the (bn_curr)th Rx PDU buffer */
745 payload_index = lll->payload_tail + payload_offset;
746 if (payload_index >= lll->payload_count_max) {
747 payload_index -= lll->payload_count_max;
748 }
749
750 /* Check if (bn_curr)th Rx PDU has been received */
751 if (!lll->payload[stream_curr][payload_index]) {
752 /* Receive the (bn_curr)th Rx PDU of bis_curr */
753 bis = lll->bis_curr;
754
755 goto isr_rx_next_subevent;
756 }
757
758 /* (bn_curr)th Rx PDU already received, skip subevent */
759 skipped++;
760 }
761
762 /* Find the next repetition (irc_curr)th subevent to receive PDU */
763 if (lll->irc_curr < lll->irc) {
764 if (!new_burst) {
765 uint32_t payload_offset;
766 uint16_t payload_index;
767
768 /* Increment to next repetition count and be at first
769 * burst count for it.
770 */
771 lll->bn_curr = 1U;
772 lll->irc_curr++;
773
/* Check payload buffer overflow */
/* FIXME: Typically we should not have high latency, but do have an
 * assertion check to ensure we do not roll over in the payload_index
 * variable use. Alternatively, add an implementation to correctly
 * skip subevents, as buffers at these high offsets are unavailable.
 */
780 payload_offset = (lll->latency_event * lll->bn);
781 LL_ASSERT(payload_offset <= UINT8_MAX);
782
783 /* Find the index of the (irc_curr)th bn = 1 Rx PDU
784 * buffer.
785 */
786 payload_index = lll->payload_tail + payload_offset;
787 if (payload_index >= lll->payload_count_max) {
788 payload_index -= lll->payload_count_max;
789 }
790
791 /* Check if (irc_curr)th bn = 1 Rx PDU has been
792 * received.
793 */
794 if (!lll->payload[stream_curr][payload_index]) {
795 /* Receive the (irc_curr)th bn = 1 Rx PDU of
796 * bis_curr.
797 */
798 bis = lll->bis_curr;
799
800 goto isr_rx_next_subevent;
801 } else {
802 /* bn = 1 Rx PDU already received, skip
803 * subevent.
804 */
805 skipped++;
806
/* Flag to skip successive repetitions if all
 * bn PDUs have been received, i.e. the bn
 * loop above did not find a PDU to be received.
 */
811 new_burst = 1U;
812
813 /* Find the missing (bn_curr)th Rx PDU of
814 * bis_curr
815 */
816 goto isr_rx_find_subevent;
817 }
818 } else {
819 /* Skip all successive repetition reception as all
820 * bn PDUs have been received.
821 */
822 skipped += (lll->irc - lll->irc_curr) * lll->bn;
823 lll->irc_curr = lll->irc;
824 }
825 }
826
827 /* Next pre-transmission subevent */
828 if (lll->ptc_curr < lll->ptc) {
829 lll->ptc_curr++;
830
831 /* TODO: optimize to skip pre-transmission subevent in case
832 * of insufficient buffers in sliding window.
833 */
834
835 /* Receive the (ptc_curr)th Rx PDU of bis_curr */
836 bis = lll->bis_curr;
837
838 goto isr_rx_next_subevent;
839 }
840
841 /* Next BIS */
842 if (lll->bis_curr < lll->num_bis) {
843 struct lll_sync_iso_stream *sync_stream;
844 uint16_t stream_handle;
845
846 /* Next selected stream */
847 if ((lll->stream_curr + 1U) < lll->stream_count) {
848 stream_curr = ++lll->stream_curr;
849 stream_handle = lll->stream_handle[stream_curr];
850 sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
851 if (sync_stream->bis_index <= lll->num_bis) {
852 uint32_t payload_offset;
853 uint16_t payload_index;
854 uint8_t bis_idx_new;
855
856 lll->bis_curr = sync_stream->bis_index;
857 lll->ptc_curr = 0U;
858 lll->irc_curr = 1U;
859 lll->bn_curr = 1U;
860
861 /* new BIS index */
862 bis_idx_new = lll->bis_curr - 1U;
863
/* Check payload buffer overflow */
/* FIXME: Typically we should not have high latency, but do have an
 * assertion check to ensure we do not roll over in the
 * payload_index variable use. Alternatively, add an
 * implementation to correctly skip subevents, as buffers at
 * these high offsets are unavailable.
 */
871 payload_offset = (lll->latency_event * lll->bn);
872 LL_ASSERT(payload_offset <= UINT8_MAX);
873
874 /* Find the index of the (irc_curr)th bn = 1 Rx
875 * PDU buffer.
876 */
877 payload_index = lll->payload_tail + payload_offset;
878 if (payload_index >= lll->payload_count_max) {
879 payload_index -= lll->payload_count_max;
880 }
881
882 /* Check if (irc_curr)th bn = 1 Rx PDU has been
883 * received.
884 */
885 if (!lll->payload[stream_curr][payload_index]) {
886 /* bn = 1 Rx PDU not received */
887 skipped = (bis_idx_new - bis_idx) *
888 ((lll->bn * lll->irc) +
889 lll->ptc);
890
891 /* Receive the (irc_curr)th bn = 1 Rx
892 * PDU of bis_curr.
893 */
894 bis = lll->bis_curr;
895
896 goto isr_rx_next_subevent;
897 } else {
898 /* bn = 1 Rx PDU already received, skip
899 * subevent.
900 */
901 skipped = ((bis_idx_new - bis_idx) *
902 ((lll->bn * lll->irc) +
903 lll->ptc)) + 1U;
904
905 /* BIS index */
906 bis_idx = lll->bis_curr - 1U;
907
908 /* Find the missing (bn_curr)th Rx PDU
909 * of bis_curr
910 */
911 goto isr_rx_find_subevent;
912 }
913 } else {
914 lll->bis_curr = lll->num_bis;
915 }
916 } else {
917 lll->bis_curr = lll->num_bis;
918 }
919 }
920
921 /* Control subevent */
922 if (!lll->ctrl && (lll->cssn_next != lll->cssn_curr)) {
923 uint8_t pkt_flags;
924
/* Receive the control PDU and close the BIG event
 * thereafter.
 */
928 lll->ctrl = 1U;
929
930 /* control subevent to use bis = 0 and se_n = 1 */
931 bis = 0U;
932
/* Configure the radio to receive a Control PDU, which can have a
 * greater PDU length than the max_pdu length.
 */
936 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
937 lll->phy,
938 RADIO_PKT_CONF_CTE_DISABLED);
939 if (lll->enc) {
940 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
941 (sizeof(struct pdu_big_ctrl) + PDU_MIC_SIZE),
942 pkt_flags);
943 } else {
944 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
945 sizeof(struct pdu_big_ctrl),
946 pkt_flags);
947 }
948
949 goto isr_rx_next_subevent;
950 }
951
952 isr_rx_mic_failure:
953 isr_rx_done(param);
954
955 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
956 lll_prof_send();
957 }
958
959 return;
960
961 isr_rx_next_subevent:
962 /* Calculate the Access Address for the BIS event */
963 util_bis_aa_le32(bis, lll->seed_access_addr, access_addr);
964 data_chan_id = lll_chan_id(access_addr);
965
/* Calculate the CRC init value for the BIS event, preset with the
 * BaseCRCInit value from the BIGInfo in the most significant 2 octets
 * and the BIS_Number for the specific BIS in the least significant
 * octet.
 */
971 crc_init[0] = bis;
972 (void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));
973
974 radio_aa_set(access_addr);
975 radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));
976
977 /* Set the channel to use */
978 if (!bis) {
979 const uint16_t event_counter =
980 (lll->payload_count / lll->bn) - 1U;
981
982 /* Calculate the radio channel to use for ISO event */
983 data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
984 lll->data_chan_map,
985 lll->data_chan_count,
986 &lll->data_chan_prn_s,
987 &lll->data_chan_remap_idx);
988 } else if (!skipped) {
989 data_chan_use = lll->next_chan_use;
990 } else {
991 uint8_t bis_idx_new = lll->bis_curr - 1U;
992
993 /* Initialise to avoid compile error */
994 data_chan_use = 0U;
995
996 if (bis_idx_old != bis_idx_new) {
997 const uint16_t event_counter =
998 (lll->payload_count / lll->bn) - 1U;
999
1000 /* Calculate the radio channel to use for next BIS */
1001 data_chan_use = lll_chan_iso_event(event_counter,
1002 data_chan_id,
1003 lll->data_chan_map,
1004 lll->data_chan_count,
1005 &lll->data_chan_prn_s,
1006 &lll->data_chan_remap_idx);
1007
1008 skipped -= (bis_idx_new - bis_idx_old) *
1009 ((lll->bn * lll->irc) + lll->ptc);
1010 }
1011
1012 while (skipped--) {
1013 /* Calculate the radio channel to use for subevent */
1014 data_chan_use = lll_chan_iso_subevent(data_chan_id,
1015 lll->data_chan_map,
1016 lll->data_chan_count,
1017 &lll->data_chan_prn_s,
1018 &lll->data_chan_remap_idx);
1019 }
1020 }
1021
1022 lll_chan_set(data_chan_use);
1023
1024 /* Encryption */
1025 if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
1026 lll->enc) {
1027 uint64_t payload_count;
1028 struct pdu_bis *pdu;
1029
1030 payload_count = lll->payload_count - lll->bn;
1031 if (bis) {
1032 struct node_rx_pdu *node_rx;
1033
1034 payload_count += (lll->bn_curr - 1U) +
1035 (lll->ptc_curr * lll->pto);
1036
1037 /* By design, there shall always be one free node rx
1038 * available for setting up radio for new PDU reception.
1039 */
1040 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
1041 LL_ASSERT(node_rx);
1042
1043 pdu = (void *)node_rx->pdu;
1044 } else {
1045 /* Use the dedicated Control PDU buffer */
1046 pdu = radio_pkt_big_ctrl_get();
1047 }
1048
1049 lll->ccm_rx.counter = payload_count;
1050
1051 (void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
1052 mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);
1053
1054 radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, lll->phy,
1055 RADIO_PKT_CONF_PDU_TYPE_BIS,
1056 pdu));
1057
1058 } else {
1059 struct pdu_bis *pdu;
1060
1061 if (bis) {
1062 struct node_rx_pdu *node_rx;
1063
1064 /* By design, there shall always be one free node rx
1065 * available for setting up radio for new PDU reception.
1066 */
1067 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
1068 LL_ASSERT(node_rx);
1069
1070 pdu = (void *)node_rx->pdu;
1071 } else {
1072 /* Use the dedicated Control PDU buffer */
1073 pdu = radio_pkt_big_ctrl_get();
1074 }
1075
1076 radio_pkt_rx_set(pdu);
1077 }
1078
1079 radio_switch_complete_and_disable();
1080
1081 /* PDU Header Complete TimeOut, calculate the absolute timeout in
1082 * microseconds by when a PDU header is to be received for each
1083 * subevent.
1084 */
1085 stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1086 nse = ((lll->bis_curr - stream->bis_index) *
1087 ((lll->bn * lll->irc) + lll->ptc)) +
1088 ((lll->irc_curr - 1U) * lll->bn) + (lll->bn_curr - 1U) +
1089 lll->ptc_curr + lll->ctrl;
1090 hcto = lll->sub_interval * nse;
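/* NOTE: nse above is the zero-based index of the subevent about to be
 * received, so hcto at this point is that subevent's offset from the
 * BIG event anchor point; the anchor reference and listen window
 * margins are added below.
 */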
1091
1092 if (trx_cnt) {
1093 /* Setup radio packet timer header complete timeout for
1094 * subsequent subevent PDU.
1095 */
1096
1097 /* Calculate the radio start with consideration of the drift
1098 * based on the access address capture timestamp.
1099 * Listen early considering +/- 2 us active clock jitter, i.e.
1100 * listen early by 4 us.
1101 */
1102 hcto += radio_tmr_aa_restore();
1103 hcto -= radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
1104 hcto -= addr_us_get(lll->phy);
1105 hcto -= radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
1106 hcto -= (EVENT_CLOCK_JITTER_US << 1) * nse;
1107
1108 start_us = hcto;
1109 hcto = radio_tmr_start_us(0U, start_us);
1110 LL_ASSERT(hcto == (start_us + 1U));
1111
/* Add 8 us * subevents so far, as the radio was set up to listen
 * 4 us early and each subevent could have drifted by 4 us up to
 * the current subevent being listened to.
 */
1116 hcto += (((EVENT_CLOCK_JITTER_US << 1) * nse) << 1) +
1117 RANGE_DELAY_US + HAL_RADIO_TMR_START_DELAY_US;
1118 } else {
1119 /* First subevent PDU was not received, hence setup radio packet
1120 * timer header complete timeout from where the first subevent
1121 * PDU which is the BIG event anchor point would have been
1122 * received.
1123 */
1124 hcto += radio_tmr_ready_restore();
1125
1126 start_us = hcto;
1127 hcto = radio_tmr_start_us(0U, start_us);
1128 LL_ASSERT(hcto == (start_us + 1U));
1129
1130 hcto += ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
1131 lll->window_widening_event_us) << 1) +
1132 lll->window_size_event_us;
1133 }
1134
/* Header complete timeout to account for the radio ready delay,
 * chain delay and access address duration.
 */
1138 hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
1139 hcto += addr_us_get(lll->phy);
1140 hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
1141
1142 /* setup absolute PDU header reception timeout */
1143 radio_tmr_hcto_configure(hcto);
1144
1145 /* setup capture of PDU end timestamp */
1146 radio_tmr_end_capture();
1147
1148 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
1149 radio_gpio_lna_setup();
1150
1151 radio_gpio_pa_lna_enable(start_us +
1152 radio_rx_ready_delay_get(lll->phy,
1153 PHY_FLAGS_S8) -
1154 HAL_RADIO_GPIO_LNA_OFFSET);
1155 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
1156
1157 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1158 lll_prof_cputime_capture();
1159 }
1160
1161 /* Calculate ahead the next subevent channel index */
1162 const uint16_t event_counter = (lll->payload_count / lll->bn) - 1U;
1163
1164 next_chan_calc(lll, event_counter, data_chan_id);
1165
1166 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1167 lll_prof_send();
1168 }
1169 }
1170
static void isr_rx_done(void *param)
1172 {
1173 struct node_rx_pdu *node_rx;
1174 struct event_done_extra *e;
1175 struct lll_sync_iso *lll;
1176 uint16_t latency_event;
1177 uint16_t payload_index;
1178 uint8_t bis_idx;
1179
1180 /* Enqueue PDUs to ULL */
1181 node_rx = NULL;
1182
1183 /* Dequeue sliding window */
1184 lll = param;
1185 payload_index = lll->payload_tail;
1186
/* Catch up with ISO event latencies */
1188 latency_event = lll->latency_event;
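/* NOTE: The loop below flushes (latency_event + 1) ISO intervals worth
 * of payloads from the sliding window, enqueuing received PDUs and
 * generating zero length PDUs with invalid status for the payloads that
 * were not received.
 */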
1189 do {
1190 uint8_t stream_curr;
1191
1192 stream_curr = 0U;
1193 for (bis_idx = 0U; bis_idx < lll->num_bis; bis_idx++) {
1194 struct lll_sync_iso_stream *stream;
1195 uint8_t stream_curr_inc;
1196 uint16_t stream_handle;
1197 uint8_t payload_tail;
1198
1199 stream_handle = lll->stream_handle[stream_curr];
1200 stream = ull_sync_iso_lll_stream_get(stream_handle);
/* Skip BIS indices that are not being synchronized; bis_index is 0x01
 * to 0x1F, whereas bis_idx is 0-indexed.
 */
1204 if ((bis_idx + 1U) != stream->bis_index) {
1205 continue;
1206 }
1207
1208 payload_tail = lll->payload_tail;
1209 for (uint8_t bn = 0U; bn < lll->bn; bn++) {
1210 if (lll->payload[stream_curr][payload_tail]) {
1211 node_rx = lll->payload[stream_curr][payload_tail];
1212 lll->payload[stream_curr][payload_tail] = NULL;
1213
1214 iso_rx_put(node_rx->hdr.link, node_rx);
1215 } else {
1216 /* Check if there are 2 free rx buffers, one
1217 * will be consumed to generate PDU with invalid
1218 * status, and the other is to ensure a PDU can
1219 * be setup for the radio DMA to receive in the
1220 * next sub_interval/iso_interval.
1221 */
1222 node_rx = ull_iso_pdu_rx_alloc_peek(2U);
1223 if (node_rx) {
1224 struct pdu_bis *pdu;
1225 uint16_t handle;
1226
1227 ull_iso_pdu_rx_alloc();
1228
1229 pdu = (void *)node_rx->pdu;
1230 pdu->ll_id = PDU_BIS_LLID_COMPLETE_END;
1231 pdu->len = 0U;
1232
1233 handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
1234 isr_rx_iso_data_invalid(lll, latency_event, bn,
1235 handle, node_rx);
1236
1237 iso_rx_put(node_rx->hdr.link, node_rx);
1238 }
1239 }
1240
1241 payload_index = payload_tail + 1U;
1242 if (payload_index >= lll->payload_count_max) {
1243 payload_index = 0U;
1244 }
1245 payload_tail = payload_index;
1246 }
1247
1248 stream_curr_inc = stream_curr + 1U;
1249 if (stream_curr_inc < lll->stream_count) {
1250 stream_curr = stream_curr_inc;
1251 }
1252 }
1253 lll->payload_tail = payload_index;
1254 } while (latency_event--);
1255
1256 #if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
1257 if (node_rx) {
1258 iso_rx_sched();
1259 }
1260 #endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */
1261
1262 e = ull_event_done_extra_get();
1263 LL_ASSERT(e);
1264
1265 /* Check if BIG terminate procedure received */
1266 if (lll->term_reason) {
1267 e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_TERMINATE;
1268
1269 goto isr_done_cleanup;
1270
1271 /* Check if BIG Channel Map Update */
1272 } else if (lll->chm_chan_count) {
1273 const uint16_t event_counter = lll->payload_count / lll->bn;
1274
1275 /* Bluetooth Core Specification v5.3 Vol 6, Part B,
1276 * Section 5.5.2 BIG Control Procedures
1277 *
1278 * When a Synchronized Receiver receives such a PDU where
1279 * (instant - bigEventCounter) mod 65536 is greater than or
1280 * equal to 32767 (because the instant is in the past),
1281 * the Link Layer may stop synchronization with the BIG.
1282 */
1283
1284 /* Note: We are not validating whether the control PDU was
1285 * received after the instant but apply the new channel map.
1286 * If the channel map was new at or after the instant and
1287 * the channel at the event counter did not match then the
1288 * control PDU would not have been received.
1289 */
1290 if (((event_counter - lll->ctrl_instant) & 0xFFFF) <= 0x7FFF) {
1291 (void)memcpy(lll->data_chan_map, lll->chm_chan_map,
1292 sizeof(lll->data_chan_map));
1293 lll->data_chan_count = lll->chm_chan_count;
1294 lll->chm_chan_count = 0U;
1295 }
1296 }
1297
1298 /* Calculate and place the drift information in done event */
1299 e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
1300 e->estab_failed = 0U;
1301 e->trx_cnt = trx_cnt;
1302 e->crc_valid = crc_ok_anchor;
1303
1304 if (trx_cnt) {
1305 e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
1306 e->drift.start_to_address_actual_us =
1307 radio_tmr_aa_restore() - radio_tmr_ready_restore();
1308 e->drift.window_widening_event_us =
1309 lll->window_widening_event_us;
1310
1311 /* Reset window widening, as anchor point sync-ed */
1312 lll->window_widening_event_us = 0U;
1313 lll->window_size_event_us = 0U;
1314 }
1315
1316 isr_done_cleanup:
1317 lll_isr_cleanup(param);
1318 }
1319
static void isr_done(void *param)
1321 {
1322 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1323 lll_prof_latency_capture();
1324 }
1325
1326 lll_isr_status_reset();
1327
1328 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1329 lll_prof_cputime_capture();
1330 }
1331
1332 isr_rx_done(param);
1333
1334 if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1335 lll_prof_send();
1336 }
1337 }
1338
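/* Pre-calculate the channel index for the subevent that follows the one
 * currently being set up: within the same BIS the subevent channel
 * selection PRN state is simply advanced, whereas a move to the next BIS
 * re-derives the channel from that BIS Access Address and the current
 * event counter.
 */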
static void next_chan_calc(struct lll_sync_iso *lll, uint16_t event_counter,
uint16_t data_chan_id)
1341 {
1342 /* Calculate ahead the next subevent channel index */
1343 if ((lll->bn_curr < lll->bn) ||
1344 (lll->irc_curr < lll->irc) ||
1345 (lll->ptc_curr < lll->ptc)) {
1346 /* Calculate the radio channel to use for next subevent */
1347 lll->next_chan_use = lll_chan_iso_subevent(data_chan_id,
1348 lll->data_chan_map,
1349 lll->data_chan_count,
1350 &lll->data_chan_prn_s,
1351 &lll->data_chan_remap_idx);
1352 } else if (lll->bis_curr < lll->num_bis) {
1353 uint8_t access_addr[4];
1354
1355 /* Calculate the Access Address for the next BIS subevent */
1356 util_bis_aa_le32((lll->bis_curr + 1U), lll->seed_access_addr,
1357 access_addr);
1358 data_chan_id = lll_chan_id(access_addr);
1359
1360 /* Calculate the radio channel to use for next BIS */
1361 lll->next_chan_use = lll_chan_iso_event(event_counter,
1362 data_chan_id,
1363 lll->data_chan_map,
1364 lll->data_chan_count,
1365 &lll->data_chan_prn_s,
1366 &lll->data_chan_remap_idx);
1367 }
1368 }
1369
static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
uint16_t handle, struct node_rx_pdu *node_rx)
1372 {
1373 struct lll_sync_iso_stream *stream;
1374 struct node_rx_iso_meta *iso_meta;
1375
1376 node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1377 node_rx->hdr.handle = handle;
1378
1379 iso_meta = &node_rx->rx_iso_meta;
1380 iso_meta->payload_number = lll->payload_count + (lll->bn_curr - 1U) +
1381 (lll->ptc_curr * lll->pto);
1382 /* Decrement BN as payload_count was pre-incremented */
1383 iso_meta->payload_number -= lll->bn;
1384
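/* NOTE: The timestamp below is derived from the BIG event anchor point
 * (the restored access address capture normalized back to the first
 * subevent of BIS 1, less the access address duration), moved forward
 * by whole ISO intervals for payloads received as pre-transmissions,
 * and wrapped to the ticker's time range.
 */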
1385 stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1386 iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1387 radio_tmr_aa_restore() +
1388 (DIV_ROUND_UP(lll->ptc_curr, lll->bn) *
1389 lll->pto * lll->iso_interval *
1390 PERIODIC_INT_UNIT_US) -
1391 addr_us_get(lll->phy) -
1392 ((stream->bis_index - 1U) *
1393 lll->sub_interval * ((lll->irc * lll->bn) +
1394 lll->ptc));
1395 iso_meta->timestamp %=
1396 HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
1397 iso_meta->status = 0U;
1398 }
1399
static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
uint16_t latency, uint8_t bn,
uint16_t handle,
struct node_rx_pdu *node_rx)
1404 {
1405 struct lll_sync_iso_stream *stream;
1406 struct node_rx_iso_meta *iso_meta;
1407
1408 node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1409 node_rx->hdr.handle = handle;
1410
1411 iso_meta = &node_rx->rx_iso_meta;
1412 iso_meta->payload_number = lll->payload_count + bn;
1413 /* Decrement BN as payload_count was pre-incremented */
1414 iso_meta->payload_number -= (latency + 1U) * lll->bn;
1415
1416 stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1417 iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1418 radio_tmr_aa_restore() - addr_us_get(lll->phy) -
1419 ((stream->bis_index - 1U) *
1420 lll->sub_interval * ((lll->irc * lll->bn) +
1421 lll->ptc));
1422 iso_meta->timestamp -= (latency * lll->iso_interval *
1423 PERIODIC_INT_UNIT_US);
1424 iso_meta->timestamp %=
1425 HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
1426 iso_meta->status = 1U;
1427 }
1428
static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu)
1430 {
1431 const uint8_t opcode = pdu->ctrl.opcode;
1432
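/* NOTE: Only the PDU_BIG_CTRL_TYPE_TERM_IND and
 * PDU_BIG_CTRL_TYPE_CHAN_MAP_IND control PDUs are handled here; the
 * first instance of each is latched (guarded by term_reason and
 * chm_chan_count) and acted upon at the end of the event in
 * isr_rx_done(); other opcodes are ignored.
 */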
1433 if (opcode == PDU_BIG_CTRL_TYPE_TERM_IND) {
1434 if (!lll->term_reason) {
1435 struct pdu_big_ctrl_term_ind *term;
1436
1437 term = (void *)&pdu->ctrl.term_ind;
1438 lll->term_reason = term->reason;
1439 lll->ctrl_instant = term->instant;
1440 }
1441 } else if (opcode == PDU_BIG_CTRL_TYPE_CHAN_MAP_IND) {
1442 if (!lll->chm_chan_count) {
1443 struct pdu_big_ctrl_chan_map_ind *chm;
1444 uint8_t chan_count;
1445
1446 chm = (void *)&pdu->ctrl.chan_map_ind;
1447 chan_count =
1448 util_ones_count_get(chm->chm, sizeof(chm->chm));
1449 if (chan_count >= CHM_USED_COUNT_MIN) {
1450 lll->chm_chan_count = chan_count;
1451 (void)memcpy(lll->chm_chan_map, chm->chm,
1452 sizeof(lll->chm_chan_map));
1453 lll->ctrl_instant = chm->instant;
1454 }
1455 }
1456 } else {
1457 /* Unknown control PDU, ignore */
1458 }
1459 }
1460