1 /*
2 * Copyright (c) 2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8 #include <string.h>
9
10 #include <soc.h>
11 #include <zephyr/sys/byteorder.h>
12 #include <zephyr/sys/util.h>
13
14 #include "hal/cpu.h"
15 #include "hal/ccm.h"
16 #include "hal/radio.h"
17 #include "hal/ticker.h"
18
19 #include "util/util.h"
20 #include "util/mem.h"
21 #include "util/memq.h"
22
23 #include "pdu_df.h"
24 #include "lll/pdu_vendor.h"
25 #include "pdu.h"
26
27 #include "lll.h"
28 #include "lll_vendor.h"
29 #include "lll_clock.h"
30 #include "lll_chan.h"
31 #include "lll_sync_iso.h"
32
33 #include "lll_internal.h"
34 #include "lll_tim_internal.h"
35 #include "lll_prof_internal.h"
36
37 #include "ll_feat.h"
38
39 #include "hal/debug.h"
40
/* Forward declarations of the file-local helpers defined below. */
static int init_reset(void);
static void prepare(void *param);
static void create_prepare_bh(void *param);
static void prepare_bh(void *param);
static int create_prepare_cb(struct lll_prepare_param *p);
static int prepare_cb(struct lll_prepare_param *p);
static int prepare_cb_common(struct lll_prepare_param *p);
static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
static void isr_rx_estab(void *param);
static void isr_rx(void *param);
static void isr_rx_done(void *param);
static void isr_done(void *param);
static void next_chan_calc(struct lll_sync_iso *lll, uint16_t event_counter,
			   uint16_t data_chan_id);
static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
				  uint16_t handle, struct node_rx_pdu *node_rx);
static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
				    uint8_t bn, uint16_t handle,
				    struct node_rx_pdu *node_rx);
static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu);

/* FIXME: Optimize by moving to a common place, as similar variable is used for
 * connections too.
 */
/* Count of successful radio transactions in the current BIG event; reset in
 * prepare_cb_common(), incremented in the Rx ISRs.
 */
static uint8_t trx_cnt;
/* CRC status latched at the BIG event anchor point (first subevent sync). */
static uint8_t crc_ok_anchor;
67
/* Initialize the LLL Broadcast ISO Sync Receiver module.
 *
 * Returns 0 on success, else the error from the common reset path.
 */
int lll_sync_iso_init(void)
{
	/* Initialization is shared with the reset path */
	return init_reset();
}
79
/* Reset the LLL Broadcast ISO Sync Receiver module state.
 *
 * Returns 0 on success, else the error from the common reset path.
 */
int lll_sync_iso_reset(void)
{
	/* Reset is shared with the initialization path */
	return init_reset();
}
91
/* Prepare for establishing sync to a BIG: accumulate latencies and window
 * widening, then enqueue the create-prepare (sync establishment) callback.
 */
void lll_sync_iso_create_prepare(void *param)
{
	prepare(param);
	create_prepare_bh(param);
}
97
/* Prepare a regular BIG event on an already established sync: accumulate
 * latencies and window widening, then enqueue the normal prepare callback.
 */
void lll_sync_iso_prepare(void *param)
{
	prepare(param);
	prepare_bh(param);
}
103
/* Common init/reset hook; no module-local state needs setup currently. */
static int init_reset(void)
{
	return 0;
}
108
/* Common top-half prepare: request the HF clock, and fold the elapsed
 * event instants into the prepare latency and window widening accumulators
 * that prepare_cb_common() will consume.
 */
static void prepare(void *param)
{
	struct lll_prepare_param *p;
	struct lll_sync_iso *lll;
	uint16_t elapsed;
	int err;

	err = lll_hfclock_on();
	LL_ASSERT(err >= 0);

	p = param;

	/* Instants elapsed */
	elapsed = p->lazy + 1U;

	lll = p->param;

	/* Save the (latency + 1) for use in event */
	lll->latency_prepare += elapsed;

	/* Accumulate window widening */
	lll->window_widening_prepare_us += lll->window_widening_periodic_us *
					   elapsed;
	/* Cap the widening at the maximum allowed for the sync */
	if (lll->window_widening_prepare_us > lll->window_widening_max_us) {
		lll->window_widening_prepare_us = lll->window_widening_max_us;
	}
}
136
create_prepare_bh(void * param)137 static void create_prepare_bh(void *param)
138 {
139 int err;
140
141 /* Invoke common pipeline handling of prepare */
142 err = lll_prepare(lll_is_abort_cb, abort_cb, create_prepare_cb, 0U,
143 param);
144 LL_ASSERT(!err || err == -EINPROGRESS);
145 }
146
prepare_bh(void * param)147 static void prepare_bh(void *param)
148 {
149 int err;
150
151 /* Invoke common pipeline handling of prepare */
152 err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
153 LL_ASSERT(!err || err == -EINPROGRESS);
154 }
155
create_prepare_cb(struct lll_prepare_param * p)156 static int create_prepare_cb(struct lll_prepare_param *p)
157 {
158 int err;
159
160 err = prepare_cb_common(p);
161 if (err) {
162 DEBUG_RADIO_START_O(1);
163 return 0;
164 }
165
166 radio_isr_set(isr_rx_estab, p->param);
167
168 DEBUG_RADIO_START_O(1);
169 return 0;
170 }
171
prepare_cb(struct lll_prepare_param * p)172 static int prepare_cb(struct lll_prepare_param *p)
173 {
174 int err;
175
176 err = prepare_cb_common(p);
177 if (err) {
178 DEBUG_RADIO_START_O(1);
179 return 0;
180 }
181
182 radio_isr_set(isr_rx, p->param);
183
184 DEBUG_RADIO_START_O(1);
185 return 0;
186 }
187
/* Common part of the create/normal prepare callbacks.
 *
 * Consumes the accumulated prepare latency and window widening, computes
 * the event counter and first BIS subevent channel, configures the radio
 * (access address, CRC, optional CCM encryption) and starts the radio
 * timer with header-complete timeout for the BIG event anchor point.
 *
 * Returns 0 on success, -ECANCELED when the event is aborted because of
 * preparation overhead (XTAL advanced feature only).
 */
static int prepare_cb_common(struct lll_prepare_param *p)
{
	struct lll_sync_iso_stream *stream;
	struct node_rx_pdu *node_rx;
	struct lll_sync_iso *lll;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	uint16_t stream_handle;
	uint16_t event_counter;
	uint8_t access_addr[4];
	uint16_t data_chan_id;
	uint8_t data_chan_use;
	uint32_t remainder_us;
	uint8_t crc_init[3];
	struct ull_hdr *ull;
	uint32_t remainder;
	uint32_t hcto;
	uint32_t ret;
	uint8_t phy;

	DEBUG_RADIO_START_O(1);

	lll = p->param;

	/* Deduce the latency */
	lll->latency_event = lll->latency_prepare - 1U;

	/* Calculate the current event counter value */
	event_counter = (lll->payload_count / lll->bn) + lll->latency_event;

	/* Update BIS packet counter to next value */
	lll->payload_count += (lll->latency_prepare * lll->bn);

	/* Reset accumulated latencies */
	lll->latency_prepare = 0U;

	/* Current window widening */
	lll->window_widening_event_us += lll->window_widening_prepare_us;
	lll->window_widening_prepare_us = 0U;
	if (lll->window_widening_event_us > lll->window_widening_max_us) {
		lll->window_widening_event_us = lll->window_widening_max_us;
	}

	/* Initialize trx chain count */
	trx_cnt = 0U;

	/* Initialize anchor point CRC ok flag */
	crc_ok_anchor = 0U;

	/* Initialize to mandatory parameter values */
	lll->bis_curr = 1U;
	lll->ptc_curr = 0U;
	lll->irc_curr = 1U;
	lll->bn_curr = 1U;

	/* Initialize control subevent flag */
	lll->ctrl = 0U;

	/* Calculate the Access Address for the BIS event */
	util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr, access_addr);
	data_chan_id = lll_chan_id(access_addr);

	/* Calculate the radio channel to use for ISO event and hence store the
	 * channel to be used for control subevent.
	 */
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   lll->data_chan_map,
					   lll->data_chan_count,
					   &lll->data_chan_prn_s,
					   &lll->data_chan_remap_idx);

	/* Initialize stream current */
	lll->stream_curr = 0U;

	/* Skip subevents until first selected BIS */
	stream_handle = lll->stream_handle[lll->stream_curr];
	stream = ull_sync_iso_lll_stream_get(stream_handle);
	if ((stream->bis_index != lll->bis_curr) &&
	    (stream->bis_index <= lll->num_bis)) {
		/* First selected BIS */
		lll->bis_curr = stream->bis_index;

		/* Calculate the Access Address for the current BIS */
		util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr,
				 access_addr);
		data_chan_id = lll_chan_id(access_addr);

		/* Calculate the channel id for the next BIS subevent */
		data_chan_use = lll_chan_iso_event(event_counter,
						   data_chan_id,
						   lll->data_chan_map,
						   lll->data_chan_count,
						   &lll->data_chan_prn_s,
						   &lll->data_chan_remap_idx);
	}

	/* Calculate the CRC init value for the BIS event,
	 * preset with the BaseCRCInit value from the BIGInfo data the most
	 * significant 2 octets and the BIS_Number for the specific BIS in the
	 * least significant octet.
	 */
	crc_init[0] = lll->bis_curr;
	(void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));

	/* Start setting up of Radio h/w */
	radio_reset();

	phy = lll->phy;
	radio_phy_set(phy, PHY_FLAGS_S8);
	radio_aa_set(access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));
	lll_chan_set(data_chan_use);

	/* By design, there shall always be one free node rx available for
	 * setting up radio for new PDU reception.
	 */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

	/* Encryption */
	if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
	    lll->enc) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM counter for the first (oldest pending) payload */
		payload_count = lll->payload_count - lll->bn;
		lll->ccm_rx.counter = payload_count;

		/* Nonce IV is GIV XOR-ed with the BIS Access Address */
		(void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
		mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		/* MIC appended to the PDU, hence larger radio maximum length */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (lll->max_pdu + PDU_MIC_SIZE), pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, phy,
							  RADIO_PKT_CONF_PDU_TYPE_BIS,
							  node_rx->pdu));
	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, lll->max_pdu,
				    pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_switch_complete_and_disable();

	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);

	remainder = p->remainder;
	remainder_us = radio_tmr_start(0U, ticks_at_start, remainder);

	/* Save for use by the ISRs to compute subevent-relative timings */
	radio_tmr_ready_save(remainder_us);
	radio_tmr_aa_save(0U);
	radio_tmr_aa_capture();

	/* Header complete timeout: jitter, resolution margin and window
	 * widening on both sides, plus the receive window size.
	 */
	hcto = remainder_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 lll->window_widening_event_us) << 1) +
	       lll->window_size_event_us;
	hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
	hcto += addr_us_get(lll->phy);
	hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
	radio_tmr_hcto_configure(hcto);

	radio_tmr_end_capture();
	radio_rssi_measure();

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

	radio_gpio_pa_lna_enable(remainder_us +
				 radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_SCAN_SYNC_ISO_BASE +
					  ull_sync_iso_lll_index_get(lll)), ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		radio_isr_set(lll_isr_abort, lll);
		radio_disable();

		return -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	ret = lll_prepare_done(lll);
	LL_ASSERT(!ret);

	/* Calculate ahead the next subevent channel index */
	next_chan_calc(lll, event_counter, data_chan_id);

	return 0;
}
399
/* Abort an in-progress or pending BIG event.
 *
 * With @p prepare_param NULL the radio is active: disable it and let
 * isr_done() run the normal event-done path. Otherwise a queued prepare
 * is being cancelled: release the HF clock and flag an extra done event
 * so the ULL can evaluate sync loss.
 */
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct event_done_extra *e;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		radio_isr_set(isr_done, param);
		radio_disable();
		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Extra done event, to check sync lost */
	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	/* No reception happened for this event */
	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
	e->trx_cnt = 0U;
	e->crc_valid = 0U;

	lll_done(param);
}
428
/* Rx ISR used while establishing sync to the BIG.
 *
 * Records whether the first subevent PDU was received with valid CRC and
 * reports trx count, CRC status and measured drift to the ULL through an
 * EVENT_DONE_EXTRA_TYPE_SYNC_ISO_ESTAB done event, then cleans up.
 */
static void isr_rx_estab(void *param)
{
	struct event_done_extra *e;
	uint8_t trx_done;
	uint8_t crc_ok;

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
		trx_cnt++;
	} else {
		/* Reception did not complete; treat CRC as failed */
		crc_ok = 0U;
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_cputime_capture();
	}

	/* Calculate and place the drift information in done event */
	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_ESTAB;
	e->estab_failed = 0U;
	e->trx_cnt = trx_cnt;
	e->crc_valid = crc_ok;

	if (trx_cnt) {
		struct lll_sync_iso *lll;

		lll = param;
		e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
		e->drift.start_to_address_actual_us =
			radio_tmr_aa_get() - radio_tmr_ready_get();
		e->drift.window_widening_event_us =
			lll->window_widening_event_us;

		/* Reset window widening, as anchor point sync-ed */
		lll->window_widening_event_us = 0U;
		lll->window_size_event_us = 0U;
	}

	lll_isr_cleanup(param);

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_send();
	}
}
485
/* Rx ISR for BIS subevents of an established BIG sync.
 *
 * Per completed reception it: stores a CRC-valid data PDU into the
 * per-BIS sliding payload window (or handles a control PDU), then walks
 * the subevent sequence (burst -> repetition -> pre-transmission -> next
 * BIS -> control subevent) to find the next PDU still needed, and
 * re-arms the radio (AA, CRC init, channel, optional CCM, HCTO) for it.
 * When no subevent remains, isr_rx_done() closes the BIG event.
 *
 * NOTE: Sequential BIS arrangement only; see FIXME at
 * isr_rx_find_subevent.
 */
static void isr_rx(void *param)
{
	struct lll_sync_iso_stream *stream;
	struct node_rx_pdu *node_rx;
	struct lll_sync_iso *lll;
	uint8_t access_addr[4];
	uint16_t data_chan_id;
	uint8_t data_chan_use;
	uint8_t crc_init[3];
	uint8_t rssi_ready;
	uint32_t start_us;
	uint8_t new_burst;
	uint8_t trx_done;
	uint8_t bis_idx;
	uint8_t skipped;
	uint8_t crc_ok;
	uint32_t hcto;
	uint8_t bis;
	uint8_t nse;

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (!trx_done) {
		/* Clear radio rx status and events */
		lll_isr_rx_status_reset();

		/* initialize LLL context reference */
		lll = param;

		/* BIS index */
		bis_idx = lll->bis_curr - 1U;

		/* Nothing received; go find the next subevent to receive */
		goto isr_rx_done;
	}

	crc_ok = radio_crc_is_valid();
	/* NOTE(review): rssi_ready is captured but not consumed in this
	 * function; presumably kept for parity with other ISRs — confirm.
	 */
	rssi_ready = radio_rssi_is_ready();
	trx_cnt++;

	/* initialize LLL context reference */
	lll = param;

	/* Save the AA captured for the first anchor point sync */
	if (!radio_tmr_aa_restore()) {
		const struct lll_sync_iso_stream *sync_stream;
		uint32_t se_offset_us;
		uint8_t se;

		crc_ok_anchor = crc_ok;

		/* Back-compute the anchor point timestamps from the current
		 * subevent index (se) times the sub_interval.
		 */
		sync_stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
		se = ((lll->bis_curr - sync_stream->bis_index) *
		      ((lll->bn * lll->irc) + lll->ptc)) +
		     ((lll->irc_curr - 1U) * lll->bn) + (lll->bn_curr - 1U) +
		     lll->ptc_curr + lll->ctrl;
		se_offset_us = lll->sub_interval * se;
		radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
		radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	/* BIS index */
	bis_idx = lll->bis_curr - 1U;

	/* Check CRC and generate ISO Data PDU */
	if (crc_ok) {
		struct lll_sync_iso_stream *sync_stream;
		uint32_t payload_offset;
		uint16_t payload_index;
		uint16_t stream_handle;
		struct pdu_bis *pdu;

		/* Check if Control Subevent being received */
		if ((lll->bn_curr == lll->bn) &&
		    (lll->irc_curr == lll->irc) &&
		    (lll->ptc_curr == lll->ptc) &&
		    (lll->bis_curr == lll->num_bis) &&
		    lll->ctrl) {
			lll->cssn_curr = lll->cssn_next;

			/* Check the dedicated Control PDU buffer */
			pdu = radio_pkt_big_ctrl_get();
			if (pdu->ll_id == PDU_BIS_LLID_CTRL) {
				isr_rx_ctrl_recv(lll, pdu);
			}

			goto isr_rx_done;
		} else {
			/* Check if there are 2 free rx buffers, one will be
			 * consumed to receive the current PDU, and the other
			 * is to ensure a PDU can be setup for the radio DMA to
			 * receive in the next sub_interval/iso_interval.
			 */
			node_rx = ull_iso_pdu_rx_alloc_peek(2U);
			if (!node_rx) {
				goto isr_rx_done;
			}
		}

		pdu = (void *)node_rx->pdu;

		/* Check for new control PDU in control subevent */
		if (pdu->cstf && (pdu->cssn != lll->cssn_curr)) {
			lll->cssn_next = pdu->cssn;
			/* TODO: check same CSSN is used in every subevent */
		}

		/* Check payload buffer overflow.
		 * Ensure we are not having offset values over 255 in payload_count_max, used to
		 * allocate the buffers.
		 */
		payload_offset = (lll->latency_event * lll->bn) + (lll->bn_curr - 1U) +
				 (lll->ptc_curr * lll->pto);
		if (payload_offset >= lll->payload_count_max) {
			goto isr_rx_done;
		}

		/* Calculate the payload index in the sliding window */
		payload_index = lll->payload_tail + payload_offset;
		if (payload_index >= lll->payload_count_max) {
			payload_index -= lll->payload_count_max;
		}

		/* Get reference to stream context */
		stream_handle = lll->stream_handle[lll->stream_curr];
		sync_stream = ull_sync_iso_lll_stream_get(stream_handle);

		/* Store the received PDU if selected stream and not already
		 * received (say in previous event as pre-transmitted PDU.
		 */
		if ((lll->bis_curr == sync_stream->bis_index) && pdu->len &&
		    !lll->payload[bis_idx][payload_index]) {
			uint16_t handle;

			if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
			    lll->enc) {
				uint32_t mic_failure;
				uint32_t done;

				/* CCM must have finished decrypting with a
				 * valid MIC before the PDU is accepted.
				 */
				done = radio_ccm_is_done();
				LL_ASSERT(done);

				mic_failure = !radio_ccm_mic_is_valid();
				LL_ASSERT(!mic_failure);
			}

			ull_iso_pdu_rx_alloc();

			handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
			isr_rx_iso_data_valid(lll, handle, node_rx);

			lll->payload[bis_idx][payload_index] = node_rx;
		}
	}

isr_rx_done:
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_cputime_capture();
	}

	/* Remember which BIS we were on, to detect BIS changes below */
	uint8_t bis_idx_old = bis_idx;

	new_burst = 0U;
	skipped = 0U;

isr_rx_find_subevent:
	/* FIXME: Sequential or Interleaved BIS subevents decision */
	/* NOTE: below code is for Sequential Rx only */

	/* Find the next (bn_curr)th subevent to receive PDU */
	while (lll->bn_curr < lll->bn) {
		uint32_t payload_offset;
		uint16_t payload_index;

		/* Next burst number to check for reception required */
		lll->bn_curr++;

		/* Check payload buffer overflow.
		 * Ensure we are not having offset values over 255 in payload_count_max, used to
		 * allocate the buffers.
		 */
		payload_offset = (lll->latency_event * lll->bn) + (lll->bn_curr - 1U);
		if (payload_offset >= lll->payload_count_max) {
			/* (bn_curr)th Rx PDU skip subevent */
			skipped++;

			continue;
		}

		/* Find the index of the (bn_curr)th Rx PDU buffer */
		payload_index = lll->payload_tail + payload_offset;
		if (payload_index >= lll->payload_count_max) {
			payload_index -= lll->payload_count_max;
		}

		/* Check if (bn_curr)th Rx PDU has been received */
		if (!lll->payload[bis_idx][payload_index]) {
			/* Receive the (bn_curr)th Rx PDU of bis_curr */
			bis = lll->bis_curr;

			goto isr_rx_next_subevent;
		}

		/* (bn_curr)th Rx PDU already received, skip subevent */
		skipped++;
	}

	/* Find the next repetition (irc_curr)th subevent to receive PDU */
	if (lll->irc_curr < lll->irc) {
		if (!new_burst) {
			uint32_t payload_offset;
			uint16_t payload_index;

			/* Increment to next repetition count and be at first
			 * burst count for it.
			 */
			lll->bn_curr = 1U;
			lll->irc_curr++;

			/* Check payload buffer overflow */
			/* FIXME: Typically we should not have high latency, but do have an
			 * assertion check to ensure we do not rollover in the payload_index
			 * variable use. Alternatively, add implementation to correctly
			 * skip subevents as buffers at these high offset are unavailable.
			 */
			payload_offset = (lll->latency_event * lll->bn);
			LL_ASSERT(payload_offset <= UINT8_MAX);

			/* Find the index of the (irc_curr)th bn = 1 Rx PDU
			 * buffer.
			 */
			payload_index = lll->payload_tail + payload_offset;
			if (payload_index >= lll->payload_count_max) {
				payload_index -= lll->payload_count_max;
			}

			/* Check if (irc_curr)th bn = 1 Rx PDU has been
			 * received.
			 */
			if (!lll->payload[bis_idx][payload_index]) {
				/* Receive the (irc_curr)th bn = 1 Rx PDU of
				 * bis_curr.
				 */
				bis = lll->bis_curr;

				goto isr_rx_next_subevent;
			} else {
				/* bn = 1 Rx PDU already received, skip
				 * subevent.
				 */
				skipped++;

				/* flag to skip successive repetitions if all
				 * bn PDUs have been received. i.e. the bn
				 * loop above did not find a PDU to be received.
				 */
				new_burst = 1U;

				/* Find the missing (bn_curr)th Rx PDU of
				 * bis_curr
				 */
				goto isr_rx_find_subevent;
			}
		} else {
			/* Skip all successive repetition reception as all
			 * bn PDUs have been received.
			 */
			skipped += (lll->irc - lll->irc_curr) * lll->bn;
			lll->irc_curr = lll->irc;
		}
	}

	/* Next pre-transmission subevent */
	if (lll->ptc_curr < lll->ptc) {
		lll->ptc_curr++;

		/* TODO: optimize to skip pre-transmission subevent in case
		 * of insufficient buffers in sliding window.
		 */

		/* Receive the (ptc_curr)th Rx PDU of bis_curr */
		bis = lll->bis_curr;

		goto isr_rx_next_subevent;
	}

	/* Next BIS */
	if (lll->bis_curr < lll->num_bis) {
		const uint8_t stream_curr = lll->stream_curr + 1U;
		struct lll_sync_iso_stream *sync_stream;
		uint16_t stream_handle;

		/* Next selected stream */
		if (stream_curr < lll->stream_count) {
			lll->stream_curr = stream_curr;
			stream_handle = lll->stream_handle[lll->stream_curr];
			sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
			if (sync_stream->bis_index <= lll->num_bis) {
				uint32_t payload_offset;
				uint16_t payload_index;
				uint8_t bis_idx_new;

				/* Restart the subevent walk at the new BIS */
				lll->bis_curr = sync_stream->bis_index;
				lll->ptc_curr = 0U;
				lll->irc_curr = 1U;
				lll->bn_curr = 1U;

				/* new BIS index */
				bis_idx_new = lll->bis_curr - 1U;

				/* Check payload buffer overflow */
				/* FIXME: Typically we should not have high latency, but do have an
				 * assertion check to ensure we do not rollover in the
				 * payload_index variable use. Alternatively, add
				 * implementation to correctly skip subevents as buffers at
				 * these high offsets are unavailable.
				 */
				payload_offset = (lll->latency_event * lll->bn);
				LL_ASSERT(payload_offset <= UINT8_MAX);

				/* Find the index of the (irc_curr)th bn = 1 Rx
				 * PDU buffer.
				 */
				payload_index = lll->payload_tail + payload_offset;
				if (payload_index >= lll->payload_count_max) {
					payload_index -= lll->payload_count_max;
				}

				/* Check if (irc_curr)th bn = 1 Rx PDU has been
				 * received.
				 */
				if (!lll->payload[bis_idx_new][payload_index]) {
					/* bn = 1 Rx PDU not received */
					skipped = (bis_idx_new - bis_idx) *
						  ((lll->bn * lll->irc) +
						   lll->ptc);

					/* Receive the (irc_curr)th bn = 1 Rx
					 * PDU of bis_curr.
					 */
					bis = lll->bis_curr;

					goto isr_rx_next_subevent;
				} else {
					/* bn = 1 Rx PDU already received, skip
					 * subevent.
					 */
					skipped = ((bis_idx_new - bis_idx) *
						   ((lll->bn * lll->irc) +
						    lll->ptc)) + 1U;

					/* BIS index */
					bis_idx = lll->bis_curr - 1U;

					/* Find the missing (bn_curr)th Rx PDU
					 * of bis_curr
					 */
					goto isr_rx_find_subevent;
				}
			} else {
				/* Unsynchronized BIS index; no more BISes */
				lll->bis_curr = lll->num_bis;
			}
		} else {
			/* No more selected streams */
			lll->bis_curr = lll->num_bis;
		}
	}

	/* Control subevent */
	if (!lll->ctrl && (lll->cssn_next != lll->cssn_curr)) {
		uint8_t pkt_flags;

		/* Receive the control PDU and close the BIG event
		 * there after.
		 */
		lll->ctrl = 1U;

		/* control subevent to use bis = 0 and se_n = 1 */
		bis = 0U;

		/* Configure Radio to receive Control PDU that can have greater
		 * PDU length than max_pdu length.
		 */
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
						 lll->phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		if (lll->enc) {
			radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
					    (sizeof(struct pdu_big_ctrl) + PDU_MIC_SIZE),
					    pkt_flags);
		} else {
			radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
					    sizeof(struct pdu_big_ctrl),
					    pkt_flags);
		}

		goto isr_rx_next_subevent;
	}

	/* No subevent left to receive; close the BIG event */
	isr_rx_done(param);

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_send();
	}

	return;

isr_rx_next_subevent:
	/* Calculate the Access Address for the BIS event */
	util_bis_aa_le32(bis, lll->seed_access_addr, access_addr);
	data_chan_id = lll_chan_id(access_addr);

	/* Calculate the CRC init value for the BIS event,
	 * preset with the BaseCRCInit value from the BIGInfo data the most
	 * significant 2 octets and the BIS_Number for the specific BIS in the
	 * least significant octet.
	 */
	crc_init[0] = bis;
	(void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));

	radio_aa_set(access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));

	/* Set the channel to use */
	if (!bis) {
		const uint16_t event_counter =
			(lll->payload_count / lll->bn) - 1U;

		/* Calculate the radio channel to use for ISO event */
		data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
						   lll->data_chan_map,
						   lll->data_chan_count,
						   &lll->data_chan_prn_s,
						   &lll->data_chan_remap_idx);
	} else if (!skipped) {
		/* Pre-calculated by next_chan_calc() in the previous ISR */
		data_chan_use = lll->next_chan_use;
	} else {
		uint8_t bis_idx_new = lll->bis_curr - 1U;

		/* Initialise to avoid compile error */
		data_chan_use = 0U;

		if (bis_idx_old != bis_idx_new) {
			const uint16_t event_counter =
				(lll->payload_count / lll->bn) - 1U;

			/* Calculate the radio channel to use for next BIS */
			data_chan_use = lll_chan_iso_event(event_counter,
						data_chan_id,
						lll->data_chan_map,
						lll->data_chan_count,
						&lll->data_chan_prn_s,
						&lll->data_chan_remap_idx);

			/* Account for subevents consumed by the BIS switch */
			skipped -= (bis_idx_new - bis_idx_old) *
				   ((lll->bn * lll->irc) + lll->ptc);
		}

		while (skipped--) {
			/* Calculate the radio channel to use for subevent */
			data_chan_use = lll_chan_iso_subevent(data_chan_id,
						lll->data_chan_map,
						lll->data_chan_count,
						&lll->data_chan_prn_s,
						&lll->data_chan_remap_idx);
		}
	}

	lll_chan_set(data_chan_use);

	/* Encryption */
	if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
	    lll->enc) {
		uint64_t payload_count;
		struct pdu_bis *pdu;

		payload_count = lll->payload_count - lll->bn;
		if (bis) {
			payload_count += (lll->bn_curr - 1U) +
					 (lll->ptc_curr * lll->pto);

			/* By design, there shall always be one free node rx
			 * available for setting up radio for new PDU reception.
			 */
			node_rx = ull_iso_pdu_rx_alloc_peek(1U);
			LL_ASSERT(node_rx);

			pdu = (void *)node_rx->pdu;
		} else {
			/* Use the dedicated Control PDU buffer */
			pdu = radio_pkt_big_ctrl_get();
		}

		lll->ccm_rx.counter = payload_count;

		(void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
		mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);

		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, lll->phy,
							  RADIO_PKT_CONF_PDU_TYPE_BIS,
							  pdu));

	} else {
		struct pdu_bis *pdu;

		if (bis) {
			/* By design, there shall always be one free node rx
			 * available for setting up radio for new PDU reception.
			 */
			node_rx = ull_iso_pdu_rx_alloc_peek(1U);
			LL_ASSERT(node_rx);

			pdu = (void *)node_rx->pdu;
		} else {
			/* Use the dedicated Control PDU buffer */
			pdu = radio_pkt_big_ctrl_get();
		}

		radio_pkt_rx_set(pdu);
	}

	radio_switch_complete_and_disable();

	/* PDU Header Complete TimeOut, calculate the absolute timeout in
	 * microseconds by when a PDU header is to be received for each
	 * subevent.
	 */
	stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
	nse = ((lll->bis_curr - stream->bis_index) *
	       ((lll->bn * lll->irc) + lll->ptc)) +
	      ((lll->irc_curr - 1U) * lll->bn) + (lll->bn_curr - 1U) +
	      lll->ptc_curr + lll->ctrl;
	hcto = lll->sub_interval * nse;

	if (trx_cnt) {
		/* Setup radio packet timer header complete timeout for
		 * subsequent subevent PDU.
		 */

		/* Calculate the radio start with consideration of the drift
		 * based on the access address capture timestamp.
		 * Listen early considering +/- 2 us active clock jitter, i.e.
		 * listen early by 4 us.
		 */
		hcto += radio_tmr_aa_restore();
		hcto -= radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
		hcto -= addr_us_get(lll->phy);
		hcto -= radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
		hcto -= (EVENT_CLOCK_JITTER_US << 1) * nse;

		start_us = hcto;
		hcto = radio_tmr_start_us(0U, start_us);
		/* FIXME: Assertion check disabled until investigation as to
		 * why there is high ISR latency causing assertion here.
		 */
		/* LL_ASSERT(hcto == (start_us + 1U)); */

		/* Add 8 us * subevents so far, as radio was setup to listen
		 * 4 us early and subevents could have a 4 us drift each until
		 * the current subevent we are listening.
		 */
		hcto += (((EVENT_CLOCK_JITTER_US << 1) * nse) << 1) +
			RANGE_DELAY_US + HAL_RADIO_TMR_START_DELAY_US;
	} else {
		/* First subevent PDU was not received, hence setup radio packet
		 * timer header complete timeout from where the first subevent
		 * PDU which is the BIG event anchor point would have been
		 * received.
		 */
		hcto += radio_tmr_ready_restore();

		start_us = hcto;
		hcto = radio_tmr_start_us(0U, start_us);
		LL_ASSERT(hcto == (start_us + 1U));

		hcto += ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
			  lll->window_widening_event_us) << 1) +
			lll->window_size_event_us;
	}

	/* header complete timeout to consider the radio ready delay, chain
	 * delay and access address duration.
	 */
	hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
	hcto += addr_us_get(lll->phy);
	hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);

	/* setup absolute PDU header reception timeout */
	radio_tmr_hcto_configure(hcto);

	/* setup capture of PDU end timestamp */
	radio_tmr_end_capture();

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(lll->phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_cputime_capture();
	}

	/* Calculate ahead the next subevent channel index */
	const uint16_t event_counter = (lll->payload_count / lll->bn) - 1U;

	next_chan_calc(lll, event_counter, data_chan_id);

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_send();
	}
}
1106
/* Done-event bottom half for an ISO event: flushes the sliding window of
 * received BIS payloads up to ULL, substitutes PDUs with invalid status for
 * payloads that were not received, applies any pending BIG Control procedure
 * (terminate or channel map update) and fills the event done extra with
 * drift compensation information before cleaning up the event.
 */
static void isr_rx_done(void *param)
{
	struct node_rx_pdu *node_rx;
	struct event_done_extra *e;
	struct lll_sync_iso *lll;
	uint16_t latency_event;
	uint16_t payload_index;
	uint8_t bis_idx;
	uint8_t bn;

	/* Enqueue PDUs to ULL */
	node_rx = NULL;

	/* Dequeue sliding window */
	lll = param;
	payload_index = lll->payload_tail;

	/* Catchup with ISO event latencies */
	latency_event = lll->latency_event;
	do {
		lll->stream_curr = 0U;
		for (bis_idx = 0U; bis_idx < lll->num_bis; bis_idx++) {
			struct lll_sync_iso_stream *stream;
			uint8_t payload_tail;
			uint8_t stream_curr;
			uint16_t stream_handle;

			stream_handle = lll->stream_handle[lll->stream_curr];
			stream = ull_sync_iso_lll_stream_get(stream_handle);
			/* Skip BIS indices not synchronized. bis_index is 0x01 to 0x1F,
			 * where as bis_idx is 0 indexed.
			 */
			if ((bis_idx + 1U) != stream->bis_index) {
				continue;
			}

			/* Walk the burst number (bn) payload slots of this
			 * BIS in the circular payload buffer.
			 */
			payload_tail = lll->payload_tail;
			bn = lll->bn;
			while (bn--) {
				if (lll->payload[bis_idx][payload_tail]) {
					/* Payload was received; release the
					 * slot and hand the PDU to ULL.
					 */
					node_rx = lll->payload[bis_idx][payload_tail];
					lll->payload[bis_idx][payload_tail] = NULL;

					iso_rx_put(node_rx->hdr.link, node_rx);
				} else {
					/* Check if there are 2 free rx buffers, one
					 * will be consumed to generate PDU with invalid
					 * status, and the other is to ensure a PDU can
					 * be setup for the radio DMA to receive in the
					 * next sub_interval/iso_interval.
					 */
					node_rx = ull_iso_pdu_rx_alloc_peek(2U);
					if (node_rx) {
						struct pdu_bis *pdu;
						uint16_t handle;

						ull_iso_pdu_rx_alloc();

						/* Empty PDU with invalid
						 * status stands in for the
						 * missed payload.
						 */
						pdu = (void *)node_rx->pdu;
						pdu->ll_id = PDU_BIS_LLID_COMPLETE_END;
						pdu->len = 0U;

						handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
						isr_rx_iso_data_invalid(lll, bn, handle, node_rx);

						iso_rx_put(node_rx->hdr.link, node_rx);
					}
				}

				/* Advance with wrap-around inside the
				 * circular payload buffer.
				 */
				payload_index = payload_tail + 1U;
				if (payload_index >= lll->payload_count_max) {
					payload_index = 0U;
				}
				payload_tail = payload_index;
			}

			/* Advance to the next synchronized stream, saturating
			 * at the last stream.
			 */
			stream_curr = lll->stream_curr + 1U;
			if (stream_curr < lll->stream_count) {
				lll->stream_curr = stream_curr;
			}
		}
		lll->payload_tail = payload_index;
	} while (latency_event--);

#if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
	if (node_rx) {
		iso_rx_sched();
	}
#endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	/* Check if BIG terminate procedure received */
	if (lll->term_reason) {
		e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_TERMINATE;

		goto isr_done_cleanup;

	/* Check if BIG Channel Map Update */
	} else if (lll->chm_chan_count) {
		const uint16_t event_counter = lll->payload_count / lll->bn;

		/* Bluetooth Core Specification v5.3 Vol 6, Part B,
		 * Section 5.5.2 BIG Control Procedures
		 *
		 * When a Synchronized Receiver receives such a PDU where
		 * (instant - bigEventCounter) mod 65536 is greater than or
		 * equal to 32767 (because the instant is in the past),
		 * the Link Layer may stop synchronization with the BIG.
		 */

		/* Note: We are not validating whether the control PDU was
		 * received after the instant but apply the new channel map.
		 * If the channel map was new at or after the instant and
		 * the channel at the event counter did not match then the
		 * control PDU would not have been received.
		 */
		if (((event_counter - lll->ctrl_instant) & 0xFFFF) <= 0x7FFF) {
			(void)memcpy(lll->data_chan_map, lll->chm_chan_map,
				     sizeof(lll->data_chan_map));
			lll->data_chan_count = lll->chm_chan_count;
			/* Zero pending channel count marks the channel map
			 * update procedure as consumed.
			 */
			lll->chm_chan_count = 0U;
		}
	}

	/* Calculate and place the drift information in done event */
	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
	e->trx_cnt = trx_cnt;
	e->crc_valid = crc_ok_anchor;

	if (trx_cnt) {
		e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
		e->drift.start_to_address_actual_us =
			radio_tmr_aa_restore() - radio_tmr_ready_restore();
		e->drift.window_widening_event_us =
			lll->window_widening_event_us;

		/* Reset window widening, as anchor point sync-ed */
		lll->window_widening_event_us = 0U;
		lll->window_size_event_us = 0U;
	}

isr_done_cleanup:
	lll_isr_cleanup(param);
}
1253
/* ISR for the radio done event: resets the radio/ISR status and delegates to
 * isr_rx_done() to flush payloads and generate the event done extra. With
 * CONFIG_BT_CTLR_PROFILE_ISR enabled, ISR latency is captured before and CPU
 * time after the status reset, and the profile data is sent at the end.
 */
static void isr_done(void *param)
{
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	lll_isr_status_reset();

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_cputime_capture();
	}

	isr_rx_done(param);

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_send();
	}
}
1272
next_chan_calc(struct lll_sync_iso * lll,uint16_t event_counter,uint16_t data_chan_id)1273 static void next_chan_calc(struct lll_sync_iso *lll, uint16_t event_counter,
1274 uint16_t data_chan_id)
1275 {
1276 /* Calculate ahead the next subevent channel index */
1277 if ((lll->bn_curr < lll->bn) ||
1278 (lll->irc_curr < lll->irc) ||
1279 (lll->ptc_curr < lll->ptc)) {
1280 /* Calculate the radio channel to use for next subevent */
1281 lll->next_chan_use = lll_chan_iso_subevent(data_chan_id,
1282 lll->data_chan_map,
1283 lll->data_chan_count,
1284 &lll->data_chan_prn_s,
1285 &lll->data_chan_remap_idx);
1286 } else if (lll->bis_curr < lll->num_bis) {
1287 uint8_t access_addr[4];
1288
1289 /* Calculate the Access Address for the next BIS subevent */
1290 util_bis_aa_le32((lll->bis_curr + 1U), lll->seed_access_addr,
1291 access_addr);
1292 data_chan_id = lll_chan_id(access_addr);
1293
1294 /* Calculate the radio channel to use for next BIS */
1295 lll->next_chan_use = lll_chan_iso_event(event_counter,
1296 data_chan_id,
1297 lll->data_chan_map,
1298 lll->data_chan_count,
1299 &lll->data_chan_prn_s,
1300 &lll->data_chan_remap_idx);
1301 }
1302 }
1303
isr_rx_iso_data_valid(const struct lll_sync_iso * const lll,uint16_t handle,struct node_rx_pdu * node_rx)1304 static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
1305 uint16_t handle, struct node_rx_pdu *node_rx)
1306 {
1307 struct lll_sync_iso_stream *stream;
1308 struct node_rx_iso_meta *iso_meta;
1309
1310 node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1311 node_rx->hdr.handle = handle;
1312
1313 iso_meta = &node_rx->rx_iso_meta;
1314 iso_meta->payload_number = lll->payload_count + (lll->bn_curr - 1U) +
1315 (lll->ptc_curr * lll->pto) - lll->bn;
1316
1317 stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1318 iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1319 radio_tmr_aa_restore() +
1320 (DIV_ROUND_UP(lll->ptc_curr, lll->bn) *
1321 lll->pto * lll->iso_interval *
1322 PERIODIC_INT_UNIT_US) -
1323 addr_us_get(lll->phy) -
1324 ((stream->bis_index - 1U) *
1325 lll->sub_interval * ((lll->irc * lll->bn) +
1326 lll->ptc));
1327 iso_meta->timestamp %=
1328 HAL_TICKER_TICKS_TO_US(BIT(HAL_TICKER_CNTR_MSBIT + 1U));
1329 iso_meta->status = 0U;
1330 }
1331
isr_rx_iso_data_invalid(const struct lll_sync_iso * const lll,uint8_t bn,uint16_t handle,struct node_rx_pdu * node_rx)1332 static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
1333 uint8_t bn, uint16_t handle,
1334 struct node_rx_pdu *node_rx)
1335 {
1336 struct lll_sync_iso_stream *stream;
1337 struct node_rx_iso_meta *iso_meta;
1338
1339 node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1340 node_rx->hdr.handle = handle;
1341
1342 iso_meta = &node_rx->rx_iso_meta;
1343 iso_meta->payload_number = lll->payload_count - bn - 1U;
1344
1345 stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1346 iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1347 radio_tmr_aa_restore() - addr_us_get(lll->phy) -
1348 ((stream->bis_index - 1U) *
1349 lll->sub_interval * ((lll->irc * lll->bn) +
1350 lll->ptc));
1351 iso_meta->timestamp %=
1352 HAL_TICKER_TICKS_TO_US(BIT(HAL_TICKER_CNTR_MSBIT + 1U));
1353 iso_meta->status = 1U;
1354 }
1355
isr_rx_ctrl_recv(struct lll_sync_iso * lll,struct pdu_bis * pdu)1356 static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu)
1357 {
1358 const uint8_t opcode = pdu->ctrl.opcode;
1359
1360 if (opcode == PDU_BIG_CTRL_TYPE_TERM_IND) {
1361 if (!lll->term_reason) {
1362 struct pdu_big_ctrl_term_ind *term;
1363
1364 term = (void *)&pdu->ctrl.term_ind;
1365 lll->term_reason = term->reason;
1366 lll->ctrl_instant = term->instant;
1367 }
1368 } else if (opcode == PDU_BIG_CTRL_TYPE_CHAN_MAP_IND) {
1369 if (!lll->chm_chan_count) {
1370 struct pdu_big_ctrl_chan_map_ind *chm;
1371 uint8_t chan_count;
1372
1373 chm = (void *)&pdu->ctrl.chan_map_ind;
1374 chan_count =
1375 util_ones_count_get(chm->chm, sizeof(chm->chm));
1376 if (chan_count >= CHM_USED_COUNT_MIN) {
1377 lll->chm_chan_count = chan_count;
1378 (void)memcpy(lll->chm_chan_map, chm->chm,
1379 sizeof(lll->chm_chan_map));
1380 lll->ctrl_instant = chm->instant;
1381 }
1382 }
1383 } else {
1384 /* Unknown control PDU, ignore */
1385 }
1386 }
1387