1 /*
2  * Copyright (c) 2021 Nordic Semiconductor ASA
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #include <stdint.h>
8 #include <string.h>
9 
10 #include <soc.h>
11 #include <zephyr/sys/byteorder.h>
12 #include <zephyr/sys/util.h>
13 #include <zephyr/bluetooth/hci_types.h>
14 
15 #include "hal/cpu.h"
16 #include "hal/ccm.h"
17 #include "hal/radio.h"
18 #include "hal/ticker.h"
19 
20 #include "util/util.h"
21 #include "util/mem.h"
22 #include "util/memq.h"
23 
24 #include "pdu_df.h"
25 #include "lll/pdu_vendor.h"
26 #include "pdu.h"
27 
28 #include "lll.h"
29 #include "lll_vendor.h"
30 #include "lll_clock.h"
31 #include "lll_chan.h"
32 #include "lll_sync_iso.h"
33 
34 #include "lll_internal.h"
35 #include "lll_tim_internal.h"
36 #include "lll_prof_internal.h"
37 
38 #include "ll_feat.h"
39 
40 #include "hal/debug.h"
41 
42 static int init_reset(void);
43 static void prepare(void *param);
44 static void create_prepare_bh(void *param);
45 static void prepare_bh(void *param);
46 static int create_prepare_cb(struct lll_prepare_param *p);
47 static int prepare_cb(struct lll_prepare_param *p);
48 static int prepare_cb_common(struct lll_prepare_param *p);
49 static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb);
50 static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
51 static void isr_rx_estab(void *param);
52 static void isr_rx(void *param);
53 static void isr_rx_done(void *param);
54 static void isr_done(void *param);
55 static uint16_t payload_index_get(const struct lll_sync_iso *lll);
56 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
57 static void next_chan_calc_seq(struct lll_sync_iso *lll, uint16_t event_counter,
58 			       uint16_t data_chan_id);
59 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
60 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
61 static void next_chan_calc_int(struct lll_sync_iso *lll,
62 			       uint16_t event_counter);
63 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
64 static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
65 				  uint16_t handle, struct node_rx_pdu *node_rx);
66 static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
67 				    uint16_t latency, uint8_t bn,
68 				    uint16_t handle,
69 				    struct node_rx_pdu *node_rx);
70 static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu);
71 
/* FIXME: Optimize by moving to a common place, as a similar variable is used
 *        for connections too.
 */
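/* Number of subevents in the current BIG event in which the radio completed
 * reception, and the CRC status of the PDU received at the BIG event anchor
 * point, respectively.
 */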
75 static uint8_t trx_cnt;
76 static uint8_t crc_ok_anchor;
77 
int lll_sync_iso_init(void)
79 {
80 	int err;
81 
82 	err = init_reset();
83 	if (err) {
84 		return err;
85 	}
86 
87 	return 0;
88 }
89 
int lll_sync_iso_reset(void)
91 {
92 	int err;
93 
94 	err = init_reset();
95 	if (err) {
96 		return err;
97 	}
98 
99 	return 0;
100 }
101 
void lll_sync_iso_create_prepare(void *param)
103 {
104 	prepare(param);
105 	create_prepare_bh(param);
106 }
107 
void lll_sync_iso_prepare(void *param)
109 {
110 	prepare(param);
111 	prepare_bh(param);
112 }
113 
void lll_sync_iso_flush(uint8_t handle, struct lll_sync_iso *lll)
115 {
116 	ARG_UNUSED(handle);
117 	ARG_UNUSED(lll);
118 }
119 
static int init_reset(void)
121 {
122 	return 0;
123 }
124 
static void prepare(void *param)
126 {
127 	struct lll_prepare_param *p;
128 	struct lll_sync_iso *lll;
129 	uint16_t elapsed;
130 	int err;
131 
132 	err = lll_hfclock_on();
133 	LL_ASSERT(err >= 0);
134 
135 	p = param;
136 
137 	/* Instants elapsed */
138 	elapsed = p->lazy + 1U;
139 
140 	lll = p->param;
141 
142 	/* Save the (latency + 1) for use in event */
143 	lll->latency_prepare += elapsed;
144 
145 	/* Accumulate window widening */
146 	lll->window_widening_prepare_us += lll->window_widening_periodic_us *
147 					   elapsed;
148 	if (lll->window_widening_prepare_us > lll->window_widening_max_us) {
149 		lll->window_widening_prepare_us = lll->window_widening_max_us;
150 	}
151 }
152 
static void create_prepare_bh(void *param)
154 {
155 	int err;
156 
157 	/* Invoke common pipeline handling of prepare */
158 	err = lll_prepare(is_abort_cb, abort_cb, create_prepare_cb, 0U,
159 			  param);
160 	LL_ASSERT(!err || err == -EINPROGRESS);
161 }
162 
static void prepare_bh(void *param)
164 {
165 	int err;
166 
167 	/* Invoke common pipeline handling of prepare */
168 	err = lll_prepare(is_abort_cb, abort_cb, prepare_cb, 0U, param);
169 	LL_ASSERT(!err || err == -EINPROGRESS);
170 }
171 
static int create_prepare_cb(struct lll_prepare_param *p)
173 {
174 	int err;
175 
176 	err = prepare_cb_common(p);
177 	if (err) {
178 		DEBUG_RADIO_START_O(1);
179 		return 0;
180 	}
181 
182 	radio_isr_set(isr_rx_estab, p->param);
183 
184 	DEBUG_RADIO_START_O(1);
185 	return 0;
186 }
187 
static int prepare_cb(struct lll_prepare_param *p)
189 {
190 	int err;
191 
192 	err = prepare_cb_common(p);
193 	if (err) {
194 		DEBUG_RADIO_START_O(1);
195 		return 0;
196 	}
197 
198 	radio_isr_set(isr_rx, p->param);
199 
200 	DEBUG_RADIO_START_O(1);
201 	return 0;
202 }
203 
static int prepare_cb_common(struct lll_prepare_param *p)
205 {
206 	struct lll_sync_iso_stream *stream;
207 	struct node_rx_pdu *node_rx;
208 	struct lll_sync_iso *lll;
209 	uint32_t ticks_at_event;
210 	uint32_t ticks_at_start;
211 	uint16_t stream_handle;
212 	uint16_t event_counter;
213 	uint8_t access_addr[4];
214 	uint16_t data_chan_id;
215 	uint8_t data_chan_use;
216 	uint32_t remainder_us;
217 	uint8_t crc_init[3];
218 	struct ull_hdr *ull;
219 	uint32_t remainder;
220 	uint32_t hcto;
221 	uint32_t ret;
222 	uint8_t phy;
223 
224 	DEBUG_RADIO_START_O(1);
225 
226 	lll = p->param;
227 
228 	/* Deduce the latency */
229 	lll->latency_event = lll->latency_prepare - 1U;
230 
231 	/* Calculate the current event counter value */
232 	event_counter = (lll->payload_count / lll->bn) + lll->latency_event;
233 
234 	/* Update BIS packet counter to next value */
235 	lll->payload_count += (lll->latency_prepare * lll->bn);
236 
237 	/* Reset accumulated latencies */
238 	lll->latency_prepare = 0U;
239 
240 	/* Current window widening */
241 	lll->window_widening_event_us += lll->window_widening_prepare_us;
242 	lll->window_widening_prepare_us = 0U;
243 	if (lll->window_widening_event_us > lll->window_widening_max_us) {
244 		lll->window_widening_event_us =	lll->window_widening_max_us;
245 	}
246 
247 	/* Initialize trx chain count */
248 	trx_cnt = 0U;
249 
250 	/* Initialize anchor point CRC ok flag */
251 	crc_ok_anchor = 0U;
252 
253 	/* Initialize to mandatory parameter values */
254 	lll->bis_curr = 1U;
255 	lll->ptc_curr = 0U;
256 	lll->irc_curr = 1U;
257 	lll->bn_curr = 1U;
258 
259 	/* Initialize control subevent flag */
260 	lll->ctrl = 0U;
261 
262 	/* Calculate the Access Address for the BIS event */
263 	util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr, access_addr);
264 	data_chan_id = lll_chan_id(access_addr);
265 
266 	/* Calculate the radio channel to use for ISO event and hence store the
267 	 * channel to be used for control subevent.
268 	 */
269 	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
270 					   lll->data_chan_map,
271 					   lll->data_chan_count,
272 					   &lll->data_chan.prn_s,
273 					   &lll->data_chan.remap_idx);
274 
275 	/* Initialize stream current */
276 	lll->stream_curr = 0U;
277 
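	/* Sequential packing groups all nse subevents of a BIS together, so
	 * the BIS spacing spans at least nse sub_intervals; otherwise the BIG
	 * uses interleaved packing.
	 */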
278 	const bool is_sequential_packing = (lll->bis_spacing >= (lll->sub_interval * lll->nse));
279 
280 	/* Skip subevents until first selected BIS */
281 	stream_handle = lll->stream_handle[lll->stream_curr];
282 	stream = ull_sync_iso_lll_stream_get(stream_handle);
283 	if ((stream->bis_index != lll->bis_curr) &&
284 	    (stream->bis_index <= lll->num_bis)) {
285 		/* First selected BIS */
286 		lll->bis_curr = stream->bis_index;
287 
288 		/* Calculate the Access Address for the current BIS */
289 		util_bis_aa_le32(lll->bis_curr, lll->seed_access_addr,
290 				 access_addr);
291 		data_chan_id = lll_chan_id(access_addr);
292 
293 		/* Calculate the channel id for the next BIS subevent */
294 		if (false) {
295 
296 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
297 		} else if (is_sequential_packing) {
298 			data_chan_use = lll_chan_iso_event(event_counter,
299 							   data_chan_id,
300 							   lll->data_chan_map,
301 							   lll->data_chan_count,
302 							   &lll->data_chan.prn_s,
303 							   &lll->data_chan.remap_idx);
304 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
305 
306 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
307 		} else if (!is_sequential_packing) {
308 			struct lll_sync_iso_data_chan_interleaved *interleaved_data_chan;
309 
310 			interleaved_data_chan =
311 				&lll->interleaved_data_chan[lll->bis_curr - 1U];
312 
313 			data_chan_use = lll_chan_iso_event(event_counter,
314 							   data_chan_id,
315 							   lll->data_chan_map,
316 							   lll->data_chan_count,
317 							   &interleaved_data_chan->prn_s,
318 							   &interleaved_data_chan->remap_idx);
319 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
320 
321 		} else {
322 			LL_ASSERT(false);
323 		}
324 	}
325 
	/* Calculate the CRC init value for the BIS event: the BaseCRCInit
	 * value from the BIGInfo occupies the most significant 2 octets and
	 * the BIS_Number for the specific BIS the least significant octet.
	 */
331 	crc_init[0] = lll->bis_curr;
332 	(void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));
333 
334 	/* Start setting up of Radio h/w */
335 	radio_reset();
336 
337 	phy = lll->phy;
338 	radio_phy_set(phy, PHY_FLAGS_S8);
339 	radio_aa_set(access_addr);
340 	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));
341 	lll_chan_set(data_chan_use);
342 
343 	/* By design, there shall always be one free node rx available for
344 	 * setting up radio for new PDU reception.
345 	 */
346 	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
347 	LL_ASSERT(node_rx);
348 
349 	/* Encryption */
350 	if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
351 	    lll->enc) {
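		/* The CCM packet counter is the payload number of the first
		 * payload expected in this event; payload_count was already
		 * advanced by bn above, hence subtract bn. The first four
		 * octets of the nonce IV are the GIV XOR-ed with the BIS
		 * Access Address.
		 */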
352 		uint64_t payload_count;
353 		uint8_t pkt_flags;
354 
355 		payload_count = lll->payload_count - lll->bn;
356 		lll->ccm_rx.counter = payload_count;
357 
358 		(void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
359 		mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);
360 
361 		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
362 						 phy,
363 						 RADIO_PKT_CONF_CTE_DISABLED);
364 		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
365 				    (lll->max_pdu + PDU_MIC_SIZE), pkt_flags);
366 		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, phy,
367 							  RADIO_PKT_CONF_PDU_TYPE_BIS,
368 							  node_rx->pdu));
369 	} else {
370 		uint8_t pkt_flags;
371 
372 		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
373 						 phy,
374 						 RADIO_PKT_CONF_CTE_DISABLED);
375 		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, lll->max_pdu,
376 				    pkt_flags);
377 		radio_pkt_rx_set(node_rx->pdu);
378 	}
379 
380 	radio_switch_complete_and_disable();
381 
382 	ticks_at_event = p->ticks_at_expire;
383 	ull = HDR_LLL2ULL(lll);
384 	ticks_at_event += lll_event_offset_get(ull);
385 
386 	ticks_at_start = ticks_at_event;
387 	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
388 
389 	remainder = p->remainder;
390 	remainder_us = radio_tmr_start(0U, ticks_at_start, remainder);
391 
392 	radio_tmr_ready_save(remainder_us);
393 	radio_tmr_aa_save(0U);
394 	radio_tmr_aa_capture();
395 
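	/* First reception timeout: open the receive window by twice the sum
	 * of the scheduling jitter, ticker resolution margin and accumulated
	 * window widening, plus the event window size, then account below for
	 * the radio ready delay, access address duration and Rx chain delay.
	 */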
396 	hcto = remainder_us +
397 	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
398 		 lll->window_widening_event_us) << 1) +
399 	       lll->window_size_event_us;
400 	hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
401 	hcto += addr_us_get(lll->phy);
402 	hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
403 	radio_tmr_hcto_configure(hcto);
404 
405 	radio_tmr_end_capture();
406 	radio_rssi_measure();
407 
408 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
409 	radio_gpio_lna_setup();
410 
411 	radio_gpio_pa_lna_enable(remainder_us +
412 				 radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8) -
413 				 HAL_RADIO_GPIO_LNA_OFFSET);
414 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
415 
416 #if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
417 	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
418 	uint32_t overhead;
419 
420 	overhead = lll_preempt_calc(ull, (TICKER_ID_SCAN_SYNC_ISO_BASE +
421 					  ull_sync_iso_lll_index_get(lll)), ticks_at_event);
422 	/* check if preempt to start has changed */
423 	if (overhead) {
424 		LL_ASSERT_OVERHEAD(overhead);
425 
426 		radio_isr_set(lll_isr_abort, lll);
427 		radio_disable();
428 
429 		return -ECANCELED;
430 	}
431 #endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
432 
433 	ret = lll_prepare_done(lll);
434 	LL_ASSERT(!ret);
435 
436 	/* Calculate ahead the next subevent channel index */
437 	if (false) {
438 
439 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
440 	} else if (is_sequential_packing) {
441 		next_chan_calc_seq(lll, event_counter, data_chan_id);
442 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
443 
444 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
445 	} else if (!is_sequential_packing) {
446 		struct lll_sync_iso_data_chan_interleaved *interleaved_data_chan;
447 
448 		interleaved_data_chan =
449 			&lll->interleaved_data_chan[lll->bis_curr - 1U];
450 		interleaved_data_chan->id = data_chan_id;
451 
452 		next_chan_calc_int(lll, event_counter);
453 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
454 
455 	} else {
456 		LL_ASSERT(false);
457 	}
458 
459 	return 0;
460 }
461 
static int is_abort_cb(void *next, void *curr, lll_prepare_cb_t *resume_cb)
463 {
464 	if (next != curr) {
465 		struct lll_sync_iso *lll;
466 
467 		lll = curr;
468 		if (lll->bn_curr <= lll->bn) {
469 			return 0;
470 		}
471 	}
472 
473 	return -ECANCELED;
474 }
475 
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
477 {
478 	struct event_done_extra *e;
479 	int err;
480 
481 	/* NOTE: This is not a prepare being cancelled */
482 	if (!prepare_param) {
483 		radio_isr_set(isr_done, param);
484 		radio_disable();
485 		return;
486 	}
487 
488 	/* NOTE: Else clean the top half preparations of the aborted event
489 	 * currently in preparation pipeline.
490 	 */
491 	err = lll_hfclock_off();
492 	LL_ASSERT(err >= 0);
493 
494 	/* Extra done event, to check sync lost */
495 	e = ull_event_done_extra_get();
496 	LL_ASSERT(e);
497 
498 	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
499 	e->estab_failed = 0U;
500 	e->trx_cnt = 0U;
501 	e->crc_valid = 0U;
502 
503 	lll_done(param);
504 }
505 
static void isr_rx_estab(void *param)
507 {
508 	struct event_done_extra *e;
509 	struct lll_sync_iso *lll;
510 	uint8_t trx_done;
511 	uint8_t crc_ok;
512 
513 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
514 		lll_prof_latency_capture();
515 	}
516 
517 	/* Read radio status and events */
518 	trx_done = radio_is_done();
519 	if (trx_done) {
520 		crc_ok = radio_crc_is_valid();
521 		trx_cnt++;
522 	} else {
523 		crc_ok = 0U;
524 	}
525 
526 	/* Clear radio rx status and events */
527 	lll_isr_rx_status_reset();
528 
529 	/* Get reference to LLL context */
530 	lll = param;
531 
532 	/* Check for MIC failures for encrypted Broadcast ISO streams */
533 	if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) && crc_ok && lll->enc) {
534 		struct node_rx_pdu *node_rx;
535 		struct pdu_bis *pdu;
536 
537 		/* By design, there shall always be one free node rx available when setting up radio
538 		 * for new PDU reception.
539 		 */
540 		node_rx = ull_iso_pdu_rx_alloc_peek(1U);
541 		LL_ASSERT(node_rx);
542 
543 		/* Get reference to received PDU and validate MIC for non-empty PDU */
544 		pdu = (void *)node_rx->pdu;
545 		if (pdu->len) {
546 			bool mic_failure;
547 			uint32_t done;
548 
549 			done = radio_ccm_is_done();
550 			LL_ASSERT(done);
551 
552 			mic_failure = !radio_ccm_mic_is_valid();
553 			if (mic_failure) {
554 				lll->term_reason = BT_HCI_ERR_TERM_DUE_TO_MIC_FAIL;
555 			}
556 		}
557 	}
558 
559 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
560 		lll_prof_cputime_capture();
561 	}
562 
563 	/* Calculate and place the drift information in done event */
564 	e = ull_event_done_extra_get();
565 	LL_ASSERT(e);
566 
567 	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_ESTAB;
568 	e->estab_failed = lll->term_reason ? 1U : 0U;
569 	e->trx_cnt = trx_cnt;
570 	e->crc_valid = crc_ok;
571 
572 	if (trx_cnt) {
573 		e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
574 		e->drift.start_to_address_actual_us =
575 			radio_tmr_aa_get() - radio_tmr_ready_get();
576 		e->drift.window_widening_event_us =
577 			lll->window_widening_event_us;
578 
579 		/* Reset window widening, as anchor point sync-ed */
580 		lll->window_widening_event_us = 0U;
581 		lll->window_size_event_us = 0U;
582 	}
583 
584 	lll_isr_cleanup(param);
585 
586 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
587 		lll_prof_send();
588 	}
589 }
590 
static void isr_rx(void *param)
592 {
593 	struct lll_sync_iso_stream *stream;
594 	struct lll_sync_iso *lll;
595 	uint8_t access_addr[4];
596 	uint16_t data_chan_id;
597 	uint8_t data_chan_use;
598 	uint8_t crc_init[3];
599 	uint8_t stream_curr;
600 	uint8_t rssi_ready;
601 	uint32_t start_us;
602 	uint8_t new_burst;
603 	uint8_t trx_done;
604 	uint8_t bis_idx;
605 	uint8_t skipped;
606 	uint8_t crc_ok;
607 	uint32_t hcto;
608 	uint8_t bis;
609 	uint8_t nse;
610 
611 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
612 		lll_prof_latency_capture();
613 	}
614 
615 	/* initialize LLL context reference */
616 	lll = param;
617 
618 	const bool is_sequential_packing = (lll->bis_spacing >= (lll->sub_interval * lll->nse));
619 
620 	/* Read radio status and events */
621 	trx_done = radio_is_done();
622 	if (!trx_done) {
623 		/* Clear radio rx status and events */
624 		lll_isr_rx_status_reset();
625 
626 		/* BIS index */
627 		bis_idx = lll->bis_curr - 1U;
628 
629 		/* Current stream */
630 		stream_curr = lll->stream_curr;
631 
632 		goto isr_rx_done;
633 	}
634 
635 	crc_ok = radio_crc_is_valid();
636 	rssi_ready = radio_rssi_is_ready();
637 	trx_cnt++;
638 
639 	/* Save the AA captured for the first anchor point sync */
640 	if (!radio_tmr_aa_restore()) {
641 		const struct lll_sync_iso_stream *sync_stream;
642 		uint32_t se_offset_us;
643 
644 		crc_ok_anchor = crc_ok;
645 
646 		sync_stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
647 
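		/* Compute the subevent index at which sync was achieved so
		 * that the captured AA and ready timestamps can be back-dated
		 * to the BIG event anchor point (subevent zero).
		 */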
648 		if (IS_ENABLED(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL) &&
649 		    is_sequential_packing) {
650 			uint8_t se;
651 
652 			se = ((lll->bis_curr - sync_stream->bis_index) *
653 			      ((lll->bn * lll->irc) + lll->ptc)) +
654 			     ((lll->irc_curr - 1U) * lll->bn) +
655 			     (lll->bn_curr - 1U) + lll->ptc_curr + lll->ctrl;
656 			se_offset_us = lll->sub_interval * se;
657 		} else if (IS_ENABLED(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED) &&
658 			   !is_sequential_packing) {
659 			uint8_t se;
660 
661 			se = (lll->bis_curr - sync_stream->bis_index) +
662 			     ((((lll->irc_curr - 1U) * lll->bn) +
663 			       (lll->bn_curr - 1U) + lll->ptc_curr) *
664 			      lll->num_bis) + lll->ctrl;
665 			se_offset_us = lll->bis_spacing * se;
666 		} else {
667 			se_offset_us = 0U;
668 
669 			LL_ASSERT(false);
670 		}
671 
672 		radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
673 		radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
674 	}
675 
676 	/* Clear radio rx status and events */
677 	lll_isr_rx_status_reset();
678 
679 	/* BIS index */
680 	bis_idx = lll->bis_curr - 1U;
681 
682 	/* Current stream */
683 	stream_curr = lll->stream_curr;
684 
685 	/* Check CRC and generate ISO Data PDU */
686 	if (crc_ok) {
687 		struct lll_sync_iso_stream *sync_stream;
688 		struct node_rx_pdu *node_rx;
689 		uint32_t payload_offset;
690 		uint16_t payload_index;
691 		uint16_t stream_handle;
692 		struct pdu_bis *pdu;
693 
694 		/* Check if Control Subevent being received */
695 		if ((lll->bn_curr == lll->bn) &&
696 		    (lll->irc_curr == lll->irc) &&
697 		    (lll->ptc_curr == lll->ptc) &&
698 		    (lll->bis_curr == lll->num_bis) &&
699 		    lll->ctrl) {
700 			lll->cssn_curr = lll->cssn_next;
701 
702 			/* Check the dedicated Control PDU buffer */
703 			pdu = radio_pkt_big_ctrl_get();
704 			if (pdu->ll_id == PDU_BIS_LLID_CTRL) {
705 				isr_rx_ctrl_recv(lll, pdu);
706 			}
707 
708 			goto isr_rx_done;
709 		} else {
710 			/* Check if there are 2 free rx buffers, one will be
711 			 * consumed to receive the current PDU, and the other
712 			 * is to ensure a PDU can be setup for the radio DMA to
713 			 * receive in the next sub_interval/iso_interval.
714 			 */
715 			node_rx = ull_iso_pdu_rx_alloc_peek(2U);
716 			if (!node_rx) {
717 				goto isr_rx_done;
718 			}
719 		}
720 
721 		pdu = (void *)node_rx->pdu;
722 
723 		/* Check for new control PDU in control subevent */
724 		if (pdu->cstf && (pdu->cssn != lll->cssn_curr)) {
725 			lll->cssn_next = pdu->cssn;
726 			/* TODO: check same CSSN is used in every subevent */
727 		}
728 
		/* Check for payload buffer overflow.
		 * Ensure the offset stays below payload_count_max, the value
		 * used to allocate the buffers.
		 */
733 		payload_offset = (lll->latency_event * lll->bn) + payload_index_get(lll);
734 		if (payload_offset >= lll->payload_count_max) {
735 			goto isr_rx_done;
736 		}
737 
738 		/* Calculate the payload index in the sliding window */
739 		payload_index = lll->payload_tail + payload_offset;
740 		if (payload_index >= lll->payload_count_max) {
741 			payload_index -= lll->payload_count_max;
742 		}
743 
744 		/* Get reference to stream context */
745 		stream_handle = lll->stream_handle[stream_curr];
746 		sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
747 
		/* Store the received PDU if it belongs to a selected stream
		 * and was not already received (say, in a previous event as a
		 * pre-transmitted PDU).
		 */
751 		if ((lll->bis_curr == sync_stream->bis_index) && pdu->len &&
752 		    !lll->payload[stream_curr][payload_index]) {
753 			uint16_t handle;
754 
755 			if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
756 			    lll->enc) {
757 				bool mic_failure;
758 				uint32_t done;
759 
760 				done = radio_ccm_is_done();
761 				LL_ASSERT(done);
762 
763 				mic_failure = !radio_ccm_mic_is_valid();
764 				if (mic_failure) {
765 					lll->term_reason = BT_HCI_ERR_TERM_DUE_TO_MIC_FAIL;
766 
767 					goto isr_rx_mic_failure;
768 				}
769 			}
770 
771 			ull_iso_pdu_rx_alloc();
772 
773 			handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
774 			isr_rx_iso_data_valid(lll, handle, node_rx);
775 
776 			lll->payload[stream_curr][payload_index] = node_rx;
777 		}
778 	}
779 
780 isr_rx_done:
781 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
782 		lll_prof_cputime_capture();
783 	}
784 
785 	uint8_t bis_idx_old = bis_idx;
786 
787 	new_burst = 0U;
788 	skipped = 0U;
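
	/* Walk the remaining subevents of this BIG event to find the next one
	 * whose payload is still missing. 'skipped' counts the subevents
	 * passed over so that the channel selection below can be advanced
	 * accordingly, and 'new_burst' flags that a whole burst was already
	 * received so successive repetitions can be skipped.
	 */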
789 
790 isr_rx_find_subevent:
791 	/* Sequential or Interleaved BIS subevents decision */
792 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
793 	if (!is_sequential_packing) {
794 		goto isr_rx_interleaved;
795 	}
796 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
797 
798 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
799 	/* NOTE: below code is for Sequential Rx only */
800 
801 	/* Find the next (bn_curr)th subevent to receive PDU */
802 	while (lll->bn_curr < lll->bn) {
803 		uint32_t payload_offset;
804 		uint16_t payload_index;
805 
806 		/* Next burst number to check for reception required */
807 		lll->bn_curr++;
808 
		/* Check for payload buffer overflow.
		 * Ensure the offset stays below payload_count_max, the value
		 * used to allocate the buffers.
		 */
813 		payload_offset = (lll->latency_event * lll->bn) + (lll->bn_curr - 1U);
814 		if (payload_offset >= lll->payload_count_max) {
815 			/* (bn_curr)th Rx PDU skip subevent */
816 			skipped++;
817 
818 			continue;
819 		}
820 
821 		/* Find the index of the (bn_curr)th Rx PDU buffer */
822 		payload_index = lll->payload_tail + payload_offset;
823 		if (payload_index >= lll->payload_count_max) {
824 			payload_index -= lll->payload_count_max;
825 		}
826 
827 		/* Check if (bn_curr)th Rx PDU has been received */
828 		if (!lll->payload[stream_curr][payload_index]) {
829 			/* Receive the (bn_curr)th Rx PDU of bis_curr */
830 			bis = lll->bis_curr;
831 
832 			goto isr_rx_next_subevent;
833 		}
834 
835 		/* (bn_curr)th Rx PDU already received, skip subevent */
836 		skipped++;
837 	}
838 
839 	/* Find the next repetition (irc_curr)th subevent to receive PDU */
840 	if (lll->irc_curr < lll->irc) {
841 		if (!new_burst) {
842 			uint32_t payload_offset;
843 			uint16_t payload_index;
844 
845 			/* Increment to next repetition count and be at first
846 			 * burst count for it.
847 			 */
848 			lll->bn_curr = 1U;
849 			lll->irc_curr++;
850 
851 			/* Check payload buffer overflow */
			/* FIXME: Typically we should not have high latency, but we do have an
			 *        assertion check to ensure we do not roll over in the payload_index
			 *        variable use. Alternatively, add an implementation to correctly
			 *        skip subevents as buffers at these high offsets are unavailable.
857 			payload_offset = (lll->latency_event * lll->bn);
858 			LL_ASSERT(payload_offset <= UINT8_MAX);
859 
860 			/* Find the index of the (irc_curr)th bn = 1 Rx PDU
861 			 * buffer.
862 			 */
863 			payload_index = lll->payload_tail + payload_offset;
864 			if (payload_index >= lll->payload_count_max) {
865 				payload_index -= lll->payload_count_max;
866 			}
867 
868 			/* Check if (irc_curr)th bn = 1 Rx PDU has been
869 			 * received.
870 			 */
871 			if (!lll->payload[stream_curr][payload_index]) {
872 				/* Receive the (irc_curr)th bn = 1 Rx PDU of
873 				 * bis_curr.
874 				 */
875 				bis = lll->bis_curr;
876 
877 				goto isr_rx_next_subevent;
878 			} else {
879 				/* bn = 1 Rx PDU already received, skip
880 				 * subevent.
881 				 */
882 				skipped++;
883 
884 				/* flag to skip successive repetitions if all
885 				 * bn PDUs have been received. i.e. the bn
886 				 * loop above did not find a PDU to be received.
887 				 */
888 				new_burst = 1U;
889 
890 				/* Find the missing (bn_curr)th Rx PDU of
891 				 * bis_curr
892 				 */
893 				goto isr_rx_find_subevent;
894 			}
895 		} else {
896 			/* Skip all successive repetition reception as all
897 			 * bn PDUs have been received.
898 			 */
899 			skipped += (lll->irc - lll->irc_curr) * lll->bn;
900 			lll->irc_curr = lll->irc;
901 		}
902 	}
903 
904 	/* Next pre-transmission subevent */
905 	if (lll->ptc_curr < lll->ptc) {
906 		lll->ptc_curr++;
907 
908 		/* TODO: optimize to skip pre-transmission subevent in case
909 		 * of insufficient buffers in sliding window.
910 		 */
911 
912 		/* Receive the (ptc_curr)th Rx PDU of bis_curr */
913 		bis = lll->bis_curr;
914 
915 		goto isr_rx_next_subevent;
916 	}
917 
918 	/* Next BIS */
919 	if (lll->bis_curr < lll->num_bis) {
920 		/* Next selected stream */
921 		if ((lll->stream_curr + 1U) < lll->stream_count) {
922 			struct lll_sync_iso_stream *sync_stream;
923 			uint16_t stream_handle;
924 
925 			stream_curr = ++lll->stream_curr;
926 			stream_handle = lll->stream_handle[stream_curr];
927 			sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
928 			if (sync_stream->bis_index <= lll->num_bis) {
929 				uint32_t payload_offset;
930 				uint16_t payload_index;
931 				uint8_t bis_idx_new;
932 
933 				lll->bis_curr = sync_stream->bis_index;
934 				lll->bn_curr = 1U;
935 				lll->irc_curr = 1U;
936 				lll->ptc_curr = 0U;
937 
938 				/* new BIS index */
939 				bis_idx_new = lll->bis_curr - 1U;
940 
941 				/* Check payload buffer overflow */
942 				/* FIXME: Typically we should not have high latency, but do have an
943 				 *        assertion check to ensure we do not rollover in the
944 				 *        payload_index variable use. Alternatively, add
945 				 *        implementation to correctly skip subevents as buffers at
946 				 *        these high offsets are unavailable.
947 				 */
948 				payload_offset = (lll->latency_event * lll->bn);
949 				LL_ASSERT(payload_offset <= UINT8_MAX);
950 
951 				/* Find the index of the (irc_curr)th bn = 1 Rx
952 				 * PDU buffer.
953 				 */
954 				payload_index = lll->payload_tail + payload_offset;
955 				if (payload_index >= lll->payload_count_max) {
956 					payload_index -= lll->payload_count_max;
957 				}
958 
959 				/* Check if (irc_curr)th bn = 1 Rx PDU has been
960 				 * received.
961 				 */
962 				if (!lll->payload[stream_curr][payload_index]) {
963 					/* bn = 1 Rx PDU not received */
964 					skipped = (bis_idx_new - bis_idx) *
965 						  ((lll->bn * lll->irc) +
966 						   lll->ptc);
967 
968 					/* Receive the (irc_curr)th bn = 1 Rx
969 					 * PDU of bis_curr.
970 					 */
971 					bis = lll->bis_curr;
972 
973 					goto isr_rx_next_subevent;
974 				} else {
975 					/* bn = 1 Rx PDU already received, skip
976 					 * subevent.
977 					 */
978 					skipped = ((bis_idx_new - bis_idx) *
979 						   ((lll->bn * lll->irc) +
980 						    lll->ptc)) + 1U;
981 
982 					/* BIS index */
983 					bis_idx = lll->bis_curr - 1U;
984 
985 					/* Find the missing (bn_curr)th Rx PDU
986 					 * of bis_curr
987 					 */
988 					goto isr_rx_find_subevent;
989 				}
990 			} else {
991 				lll->bis_curr = lll->num_bis;
992 			}
993 		} else {
994 			lll->bis_curr = lll->num_bis;
995 		}
996 	}
997 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
998 
999 	goto isr_rx_ctrl;
1000 
1001 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
1002 isr_rx_interleaved:
1003 	/* Next BIS */
1004 	if (lll->bis_curr < lll->num_bis) {
1005 		/* Next selected stream */
1006 		if ((lll->stream_curr + 1U) < lll->stream_count) {
1007 			struct lll_sync_iso_stream *sync_stream;
1008 			uint16_t stream_handle;
1009 
1010 			stream_curr = ++lll->stream_curr;
1011 			stream_handle = lll->stream_handle[stream_curr];
1012 			sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
1013 			if (sync_stream->bis_index <= lll->num_bis) {
1014 				uint8_t payload_offset;
1015 				uint8_t payload_index;
1016 
1017 				lll->bis_curr = sync_stream->bis_index;
1018 
1019 				/* Check payload buffer overflow */
1020 				payload_offset = (lll->bn_curr - 1U) +
1021 						 (lll->ptc_curr * lll->pto);
1022 				if (payload_offset > lll->payload_count_max) {
1023 					const uint16_t event_counter =
1024 						(lll->payload_count / lll->bn) - 1U;
1025 
1026 					next_chan_calc_int(lll, event_counter);
1027 
1028 					goto isr_rx_interleaved;
1029 				}
1030 
1031 				/* Find the index of the (bn_curr)th Rx PDU buffer */
1032 				payload_index = lll->payload_tail + payload_offset;
1033 				if (payload_index >= lll->payload_count_max) {
1034 					payload_index -= lll->payload_count_max;
1035 				}
1036 
1037 				/* Check if (bn_curr)th Rx PDU has been received */
1038 				if (lll->payload[stream_curr][payload_index]) {
1039 					const uint16_t event_counter =
1040 						(lll->payload_count / lll->bn) - 1U;
1041 
1042 					next_chan_calc_int(lll, event_counter);
1043 
1044 					goto isr_rx_interleaved;
1045 				}
1046 
1047 				bis = lll->bis_curr;
1048 
1049 				goto isr_rx_next_subevent;
1050 			} else {
1051 				lll->bis_curr = lll->num_bis;
1052 			}
1053 		} else {
1054 			lll->bis_curr = lll->num_bis;
1055 		}
1056 	}
1057 
1058 	if (lll->bis_curr >= lll->num_bis) {
1059 		struct lll_sync_iso_stream *sync_stream;
1060 		uint16_t stream_handle;
1061 
1062 		lll->stream_curr = 0U;
1063 		stream_curr = 0U;
1064 		stream_handle = lll->stream_handle[stream_curr];
1065 		sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
1066 		if (sync_stream->bis_index <= lll->num_bis) {
1067 			lll->bis_curr = sync_stream->bis_index;
1068 			bis_idx = lll->bis_curr - 1U;
1069 		} else {
1070 			LL_ASSERT(false);
1071 		}
1072 	}
1073 
1074 	if (lll->bn_curr < lll->bn) {
1075 		uint8_t payload_offset;
1076 		uint8_t payload_index;
1077 
1078 		lll->bn_curr++;
1079 
1080 		/* Check payload buffer overflow */
1081 		payload_offset = (lll->bn_curr - 1U);
1082 		if (payload_offset > lll->payload_count_max) {
1083 			const uint16_t event_counter =
1084 				(lll->payload_count / lll->bn) - 1U;
1085 
1086 			next_chan_calc_int(lll, event_counter);
1087 
1088 			goto isr_rx_interleaved;
1089 		}
1090 
1091 		/* Find the index of the (bn_curr)th Rx PDU buffer */
1092 		payload_index = lll->payload_tail + payload_offset;
1093 		if (payload_index >= lll->payload_count_max) {
1094 			payload_index -= lll->payload_count_max;
1095 		}
1096 
1097 		/* Check if (bn_curr)th Rx PDU has been received */
1098 		if (lll->payload[stream_curr][payload_index]) {
1099 			const uint16_t event_counter =
1100 				(lll->payload_count / lll->bn) - 1U;
1101 
1102 			next_chan_calc_int(lll, event_counter);
1103 
1104 			goto isr_rx_interleaved;
1105 		}
1106 
1107 		bis = lll->bis_curr;
1108 
1109 		goto isr_rx_next_subevent;
1110 	}
1111 
1112 	if (lll->irc_curr < lll->irc) {
1113 		uint8_t payload_index;
1114 
1115 		lll->irc_curr++;
1116 		lll->bn_curr = 1U;
1117 
1118 		/* Find the index of the (irc_curr)th bn = 1 Rx PDU
1119 		 * buffer.
1120 		 */
1121 		payload_index = lll->payload_tail;
1122 
1123 		/* Check if (irc_curr)th bn = 1 Rx PDU has been
1124 		 * received.
1125 		 */
1126 		if (lll->payload[stream_curr][payload_index]) {
1127 			const uint16_t event_counter =
1128 				(lll->payload_count / lll->bn) - 1U;
1129 
1130 			next_chan_calc_int(lll, event_counter);
1131 
1132 			goto isr_rx_interleaved;
1133 		}
1134 
1135 		bis = lll->bis_curr;
1136 
1137 		goto isr_rx_next_subevent;
1138 	}
1139 
1140 	if (lll->ptc_curr < lll->ptc) {
1141 		lll->ptc_curr++;
1142 
1143 		bis = lll->bis_curr;
1144 
1145 		goto isr_rx_next_subevent;
1146 	}
1147 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
1148 
1149 isr_rx_ctrl:
1150 	/* Control subevent */
1151 	if (!lll->ctrl && (lll->cssn_next != lll->cssn_curr)) {
1152 		uint8_t pkt_flags;
1153 
		/* Receive the control PDU and close the BIG event
		 * thereafter.
		 */
1157 		lll->ctrl = 1U;
1158 
1159 		/* control subevent to use bis = 0 and se_n = 1 */
1160 		bis = 0U;
1161 
		/* Configure the radio to receive the Control PDU, which can
		 * be longer than max_pdu.
		 */
1165 		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_BIS,
1166 						 lll->phy,
1167 						 RADIO_PKT_CONF_CTE_DISABLED);
1168 		if (lll->enc) {
1169 			radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
1170 					    (sizeof(struct pdu_big_ctrl) + PDU_MIC_SIZE),
1171 					    pkt_flags);
1172 		} else {
1173 			radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
1174 					    sizeof(struct pdu_big_ctrl),
1175 					    pkt_flags);
1176 		}
1177 
1178 		goto isr_rx_next_subevent;
1179 	}
1180 
1181 isr_rx_mic_failure:
1182 	isr_rx_done(param);
1183 
1184 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1185 		lll_prof_send();
1186 	}
1187 
1188 	return;
1189 
1190 isr_rx_next_subevent:
1191 	/* Calculate the Access Address for the BIS event */
1192 	util_bis_aa_le32(bis, lll->seed_access_addr, access_addr);
1193 	data_chan_id = lll_chan_id(access_addr);
1194 
	/* Calculate the CRC init value for the BIS event: the BaseCRCInit
	 * value from the BIGInfo occupies the most significant 2 octets and
	 * the BIS_Number for the specific BIS the least significant octet.
	 */
1200 	crc_init[0] = bis;
1201 	(void)memcpy(&crc_init[1], lll->base_crc_init, sizeof(uint16_t));
1202 
1203 	radio_aa_set(access_addr);
1204 	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(crc_init));
1205 
1206 	/* Set the channel to use */
1207 	if (!bis) {
1208 		const uint16_t event_counter =
1209 				(lll->payload_count / lll->bn) - 1U;
1210 
1211 		/* Calculate the radio channel to use for ISO event */
1212 		data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
1213 						   lll->data_chan_map,
1214 						   lll->data_chan_count,
1215 						   &lll->data_chan.prn_s,
1216 						   &lll->data_chan.remap_idx);
1217 	} else if (!skipped) {
1218 		data_chan_use = lll->next_chan_use;
1219 	} else {
1220 		uint8_t bis_idx_new = lll->bis_curr - 1U;
1221 
1222 		/* Initialise to avoid compile error */
1223 		data_chan_use = 0U;
1224 
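		/* Subevents were skipped: re-seed the channel selection at
		 * the event level when a new BIS was crossed, then advance
		 * the subevent channel selection once per remaining skipped
		 * subevent so the channel matches the subevent about to be
		 * received.
		 */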
1225 		if (bis_idx_old != bis_idx_new) {
1226 			const uint16_t event_counter =
1227 				(lll->payload_count / lll->bn) - 1U;
1228 
1229 			/* Calculate the radio channel to use for next BIS */
1230 			data_chan_use = lll_chan_iso_event(event_counter,
1231 						data_chan_id,
1232 						lll->data_chan_map,
1233 						lll->data_chan_count,
1234 						&lll->data_chan.prn_s,
1235 						&lll->data_chan.remap_idx);
1236 
1237 			skipped -= (bis_idx_new - bis_idx_old) *
1238 				   ((lll->bn * lll->irc) + lll->ptc);
1239 		}
1240 
1241 		while (skipped--) {
1242 			/* Calculate the radio channel to use for subevent */
1243 			data_chan_use = lll_chan_iso_subevent(data_chan_id,
1244 						lll->data_chan_map,
1245 						lll->data_chan_count,
1246 						&lll->data_chan.prn_s,
1247 						&lll->data_chan.remap_idx);
1248 		}
1249 	}
1250 
1251 	lll_chan_set(data_chan_use);
1252 
1253 	/* Encryption */
1254 	if (IS_ENABLED(CONFIG_BT_CTLR_BROADCAST_ISO_ENC) &&
1255 	    lll->enc) {
1256 		uint64_t payload_count;
1257 		struct pdu_bis *pdu;
1258 
1259 		payload_count = lll->payload_count - lll->bn;
1260 		if (bis) {
1261 			struct node_rx_pdu *node_rx;
1262 
1263 			payload_count += payload_index_get(lll);
1264 
1265 			/* By design, there shall always be one free node rx
1266 			 * available for setting up radio for new PDU reception.
1267 			 */
1268 			node_rx = ull_iso_pdu_rx_alloc_peek(1U);
1269 			LL_ASSERT(node_rx);
1270 
1271 			pdu = (void *)node_rx->pdu;
1272 		} else {
1273 			/* Use the dedicated Control PDU buffer */
1274 			pdu = radio_pkt_big_ctrl_get();
1275 		}
1276 
1277 		lll->ccm_rx.counter = payload_count;
1278 
1279 		(void)memcpy(lll->ccm_rx.iv, lll->giv, 4U);
1280 		mem_xor_32(lll->ccm_rx.iv, lll->ccm_rx.iv, access_addr);
1281 
1282 		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&lll->ccm_rx, lll->phy,
1283 							  RADIO_PKT_CONF_PDU_TYPE_BIS,
1284 							  pdu));
1285 
1286 	} else {
1287 		struct pdu_bis *pdu;
1288 
1289 		if (bis) {
1290 			struct node_rx_pdu *node_rx;
1291 
1292 			/* By design, there shall always be one free node rx
1293 			 * available for setting up radio for new PDU reception.
1294 			 */
1295 			node_rx = ull_iso_pdu_rx_alloc_peek(1U);
1296 			LL_ASSERT(node_rx);
1297 
1298 			pdu = (void *)node_rx->pdu;
1299 		} else {
1300 			/* Use the dedicated Control PDU buffer */
1301 			pdu = radio_pkt_big_ctrl_get();
1302 		}
1303 
1304 		radio_pkt_rx_set(pdu);
1305 	}
1306 
1307 	radio_switch_complete_and_disable();
1308 
1309 	/* PDU Header Complete TimeOut, calculate the absolute timeout in
1310 	 * microseconds by when a PDU header is to be received for each
1311 	 * subevent.
1312 	 */
1313 	stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1314 	if (IS_ENABLED(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL) &&
1315 	    is_sequential_packing) {
1316 		nse = ((lll->bis_curr - stream->bis_index) *
1317 		       ((lll->bn * lll->irc) + lll->ptc)) +
1318 		      ((lll->irc_curr - 1U) * lll->bn) + (lll->bn_curr - 1U) +
1319 		      lll->ptc_curr + lll->ctrl;
1320 		hcto = lll->sub_interval * nse;
1321 	} else if (IS_ENABLED(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED) &&
1322 		   !is_sequential_packing) {
1323 		nse = (lll->bis_curr - stream->bis_index) +
1324 		       ((((lll->irc_curr - 1U) * lll->bn) +
1325 			 (lll->bn_curr - 1U) + lll->ptc_curr) *
1326 			lll->num_bis) + lll->ctrl;
1327 		hcto = lll->bis_spacing * nse;
1328 	} else {
1329 		nse = 0U;
1330 		hcto = 0U;
1331 
1332 		LL_ASSERT(false);
1333 	}
1334 
1335 	if (trx_cnt) {
1336 		/* Setup radio packet timer header complete timeout for
1337 		 * subsequent subevent PDU.
1338 		 */
1339 		uint32_t jitter_max_us;
1340 		uint32_t overhead_us;
1341 		uint32_t jitter_us;
1342 
1343 		/* Calculate the radio start with consideration of the drift
1344 		 * based on the access address capture timestamp.
1345 		 * Listen early considering +/- 2 us active clock jitter, i.e.
1346 		 * listen early by 4 us.
1347 		 */
1348 		hcto += radio_tmr_aa_restore();
1349 		hcto -= radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
1350 		hcto -= addr_us_get(lll->phy);
1351 		hcto -= radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
1352 		overhead_us = radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
1353 		overhead_us += addr_us_get(lll->phy);
1354 		overhead_us += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
1355 		overhead_us += (EVENT_CLOCK_JITTER_US << 1);
1356 		jitter_max_us = (EVENT_IFS_US - overhead_us) >> 1;
1357 		jitter_max_us -= RANGE_DELAY_US + HAL_RADIO_TMR_START_DELAY_US;
1358 		jitter_us = (EVENT_CLOCK_JITTER_US << 1) * nse;
1359 		if (jitter_us > jitter_max_us) {
1360 			jitter_us = jitter_max_us;
1361 		}
1362 		hcto -= jitter_us;
1363 
1364 		start_us = hcto;
1365 		hcto = radio_tmr_start_us(0U, start_us);
1366 		LL_ASSERT(hcto == (start_us + 1U));
1367 
		/* Add 8 us * subevents so far, as the radio was set up to
		 * listen 4 us early and subevents could each have drifted by
		 * 4 us up to the current subevent being listened to.
		 */
1372 		hcto += (jitter_us << 1);
1373 		hcto += RANGE_DELAY_US + HAL_RADIO_TMR_START_DELAY_US;
1374 	} else {
1375 		/* First subevent PDU was not received, hence setup radio packet
1376 		 * timer header complete timeout from where the first subevent
1377 		 * PDU which is the BIG event anchor point would have been
1378 		 * received.
1379 		 */
1380 		hcto += radio_tmr_ready_restore();
1381 
1382 		start_us = hcto;
1383 		hcto = radio_tmr_start_us(0U, start_us);
1384 		LL_ASSERT(hcto == (start_us + 1U));
1385 
1386 		hcto += ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
1387 			  lll->window_widening_event_us) << 1) +
1388 			lll->window_size_event_us;
1389 	}
1390 
1391 	/* header complete timeout to consider the radio ready delay, chain
1392 	 * delay and access address duration.
1393 	 */
1394 	hcto += radio_rx_ready_delay_get(lll->phy, PHY_FLAGS_S8);
1395 	hcto += addr_us_get(lll->phy);
1396 	hcto += radio_rx_chain_delay_get(lll->phy, PHY_FLAGS_S8);
1397 
1398 	/* setup absolute PDU header reception timeout */
1399 	radio_tmr_hcto_configure_abs(hcto);
1400 
1401 	/* setup capture of PDU end timestamp */
1402 	radio_tmr_end_capture();
1403 
1404 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
1405 	radio_gpio_lna_setup();
1406 
1407 	radio_gpio_pa_lna_enable(start_us +
1408 				 radio_rx_ready_delay_get(lll->phy,
1409 							  PHY_FLAGS_S8) -
1410 				 HAL_RADIO_GPIO_LNA_OFFSET);
1411 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
1412 
1413 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1414 		lll_prof_cputime_capture();
1415 	}
1416 
1417 	/* Calculate ahead the next subevent channel index */
1418 	const uint16_t event_counter = (lll->payload_count / lll->bn) - 1U;
1419 
1420 	if (false) {
1421 
1422 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
1423 	} else if (is_sequential_packing) {
1424 		next_chan_calc_seq(lll, event_counter, data_chan_id);
1425 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
1426 
1427 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
1428 	} else if (!is_sequential_packing) {
1429 		next_chan_calc_int(lll, event_counter);
1430 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
1431 
1432 	} else {
1433 		LL_ASSERT(false);
1434 	}
1435 
1436 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1437 		lll_prof_send();
1438 	}
1439 }
1440 
static void isr_rx_done(void *param)
1442 {
1443 	struct node_rx_pdu *node_rx;
1444 	struct event_done_extra *e;
1445 	struct lll_sync_iso *lll;
1446 	uint16_t latency_event;
1447 	uint16_t payload_index;
1448 	uint8_t bis_idx;
1449 
1450 	/* Enqueue PDUs to ULL */
1451 	node_rx = NULL;
1452 
1453 	/* Dequeue sliding window */
1454 	lll = param;
1455 	payload_index = lll->payload_tail;
1456 
1457 	/* Catchup with ISO event latencies */
1458 	latency_event = lll->latency_event;
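	/* Run the dequeue below (latency_event + 1) times so payloads of ISO
	 * events skipped due to scheduling latency are flushed too; payloads
	 * never received are reported with invalid status.
	 */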
1459 	do {
1460 		uint8_t stream_curr;
1461 
1462 		stream_curr = 0U;
1463 		for (bis_idx = 0U; bis_idx < lll->num_bis; bis_idx++) {
1464 			struct lll_sync_iso_stream *stream;
1465 			uint8_t stream_curr_inc;
1466 			uint16_t stream_handle;
1467 			uint8_t payload_tail;
1468 
1469 			stream_handle = lll->stream_handle[stream_curr];
1470 			stream = ull_sync_iso_lll_stream_get(stream_handle);
			/* Skip BIS indices not synchronized. bis_index is 0x01 to 0x1F,
			 * whereas bis_idx is 0-indexed.
			 */
1474 			if ((bis_idx + 1U) != stream->bis_index) {
1475 				continue;
1476 			}
1477 
1478 			payload_tail = lll->payload_tail;
1479 			for (uint8_t bn = 0U; bn < lll->bn; bn++) {
1480 				if (lll->payload[stream_curr][payload_tail]) {
1481 					node_rx = lll->payload[stream_curr][payload_tail];
1482 					lll->payload[stream_curr][payload_tail] = NULL;
1483 
1484 					iso_rx_put(node_rx->hdr.link, node_rx);
1485 				} else {
1486 					/* Check if there are 2 free rx buffers, one
1487 					 * will be consumed to generate PDU with invalid
1488 					 * status, and the other is to ensure a PDU can
1489 					 * be setup for the radio DMA to receive in the
1490 					 * next sub_interval/iso_interval.
1491 					 */
1492 					node_rx = ull_iso_pdu_rx_alloc_peek(2U);
1493 					if (node_rx) {
1494 						struct pdu_bis *pdu;
1495 						uint16_t handle;
1496 
1497 						ull_iso_pdu_rx_alloc();
1498 
1499 						pdu = (void *)node_rx->pdu;
1500 						pdu->ll_id = PDU_BIS_LLID_COMPLETE_END;
1501 						pdu->len = 0U;
1502 
1503 						handle = LL_BIS_SYNC_HANDLE_FROM_IDX(stream_handle);
1504 						isr_rx_iso_data_invalid(lll, latency_event, bn,
1505 									handle, node_rx);
1506 
1507 						iso_rx_put(node_rx->hdr.link, node_rx);
1508 					}
1509 				}
1510 
1511 				payload_index = payload_tail + 1U;
1512 				if (payload_index >= lll->payload_count_max) {
1513 					payload_index = 0U;
1514 				}
1515 				payload_tail = payload_index;
1516 			}
1517 
1518 			stream_curr_inc = stream_curr + 1U;
1519 			if (stream_curr_inc < lll->stream_count) {
1520 				stream_curr = stream_curr_inc;
1521 			}
1522 		}
1523 		lll->payload_tail = payload_index;
1524 	} while (latency_event--);
1525 
1526 #if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
1527 	if (node_rx) {
1528 		iso_rx_sched();
1529 	}
1530 #endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */
1531 
1532 	e = ull_event_done_extra_get();
1533 	LL_ASSERT(e);
1534 
1535 	/* Check if BIG terminate procedure received */
1536 	if (lll->term_reason) {
1537 		e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO_TERMINATE;
1538 
1539 		goto isr_done_cleanup;
1540 
1541 	/* Check if BIG Channel Map Update */
1542 	} else if (lll->chm_chan_count) {
1543 		const uint16_t event_counter = lll->payload_count / lll->bn;
1544 
1545 		/* Bluetooth Core Specification v5.3 Vol 6, Part B,
1546 		 * Section 5.5.2 BIG Control Procedures
1547 		 *
1548 		 * When a Synchronized Receiver receives such a PDU where
1549 		 * (instant - bigEventCounter) mod 65536 is greater than or
1550 		 * equal to 32767 (because the instant is in the past),
1551 		 * the Link Layer may stop synchronization with the BIG.
1552 		 */
1553 
1554 		/* Note: We are not validating whether the control PDU was
1555 		 * received after the instant but apply the new channel map.
1556 		 * If the channel map was new at or after the instant and
1557 		 * the channel at the event counter did not match then the
1558 		 * control PDU would not have been received.
1559 		 */
1560 		if (((event_counter - lll->ctrl_instant) & 0xFFFF) <= 0x7FFF) {
1561 			(void)memcpy(lll->data_chan_map, lll->chm_chan_map,
1562 				     sizeof(lll->data_chan_map));
1563 			lll->data_chan_count = lll->chm_chan_count;
1564 			lll->chm_chan_count = 0U;
1565 		}
1566 	}
1567 
1568 	/* Calculate and place the drift information in done event */
1569 	e->type = EVENT_DONE_EXTRA_TYPE_SYNC_ISO;
1570 	e->estab_failed = 0U;
1571 	e->trx_cnt = trx_cnt;
1572 	e->crc_valid = crc_ok_anchor;
1573 
1574 	if (trx_cnt) {
1575 		e->drift.preamble_to_addr_us = addr_us_get(lll->phy);
1576 		e->drift.start_to_address_actual_us =
1577 			radio_tmr_aa_restore() - radio_tmr_ready_restore();
1578 		e->drift.window_widening_event_us =
1579 			lll->window_widening_event_us;
1580 
1581 		/* Reset window widening, as anchor point sync-ed */
1582 		lll->window_widening_event_us = 0U;
1583 		lll->window_size_event_us = 0U;
1584 	}
1585 
1586 isr_done_cleanup:
1587 	lll_isr_cleanup(param);
1588 }
1589 
static void isr_done(void *param)
1591 {
1592 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1593 		lll_prof_latency_capture();
1594 	}
1595 
1596 	lll_isr_status_reset();
1597 
1598 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1599 		lll_prof_cputime_capture();
1600 	}
1601 
1602 	isr_rx_done(param);
1603 
1604 	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
1605 		lll_prof_send();
1606 	}
1607 }
1608 
static uint16_t payload_index_get(const struct lll_sync_iso *lll)
1610 {
1611 	uint16_t payload_index;
1612 
1613 	if (lll->ptc_curr) {
		/* FIXME: It is unclear why ptc is 4 bits; it should be 5 bits as ptc is a
		 *        running buffer offset related to nse.
		 *        Fix the ptc and ptc_curr definitions; until then there is an
		 *        assertion check when ptc is calculated in the ptc_calc function.
		 */
1619 		uint8_t ptx_idx = lll->ptc_curr - 1U; /* max. nse 5 bits */
1620 		uint8_t ptx_payload_idx;
1621 		uint16_t ptx_group_mult;
1622 		uint8_t ptx_group_idx;
1623 
1624 		/* Calculate group index and multiplier for deriving
1625 		 * pre-transmission payload index.
1626 		 */
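		/* For example, with bn = 2 and pto = 1, ptc_curr = 3 gives
		 * ptx_idx = 2, ptx_group_idx = 1, ptx_payload_idx = 0 and
		 * ptx_group_mult = 2, hence payload_index = 4, i.e. the first
		 * burst of payloads pre-transmitted from two ISO events ahead.
		 */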
1627 		ptx_group_idx = ptx_idx / lll->bn; /* 5 bits */
1628 		ptx_payload_idx = ptx_idx - ptx_group_idx * lll->bn; /* 8 bits */
1629 		ptx_group_mult = (ptx_group_idx + 1U) * lll->pto; /* 9 bits */
1630 		payload_index = ptx_payload_idx + ptx_group_mult * lll->bn; /* 13 bits */
1631 	} else {
1632 		payload_index  = lll->bn_curr - 1U; /* 3 bits */
1633 	}
1634 
1635 	return payload_index;
1636 }
1637 
1638 #if defined(CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL)
static void next_chan_calc_seq(struct lll_sync_iso *lll, uint16_t event_counter,
			       uint16_t data_chan_id)
1641 {
1642 	/* Calculate ahead the next subevent channel index */
1643 	if ((lll->bn_curr < lll->bn) ||
1644 	    (lll->irc_curr < lll->irc) ||
1645 	    (lll->ptc_curr < lll->ptc)) {
1646 		/* Calculate the radio channel to use for next subevent */
1647 		lll->next_chan_use =
1648 			lll_chan_iso_subevent(data_chan_id,
1649 					      lll->data_chan_map,
1650 					      lll->data_chan_count,
1651 					      &lll->data_chan.prn_s,
1652 					      &lll->data_chan.remap_idx);
1653 	} else if (lll->bis_curr < lll->num_bis) {
1654 		uint8_t access_addr[4];
1655 
1656 		/* Calculate the Access Address for the next BIS subevent */
1657 		util_bis_aa_le32((lll->bis_curr + 1U), lll->seed_access_addr,
1658 				 access_addr);
1659 		data_chan_id = lll_chan_id(access_addr);
1660 
1661 		/* Calculate the radio channel to use for next BIS */
1662 		lll->next_chan_use =
1663 			lll_chan_iso_event(event_counter,
1664 					   data_chan_id,
1665 					   lll->data_chan_map,
1666 					   lll->data_chan_count,
1667 					   &lll->data_chan.prn_s,
1668 					   &lll->data_chan.remap_idx);
1669 	}
1670 }
1671 #endif /* CONFIG_BT_CTLR_SYNC_ISO_SEQUENTIAL */
1672 
1673 #if defined(CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED)
static void next_chan_calc_int(struct lll_sync_iso *lll, uint16_t event_counter)
1675 {
1676 	struct lll_sync_iso_data_chan_interleaved *interleaved_data_chan;
1677 	uint8_t stream_curr;
1678 	uint8_t bis_prev;
1679 
1680 	if ((lll->bis_curr >= lll->num_bis) &&
1681 	    (lll->bn_curr >= lll->bn) &&
1682 	    (lll->irc_curr >= lll->irc) &&
1683 	    (lll->ptc_curr >= lll->ptc)) {
1684 		return;
1685 	}
1686 
1687 	/* Next selected stream */
1688 	stream_curr = lll->stream_curr + 1U;
1689 	if ((stream_curr < BT_CTLR_SYNC_ISO_STREAM_MAX) &&
1690 	    (stream_curr < lll->stream_count)) {
1691 		struct lll_sync_iso_stream *sync_stream;
1692 		uint16_t stream_handle;
1693 
1694 		stream_handle = lll->stream_handle[stream_curr];
1695 		sync_stream = ull_sync_iso_lll_stream_get(stream_handle);
1696 		if (sync_stream->bis_index <= lll->num_bis) {
1697 			bis_prev = sync_stream->bis_index - 1U;
1698 		} else {
1699 			bis_prev = lll->num_bis;
1700 		}
1701 	} else {
1702 		bis_prev = lll->num_bis;
1703 	}
1704 
1705 	if ((bis_prev < lll->num_bis) &&
1706 	    (lll->bn_curr == 1U) &&
1707 	    (lll->irc_curr == 1U) &&
1708 	    (lll->ptc_curr == 0U)) {
1709 		uint8_t access_addr[4];
1710 
1711 		/* Calculate the Access Address for the next BIS subevent */
1712 		util_bis_aa_le32((bis_prev + 1U), lll->seed_access_addr,
1713 				 access_addr);
1714 
1715 		interleaved_data_chan =
1716 			&lll->interleaved_data_chan[bis_prev];
1717 		interleaved_data_chan->id = lll_chan_id(access_addr);
1718 
1719 		/* Calculate the radio channel to use for next BIS */
1720 		lll->next_chan_use =
1721 			lll_chan_iso_event(event_counter,
1722 					   interleaved_data_chan->id,
1723 					   lll->data_chan_map,
1724 					   lll->data_chan_count,
1725 					   &interleaved_data_chan->prn_s,
1726 					   &interleaved_data_chan->remap_idx);
1727 	} else {
1728 		uint8_t bis_idx;
1729 
1730 		if (bis_prev >= lll->num_bis) {
1731 			struct lll_sync_iso_stream *sync_stream;
1732 
1733 			sync_stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1734 			bis_idx = sync_stream->bis_index - 1U;
1735 		} else {
1736 			bis_idx = bis_prev;
1737 		}
1738 
1739 		interleaved_data_chan = &lll->interleaved_data_chan[bis_idx];
1740 
1741 		/* Calculate the radio channel to use for next subevent */
1742 		lll->next_chan_use =
1743 			lll_chan_iso_subevent(interleaved_data_chan->id,
1744 					      lll->data_chan_map,
1745 					      lll->data_chan_count,
1746 					      &interleaved_data_chan->prn_s,
1747 					      &interleaved_data_chan->remap_idx);
1748 	}
1749 }
1750 #endif /* CONFIG_BT_CTLR_SYNC_ISO_INTERLEAVED */
1751 
static void isr_rx_iso_data_valid(const struct lll_sync_iso *const lll,
				  uint16_t handle, struct node_rx_pdu *node_rx)
1754 {
1755 	struct lll_sync_iso_stream *stream;
1756 	struct node_rx_iso_meta *iso_meta;
1757 
1758 	node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1759 	node_rx->hdr.handle = handle;
1760 
1761 	iso_meta = &node_rx->rx_iso_meta;
1762 	iso_meta->payload_number = lll->payload_count + payload_index_get(lll);
1763 	/* Decrement BN as payload_count was pre-incremented */
1764 	iso_meta->payload_number -= lll->bn;
1765 
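	/* Timestamp the SDU at its BIG event anchor point: the radio start
	 * time plus the restored access address capture, advanced by the
	 * pre-transmission offset when the payload was received ahead of its
	 * event, minus the access address duration, and then back-dated by
	 * the BIS offset within the event.
	 */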
1766 	stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1767 	iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1768 			      radio_tmr_aa_restore() +
1769 			      (DIV_ROUND_UP(lll->ptc_curr, lll->bn) *
1770 			       lll->pto * lll->iso_interval *
1771 			       PERIODIC_INT_UNIT_US) -
1772 			      addr_us_get(lll->phy);
1773 
1774 	const bool is_sequential_packing = (lll->bis_spacing >= (lll->sub_interval * lll->nse));
1775 
1776 	if (is_sequential_packing) {
1777 		iso_meta->timestamp -= (stream->bis_index - 1U) *
1778 				       lll->sub_interval * lll->nse;
1779 	} else {
1780 		iso_meta->timestamp -= (stream->bis_index - 1U) *
1781 				       lll->bis_spacing;
1782 	}
1783 
1784 	iso_meta->timestamp %=
1785 		HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
1786 
1787 	iso_meta->status = 0U;
1788 }
1789 
static void isr_rx_iso_data_invalid(const struct lll_sync_iso *const lll,
				    uint16_t latency, uint8_t bn,
				    uint16_t handle,
				    struct node_rx_pdu *node_rx)
1794 {
1795 	struct lll_sync_iso_stream *stream;
1796 	struct node_rx_iso_meta *iso_meta;
1797 
1798 	node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
1799 	node_rx->hdr.handle = handle;
1800 
1801 	iso_meta = &node_rx->rx_iso_meta;
1802 	iso_meta->payload_number = lll->payload_count + bn;
1803 	/* Decrement BN as payload_count was pre-incremented */
1804 	iso_meta->payload_number -= (latency + 1U) * lll->bn;
1805 
1806 	stream = ull_sync_iso_lll_stream_get(lll->stream_handle[0]);
1807 	iso_meta->timestamp = HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
1808 			      radio_tmr_aa_restore() - addr_us_get(lll->phy);
1809 
1810 	const bool is_sequential_packing = (lll->bis_spacing >= (lll->sub_interval * lll->nse));
1811 
1812 	if (is_sequential_packing) {
1813 		iso_meta->timestamp -= (stream->bis_index - 1U) *
1814 				       lll->sub_interval * lll->nse;
1815 	} else {
1816 		iso_meta->timestamp -= (stream->bis_index - 1U) *
1817 				       lll->bis_spacing;
1818 	}
1819 
1820 	iso_meta->timestamp -= latency * lll->iso_interval *
1821 			       PERIODIC_INT_UNIT_US;
1822 
1823 	iso_meta->timestamp %=
1824 		HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
1825 
1826 	iso_meta->status = 1U;
1827 }
1828 
static void isr_rx_ctrl_recv(struct lll_sync_iso *lll, struct pdu_bis *pdu)
1830 {
1831 	const uint8_t opcode = pdu->ctrl.opcode;
1832 
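	/* Only BIG_TERMINATE_IND and BIG_CHANNEL_MAP_IND are handled; the
	 * first pending instance of each is latched and any other control
	 * opcode is ignored.
	 */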
1833 	if (opcode == PDU_BIG_CTRL_TYPE_TERM_IND) {
1834 		if (!lll->term_reason) {
1835 			struct pdu_big_ctrl_term_ind *term;
1836 
1837 			term = (void *)&pdu->ctrl.term_ind;
1838 			lll->term_reason = term->reason;
1839 			lll->ctrl_instant = term->instant;
1840 		}
1841 	} else if (opcode == PDU_BIG_CTRL_TYPE_CHAN_MAP_IND) {
1842 		if (!lll->chm_chan_count) {
1843 			struct pdu_big_ctrl_chan_map_ind *chm;
1844 			uint8_t chan_count;
1845 
1846 			chm = (void *)&pdu->ctrl.chan_map_ind;
1847 			chan_count =
1848 				util_ones_count_get(chm->chm, sizeof(chm->chm));
1849 			if (chan_count >= CHM_USED_COUNT_MIN) {
1850 				lll->chm_chan_count = chan_count;
1851 				(void)memcpy(lll->chm_chan_map, chm->chm,
1852 					     sizeof(lll->chm_chan_map));
1853 				lll->ctrl_instant = chm->instant;
1854 			}
1855 		}
1856 	} else {
1857 		/* Unknown control PDU, ignore */
1858 	}
1859 }
1860