/*
 * Copyright (c) 2022 Nordic Semiconductor ASA
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <stdint.h>

#include <zephyr/sys/byteorder.h>

#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/ticker.h"

#include "util/util.h"
#include "util/mem.h"
#include "util/memq.h"
#include "util/dbuf.h"

#include "pdu_df.h"
#include "pdu_vendor.h"
#include "pdu.h"

#include "lll.h"
#include "lll_vendor.h"
#include "lll_clock.h"
#include "lll_chan.h"
#include "lll_df_types.h"
#include "lll_conn.h"
#include "lll_conn_iso.h"
#include "lll_central_iso.h"

#include "lll_iso_tx.h"

#include "lll_internal.h"
#include "lll_tim_internal.h"

#include "ll_feat.h"

#include "hal/debug.h"

static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *p);
static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
static void isr_tx(void *param);
static void isr_rx(void *param);
static void isr_prepare_subevent(void *param);
static void isr_done(void *param);
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll);
static void payload_count_flush_or_inc_on_close(struct lll_conn_iso_stream *cis_lll);
static void payload_count_lazy_update(struct lll_conn_iso_stream *cis_lll, uint16_t lazy);

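/* Channel selection state shared between the prepare and ISR steps. The
 * *_prn_s and *_remap_idx pairs hold the pseudo-random number generator
 * state and remapped channel index used by the ISO channel selection for
 * the current and the next CIS.
 */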
static uint16_t next_cis_chan_remap_idx;
static uint16_t next_cis_chan_prn_s;
static uint16_t data_chan_remap_idx;
static uint16_t data_chan_prn_s;
static uint8_t next_chan_use;
static uint8_t next_cis_chan;

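/* Per-event bookkeeping: bitmask of CISes that had a PDU exchange, the
 * offset of the first active CIS, the handle of the CIS currently being
 * serviced, and the current subevent number.
 */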
static uint32_t trx_performed_bitmask;
static uint16_t cis_offset_first;
static uint16_t cis_handle_curr;
static uint8_t se_curr;

#if defined(CONFIG_BT_CTLR_LE_ENC)
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

int lll_central_iso_init(void)
{
	int err;

	err = init_reset();
	if (err) {
		return err;
	}

	return 0;
}

int lll_central_iso_reset(void)
{
	int err;

	err = init_reset();
	if (err) {
		return err;
	}

	return 0;
}

void lll_central_iso_prepare(void *param)
{
	int err;

	/* Initiate HF clock start up */
	err = lll_hfclock_on();
	LL_ASSERT(err >= 0);

	/* Invoke common pipeline handling of prepare */
	err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
	LL_ASSERT(!err || err == -EINPROGRESS);
}

static int init_reset(void)
{
	return 0;
}

static int prepare_cb(struct lll_prepare_param *p)
{
	struct lll_conn_iso_group *cig_lll = p->param;
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	struct pdu_cis *pdu_tx;
	uint16_t event_counter;
	uint64_t payload_count;
	uint16_t data_chan_id;
	uint8_t data_chan_use;
	uint16_t cis_handle;
	struct ull_hdr *ull;
	uint32_t remainder;
	uint32_t start_us;
	uint32_t ret;
	uint8_t phy;
	int err = 0;

	DEBUG_RADIO_START_M(1);

	/* Reset global static variables */
	trx_performed_bitmask = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Get the first active CIS */
	cis_handle_curr = UINT16_MAX;
	do {
		cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle_curr);
	} while (cis_lll && !cis_lll->active);

	LL_ASSERT(cis_lll);

	/* Save first active CIS offset */
	cis_offset_first = cis_lll->offset;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Pick the event_count calculated in the ULL prepare */
	cis_lll->event_count = cis_lll->event_count_prepare;

	/* Event counter value, bits 0-15 of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* Calculate the current event latency */
	cig_lll->lazy_prepare = p->lazy;
	cig_lll->latency_event = cig_lll->latency_prepare + cig_lll->lazy_prepare;

	/* Reset accumulated latencies */
	cig_lll->latency_prepare = 0U;

	se_curr = 1U;

	/* Adjust the SN and NESN for skipped CIG events */
	payload_count_lazy_update(cis_lll, cig_lll->latency_event);

	/* Start setting up of Radio h/w */
	radio_reset();

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
	radio_tx_power_set(conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
	radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

	phy = cis_lll->tx.phy;
	radio_phy_set(phy, cis_lll->tx.phy_flags);
	radio_aa_set(cis_lll->access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(conn_lll->crc_init));
	lll_chan_set(data_chan_use);

	/* Get ISO data PDU */
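	/* No payload is due when bn_curr has advanced past the burst number;
	 * transmit a NULL PDU (NPI set) that still carries the latest nesn
	 * and, when Rx is also complete, requests event close via CIE.
	 */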
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* RFU in a NULL PDU */
		pdu_tx->cie = (cis_lll->rx.bn_curr > cis_lll->rx.bn);
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *node_tx;
		memq_link_t *link;

		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

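		/* Walk the Tx queue head: acknowledge and release payloads
		 * older than the payload count expected for this event, stop
		 * when the head matches the expected payload, and treat a
		 * head that is too new as having no PDU to transmit.
		 */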
		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail,
					 (void **)&node_tx);
			if (!link) {
				break;
			}

			if (node_tx->payload_count < payload_count) {
				memq_dequeue(cis_lll->memq_tx.tail,
					     &cis_lll->memq_tx.head,
					     NULL);

				node_tx->next = link;
				ull_iso_lll_ack_enqueue(cis_lll->handle,
							node_tx);
			} else if (node_tx->payload_count >=
				   (payload_count + cis_lll->tx.bn)) {
				link = NULL;
			} else {
				if (node_tx->payload_count != payload_count) {
					link = NULL;
				}

				break;
			}
		} while (link);

		if (!link) {
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* RFU in a NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)node_tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserved bits */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

	radio_isr_set(isr_tx, cis_lll);

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_rx(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_rx(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(cig_lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US +
						 cis_offset_first);

	remainder = p->remainder;
	start_us = radio_tmr_start(1U, ticks_at_start, remainder);

	/* Save radio ready timestamp, use it to schedule next subevent */
	radio_tmr_ready_save(start_us);

	/* Capture end of Tx-ed PDU, used to calculate HCTO */
	radio_tmr_end_capture();

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_gpio_pa_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(phy, PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
	ARG_UNUSED(start_us);
#endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_CONN_ISO_BASE + cig_lll->handle),
				    ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		err = -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	/* Adjust the SN and NESN for skipped CIG events */
	cis_handle = cis_handle_curr;
	do {
		cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle);
		if (cis_lll && cis_lll->active) {
			/* Pick the event_count calculated in the ULL prepare */
			cis_lll->event_count = cis_lll->event_count_prepare;

			/* Adjust sn and nesn for skipped CIG events */
			payload_count_lazy_update(cis_lll, cig_lll->latency_event);

			/* Adjust sn and nesn for cancelled events */
			if (err) {
				payload_count_flush_or_inc_on_close(cis_lll);
			}
		}
	} while (cis_lll);

	/* Return if prepare callback cancelled */
	if (err) {
		return err;
	}

	/* Prepare is done */
	ret = lll_prepare_done(cig_lll);
	LL_ASSERT(!ret);

	DEBUG_RADIO_START_M(1);

	return 0;
}

static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct lll_conn_iso_group *cig_lll;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *cis_lll;

		cis_lll = ull_conn_iso_lll_stream_get(cis_handle_curr);
		cig_lll = param;

		/* Adjust the SN, NESN and payload_count on abort for CISes */
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll,
									    &cis_handle_curr);
			if (next_cis_lll && next_cis_lll->active) {
				payload_count_flush_or_inc_on_close(next_cis_lll);
			}
		} while (next_cis_lll);

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Get reference to CIG LLL context */
	cig_lll = prepare_param->param;

	/* Accumulate the latency as event is aborted while being in pipeline */
	cig_lll->lazy_prepare = prepare_param->lazy;
	cig_lll->latency_prepare += (cig_lll->lazy_prepare + 1U);

	lll_done(param);
}

static void isr_tx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct node_rx_pdu *node_rx;
	uint32_t hcto;

	/* Clear radio tx status and events */
	lll_isr_tx_status_reset();

	/* Close subevent, one tx-rx chain */
	radio_switch_complete_and_disable();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Acquire rx node for reception */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	/* assert if radio packet ptr is not set and radio started rx */
	LL_ASSERT(!radio_is_ready());

	/* +/- 2us active clock jitter, +1 us PPI to timer start compensation */
	hcto = radio_tmr_tifs_base_get() + cis_lll->tifs_us +
	       (EVENT_CLOCK_JITTER_US << 1) + RANGE_DELAY_US +
	       HAL_RADIO_TMR_START_DELAY_US;

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto -= radio_tx_chain_delay_get(cis_lll->tx.phy,
					 cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_chain_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto -= radio_tx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
	defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR || HAL_RADIO_GPIO_HAVE_PA_PIN */

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + cis_lll->tifs_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(cis_lll->tx.phy,
							  cis_lll->tx.phy_flags) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + cis_lll->tifs_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, param);

	/* Schedule next subevent */
	if (se_curr < cis_lll->nse) {
		const struct lll_conn *evt_conn_lll;
		uint16_t data_chan_id;

#if !defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
		uint32_t subevent_us;
		uint32_t start_us;

		subevent_us = radio_tmr_ready_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);

		start_us = radio_tmr_start_us(1U, subevent_us);
		LL_ASSERT(start_us == (subevent_us + 1U));
#endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */

		/* Get reference to ACL context */
		evt_conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

		/* Calculate the radio channel to use for next subevent */
		data_chan_id = lll_chan_id(cis_lll->access_addr);
		next_chan_use = lll_chan_iso_subevent(data_chan_id,
						      evt_conn_lll->data_chan_map,
						      evt_conn_lll->data_chan_count,
						      &data_chan_prn_s,
						      &data_chan_remap_idx);
	} else {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		struct lll_conn *next_conn_lll;
		struct node_tx_iso *node_tx;
		uint64_t payload_count;
		uint16_t event_counter;
		uint16_t data_chan_id;
		uint16_t cis_handle;
		memq_link_t *link;

		/* Fetch next active CIS in this CIG */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		cis_handle = cis_handle_curr;
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			return;
		}

#if !defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
		uint32_t subevent_us;
		uint32_t start_us;

		subevent_us = radio_tmr_ready_restore();
		subevent_us += next_cis_lll->offset - cis_offset_first;

		start_us = radio_tmr_start_us(1U, subevent_us);
		LL_ASSERT(start_us == (subevent_us + 1U));
#endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */

		/* Event counter value, bits 0-15 of cisEventCounter */
		event_counter = next_cis_lll->event_count;

		/* Get reference to ACL context */
		next_conn_lll = ull_conn_lll_get(next_cis_lll->acl_handle);

		/* Calculate the radio channel to use for ISO event */
		data_chan_id = lll_chan_id(next_cis_lll->access_addr);
		next_cis_chan = lll_chan_iso_event(event_counter, data_chan_id,
						   next_conn_lll->data_chan_map,
						   next_conn_lll->data_chan_count,
						   &next_cis_chan_prn_s,
						   &next_cis_chan_remap_idx);

		cis_lll = next_cis_lll;

		/* Tx Ack stale ISO Data */
		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

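		/* Release stale payloads of the next CIS before its first
		 * subevent: acknowledge and dequeue anything older than the
		 * expected payload count, leaving a matching payload at the
		 * head for transmission.
		 */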
		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail,
					 (void **)&node_tx);
			if (!link) {
				break;
			}

			if (node_tx->payload_count < payload_count) {
				memq_dequeue(cis_lll->memq_tx.tail,
					     &cis_lll->memq_tx.head,
					     NULL);

				node_tx->next = link;
				ull_iso_lll_ack_enqueue(cis_lll->handle,
							node_tx);
			} else if (node_tx->payload_count >=
				   (payload_count + cis_lll->tx.bn)) {
				link = NULL;
			} else {
				if (node_tx->payload_count !=
				    payload_count) {
					link = NULL;
				}

				break;
			}
		} while (link);
	}
}

static void isr_rx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	uint8_t ack_pending;
	uint8_t trx_done;
	uint8_t crc_ok;
	uint8_t cie;

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
	} else {
		crc_ok = 0U;
	}

	/* Clear radio status and events */
	lll_isr_rx_sub_status_reset();

	/* Initialize Close Isochronous Event */
	ack_pending = 0U;
	cie = 0U;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* No Rx */
	if (!trx_done ||
#if defined(CONFIG_TEST_FT_CEN_SKIP_SUBEVENTS)
	    /* Used by test code to skip CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT
	     * events in every 3 event counts when the current subevent is
	     * less than or equal to 2, or when the current CIS has completed
	     * all its NSE subevents; and to skip
	     * (CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT + 1) events in every 3
	     * event counts when the current subevent is less than or equal
	     * to 1, or when the current CIS has completed all its NSE
	     * subevents.
	     */
	    ((((cis_lll->event_count % 3U) < CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 2U))) ||

	     (((cis_lll->event_count % 3U) < (CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT + 1U)) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 1U)))) ||
#endif /* CONFIG_TEST_FT_CEN_SKIP_SUBEVENTS */
	    false) {
		payload_count_flush(cis_lll);

		goto isr_rx_next_subevent;
	}

	/* FIXME: Do not call this for every event/subevent */
	ull_conn_iso_lll_cis_established(param);

	/* Set the bit corresponding to CIS index */
	trx_performed_bitmask |= (1U << LL_CIS_IDX_FROM_HANDLE(cis_lll->handle));

	if (crc_ok) {
		struct node_rx_pdu *node_rx;
		struct pdu_cis *pdu_rx;

		/* Get reference to received PDU */
		node_rx = ull_iso_pdu_rx_alloc_peek(1U);
		LL_ASSERT(node_rx);
		pdu_rx = (void *)node_rx->pdu;

		/* Tx ACK */
		if ((pdu_rx->nesn != cis_lll->sn) && (cis_lll->tx.bn_curr <= cis_lll->tx.bn)) {
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if ((cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
			    ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* TODO: Implement early Tx Ack. Currently Tx Ack is
			 * generated as a stale Tx Ack when the payload count
			 * has elapsed.
			 */
		}

		/* Handle valid ISO data Rx */
		if (!pdu_rx->npi &&
		    (cis_lll->rx.bn_curr <= cis_lll->rx.bn) &&
		    (pdu_rx->sn == cis_lll->nesn) &&
		    ull_iso_pdu_rx_alloc_peek(2U)) {
			struct lll_conn_iso_group *cig_lll;
			struct node_rx_iso_meta *iso_meta;

			cis_lll->nesn++;

#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* Get reference to ACL context */
			const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

			/* If required, wait for CCM to finish */
			if (pdu_rx->len && conn_lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				if (!radio_ccm_mic_is_valid()) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					goto isr_rx_done;
				}

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue Rx ISO PDU */
			node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
			node_rx->hdr.handle = cis_lll->handle;
			iso_meta = &node_rx->rx_iso_meta;
			iso_meta->payload_number = cis_lll->rx.payload_count +
						   cis_lll->rx.bn_curr - 1U;
			iso_meta->timestamp =
				HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
				radio_tmr_ready_restore();
			cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
			iso_meta->timestamp -= (cis_lll->event_count -
						(cis_lll->rx.payload_count / cis_lll->rx.bn)) *
					       cig_lll->iso_interval_us;
			iso_meta->timestamp %=
				HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
			iso_meta->status = 0U;

			ull_iso_pdu_rx_alloc();
			iso_rx_put(node_rx->hdr.link, node_rx);

#if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
			iso_rx_sched();
#endif /* !CONFIG_BT_CTLR_LOW_LAT_ULL */

			cis_lll->rx.bn_curr++;
			if ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
			    ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}

			/* Needs to be acked */
			ack_pending = 1U;
		}

		/* Close Isochronous Event */
		cie = cie || pdu_rx->cie;
	}

	payload_count_flush(cis_lll);

	/* Close Isochronous Event */
	cie = cie || ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
		      (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
		      !ack_pending);

isr_rx_next_subevent:
	if (cie || (se_curr == cis_lll->nse)) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *old_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		struct lll_conn *next_conn_lll;
		uint8_t phy;

		/* Fetch next CIS */
		/* TODO: Use a new ull_conn_iso_lll_stream_get_active_by_group()
		 * in the future.
		 */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll,
									    &cis_handle_curr);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			goto isr_rx_done;
		}

		/* Get reference to ACL context */
		next_conn_lll = ull_conn_lll_get(next_cis_lll->acl_handle);

		/* Calculate CIS channel if not already calculated */
		if (se_curr < cis_lll->nse) {
			struct node_tx_iso *node_tx;
			uint64_t payload_count;
			uint16_t event_counter;
			uint16_t data_chan_id;
			memq_link_t *link;

#if !defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
			uint32_t subevent_us;
			uint32_t start_us;

			subevent_us = radio_tmr_ready_restore();
			subevent_us += next_cis_lll->offset - cis_offset_first;

			start_us = radio_tmr_start_us(1U, subevent_us);
			LL_ASSERT(start_us == (subevent_us + 1U));
#endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */

			/* Event counter value, bits 0-15 of cisEventCounter */
			event_counter = next_cis_lll->event_count;

			/* Calculate the radio channel to use for ISO event */
			data_chan_id = lll_chan_id(next_cis_lll->access_addr);
			next_cis_chan = lll_chan_iso_event(event_counter, data_chan_id,
							   next_conn_lll->data_chan_map,
							   next_conn_lll->data_chan_count,
							   &next_cis_chan_prn_s,
							   &next_cis_chan_remap_idx);

			old_cis_lll = cis_lll;
			cis_lll = next_cis_lll;

			payload_count = cis_lll->tx.payload_count +
					cis_lll->tx.bn_curr - 1U;

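			/* Flush stale Tx payloads of the next CIS; same walk
			 * as in prepare_cb() and isr_tx().
			 */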
			do {
				link = memq_peek(cis_lll->memq_tx.head,
						 cis_lll->memq_tx.tail,
						 (void **)&node_tx);
				if (!link) {
					break;
				}

				if (node_tx->payload_count < payload_count) {
					memq_dequeue(cis_lll->memq_tx.tail,
						     &cis_lll->memq_tx.head,
						     NULL);

					node_tx->next = link;
					ull_iso_lll_ack_enqueue(cis_lll->handle,
								node_tx);
				} else if (node_tx->payload_count >=
					   (payload_count + cis_lll->tx.bn)) {
					link = NULL;
				} else {
					if (node_tx->payload_count !=
					    payload_count) {
						link = NULL;
					}

					break;
				}
			} while (link);

			cis_lll = old_cis_lll;
		}

		payload_count_flush_or_inc_on_close(cis_lll);

		/* Reset indices for the next CIS */
		se_curr = 0U; /* isr_prepare_subevent() will increment se_curr */

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
		radio_tx_power_set(next_conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
		radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

		phy = next_cis_lll->tx.phy;
		radio_phy_set(phy, next_cis_lll->tx.phy_flags);
		radio_aa_set(next_cis_lll->access_addr);
		radio_crc_configure(PDU_CRC_POLYNOMIAL,
				    sys_get_le24(next_conn_lll->crc_init));

		param = next_cis_lll;
		next_chan_use = next_cis_chan;
		data_chan_prn_s = next_cis_chan_prn_s;
		data_chan_remap_idx = next_cis_chan_remap_idx;
	}

	isr_prepare_subevent(param);

	return;

isr_rx_done:
	radio_isr_set(isr_done, param);
	radio_disable();
}

static void isr_prepare_subevent(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct pdu_cis *pdu_tx;
	uint64_t payload_count;
	uint8_t payload_index;
	uint32_t start_us;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get ISO data PDU */
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* RFU in a NULL PDU */
		pdu_tx->cie = (cis_lll->rx.bn_curr > cis_lll->rx.bn);
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *node_tx;
		memq_link_t *link;

		payload_index = cis_lll->tx.bn_curr - 1U;
		payload_count = cis_lll->tx.payload_count + payload_index;

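		/* Fast path: the PDU for bn_curr is expected at queue index
		 * bn_curr - 1; if the queue is not aligned with the payload
		 * numbering, fall back to a linear scan for the first payload
		 * at or beyond the expected payload count.
		 */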
		link = memq_peek_n(cis_lll->memq_tx.head, cis_lll->memq_tx.tail,
				   payload_index, (void **)&node_tx);
		if (!link || (node_tx->payload_count != payload_count)) {
			payload_index = 0U;
			do {
				link = memq_peek_n(cis_lll->memq_tx.head,
						   cis_lll->memq_tx.tail,
						   payload_index,
						   (void **)&node_tx);
				payload_index++;
			} while (link &&
				 (node_tx->payload_count < payload_count));
		}

		if (!link || (node_tx->payload_count != payload_count)) {
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* RFU in a NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)node_tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserved bits */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->tx.phy, cis_lll->tx.phy_flags);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE), pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

	lll_chan_set(next_chan_use);

	radio_tmr_rx_disable();
	radio_tmr_tx_enable();

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_rx(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_rx(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN) || \
	defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
	uint32_t subevent_us;

	subevent_us = radio_tmr_ready_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);

#if defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
	start_us = radio_tmr_start_us(1U, subevent_us);
	LL_ASSERT(start_us == (subevent_us + 1U));

#else /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */
	/* Compensate for the 1 us added by radio_tmr_start_us() */
	start_us = subevent_us + 1U;
#endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */

#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN ||
	* CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER
	*/

	/* Capture end of Tx-ed PDU, used to calculate HCTO */
	radio_tmr_end_capture();

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_gpio_pa_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
	ARG_UNUSED(start_us);
#endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	LL_ASSERT(!radio_is_ready());

	radio_isr_set(isr_tx, param);

	/* Next subevent */
	se_curr++;
}

static void isr_done(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct event_done_extra *e;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	payload_count_flush_or_inc_on_close(cis_lll);

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CIS;
	e->trx_performed_bitmask = trx_performed_bitmask;
	e->crc_valid = 1U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	lll_isr_cleanup(param);
}

static void payload_count_flush(struct lll_conn_iso_stream *cis_lll)
{
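	/* With NSE subevents and BN payloads per burst, each payload gets
	 * NSE/BN transmission opportunities; u is, in effect, the subevent
	 * number by which the last opportunity of the current payload has
	 * passed. When the payload's flush timeout expires at the end of
	 * this event and subevent u has been reached, skip the payload:
	 * advance sn (or nesn for Rx) and the burst/payload counters as if
	 * it had been acknowledged (received).
	 */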
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		if ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->tx.bn_curr < cis_lll->tx.bn) &&
		      ((cis_lll->tx.payload_count / cis_lll->tx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->tx.bn_curr == cis_lll->tx.bn) &&
		      ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit; only the least significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		if ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->rx.bn_curr < cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->rx.bn_curr == cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit; only the least significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}
	}
}

static void payload_count_flush_or_inc_on_close(struct lll_conn_iso_stream *cis_lll)
{
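	/* On event close: when the burst for this event is already fully
	 * consumed, just roll the payload counter over to the next burst.
	 * Otherwise repeatedly skip payloads whose flush timeout expires
	 * before the next event, keeping sn/nesn consistent with what the
	 * peer expects.
	 */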
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		if (((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.bn_curr) >
		    (cis_lll->event_count + cis_lll->tx.bn)) {
			cis_lll->tx.payload_count += cis_lll->tx.bn;
			cis_lll->tx.bn_curr = 1U;

			goto payload_count_flush_or_inc_on_close_rx;
		}

		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
			(cis_lll->event_count + 1U)) ||
		       ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
			 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
			/* sn and nesn are 1-bit; only the least significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
		}
	}

payload_count_flush_or_inc_on_close_rx:
	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		if (((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.bn_curr) >
		    (cis_lll->event_count + cis_lll->rx.bn)) {
			cis_lll->rx.payload_count += cis_lll->rx.bn;
			cis_lll->rx.bn_curr = 1U;

			return;
		}

		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
			(cis_lll->event_count + 1U)) ||
		       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
			 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
			/* sn and nesn are 1-bit; only the least significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}

			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
		}
	}
}

static void payload_count_lazy_update(struct lll_conn_iso_stream *cis_lll, uint16_t lazy)
{
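	/* For each skipped (lazy) CIG event, advance sn/nesn and the Tx/Rx
	 * burst and payload counters past every payload whose flush timeout
	 * expired in the skipped event, so the acknowledgment state matches
	 * what the peer expects after the latency.
	 */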
	if (cis_lll->tx.bn) {
		uint16_t tx_lazy;

		tx_lazy = lazy;
		while (tx_lazy--) {
			uint64_t payload_count;
			uint8_t u;

			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
			while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
				 cis_lll->event_count) && (u <= cis_lll->nse))) {
				/* sn and nesn are 1-bit; only the least significant bit is needed */
				cis_lll->sn++;
				cis_lll->tx.bn_curr++;
				if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
					cis_lll->tx.payload_count += cis_lll->tx.bn;
					cis_lll->tx.bn_curr = 1U;
				}

				payload_count = cis_lll->tx.payload_count +
						cis_lll->tx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
						    (cis_lll->tx.bn - 1U -
						     (payload_count % cis_lll->tx.bn)));
			}
		}
	}

	if (cis_lll->rx.bn) {
		while (lazy--) {
			uint64_t payload_count;
			uint8_t u;

			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
			while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
				 cis_lll->event_count) && (u <= cis_lll->nse))) {
				/* sn and nesn are 1-bit; only the least significant bit is needed */
				cis_lll->nesn++;
				cis_lll->rx.bn_curr++;
				if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
					cis_lll->rx.payload_count += cis_lll->rx.bn;
					cis_lll->rx.bn_curr = 1U;
				}

				payload_count = cis_lll->rx.payload_count +
						cis_lll->rx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
						    (cis_lll->rx.bn - 1U -
						     (payload_count % cis_lll->rx.bn)));
			}
		}
	}
}