1 /*
2 * Copyright (c) 2022 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8
9 #include <zephyr/sys/byteorder.h>
10
11 #include "hal/ccm.h"
12 #include "hal/radio.h"
13 #include "hal/ticker.h"
14
15 #include "util/util.h"
16 #include "util/mem.h"
17 #include "util/memq.h"
18 #include "util/dbuf.h"
19
20 #include "pdu_df.h"
21 #include "pdu_vendor.h"
22 #include "pdu.h"
23
24 #include "lll.h"
25 #include "lll_vendor.h"
26 #include "lll_clock.h"
27 #include "lll_chan.h"
28 #include "lll_df_types.h"
29 #include "lll_conn.h"
30 #include "lll_conn_iso.h"
31 #include "lll_central_iso.h"
32
33 #include "lll_iso_tx.h"
34
35 #include "lll_internal.h"
36 #include "lll_tim_internal.h"
37
38 #include "ll_feat.h"
39
40 #include "hal/debug.h"
41
/* Forward declarations of the event handlers and helpers below */
static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *p);
static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
static void isr_tx(void *param);
static void isr_rx(void *param);
static void isr_prepare_subevent(void *param);
static void isr_done(void *param);
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll);
static void payload_count_flush_or_inc_on_close(struct lll_conn_iso_stream *cis_lll);
static void payload_count_lazy_update(struct lll_conn_iso_stream *cis_lll, uint16_t lazy);
52
/* Channel selection state pre-computed for the next CIS/subevent, carried
 * between radio ISRs within a single CIG event.
 */
static uint16_t next_cis_chan_remap_idx;
static uint16_t next_cis_chan_prn_s;
static uint16_t data_chan_remap_idx;
static uint16_t data_chan_prn_s;
static uint8_t next_chan_use;
static uint8_t next_cis_chan;

/* Per-event bookkeeping: bitmask of CISes that performed a Tx-Rx exchange,
 * offset of the first active CIS in the CIG, handle of the CIS currently
 * being serviced, and the current subevent number (1-based).
 */
static uint32_t trx_performed_bitmask;
static uint16_t cis_offset_first;
static uint16_t cis_handle_curr;
static uint8_t se_curr;

#if defined(CONFIG_BT_CTLR_LE_ENC)
/* MIC verification result of the most recent decrypted Rx PDU */
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
68
/* One-time LLL Central ISO initialization.
 *
 * Returns 0 on success, else the error from the shared reset helper.
 */
int lll_central_iso_init(void)
{
	/* Initialization and reset share the same helper */
	return init_reset();
}
80
/* Reset the LLL Central ISO state.
 *
 * Returns 0 on success, else the error from the shared reset helper.
 */
int lll_central_iso_reset(void)
{
	/* Reset performs the same work as initialization */
	return init_reset();
}
92
/* Top-half prepare entry point for a Central CIG event.
 *
 * @param param Pointer to the struct lll_prepare_param for this event;
 *              its ->param member is the CIG LLL context.
 *
 * Starts the HF clock, accumulates the prepare latency (lazy count) into
 * the CIG context, then hands off to the common prepare pipeline which
 * will invoke prepare_cb()/abort_cb() as appropriate.
 */
void lll_central_iso_prepare(void *param)
{
	struct lll_conn_iso_group *cig_lll;
	struct lll_prepare_param *p;
	uint16_t elapsed;
	int err;

	/* Initiate HF clock start up */
	err = lll_hfclock_on();
	LL_ASSERT(err >= 0);

	/* Instants elapsed */
	p = param;
	elapsed = p->lazy + 1U;

	/* Save the (latency + 1) for use in event and/or supervision timeout */
	cig_lll = p->param;
	cig_lll->latency_prepare += elapsed;

	/* Invoke common pipeline handling of prepare */
	err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
	LL_ASSERT(!err || err == -EINPROGRESS);
}
116
/* Common init/reset helper; nothing to do currently, always succeeds. */
static int init_reset(void)
{
	return 0;
}
121
/* Bottom-half prepare callback: set up the radio for the first subevent of
 * the first active CIS in the CIG event.
 *
 * @param p Prepare parameters; p->param is the CIG LLL context.
 *
 * @return 0 on success, -ECANCELED if the preparation overhead check failed
 *         and the event was cancelled.
 *
 * Selects the data channel, picks (or substitutes a NULL PDU for) the ISO
 * payload to transmit, configures encryption when enabled, starts the radio
 * timer at the first CIS offset, and adjusts SN/NESN for any skipped events
 * on every active CIS of the group.
 */
static int prepare_cb(struct lll_prepare_param *p)
{
	struct lll_conn_iso_group *cig_lll = p->param;
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	struct pdu_cis *pdu_tx;
	uint16_t event_counter;
	uint64_t payload_count;
	uint16_t data_chan_id;
	uint8_t data_chan_use;
	uint16_t cis_handle;
	struct ull_hdr *ull;
	uint32_t remainder;
	uint32_t start_us;
	uint16_t lazy;
	uint32_t ret;
	uint8_t phy;
	int err = 0;

	DEBUG_RADIO_START_M(1);

	/* Reset global static variables */
	trx_performed_bitmask = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Get the first CIS */
	cis_handle_curr = UINT16_MAX;
	do {
		cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle_curr);
	} while (cis_lll && !cis_lll->active);

	LL_ASSERT(cis_lll);

	/* Save first active CIS offset */
	cis_offset_first = cis_lll->offset;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Event counter value,  0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* Store the current event latency */
	cig_lll->latency_event = cig_lll->latency_prepare;
	lazy = cig_lll->latency_prepare - 1U;

	/* Reset accumulated latencies */
	cig_lll->latency_prepare = 0U;

	/* First subevent of the event */
	se_curr = 1U;

	/* Adjust the SN and NESN for skipped CIG events */
	payload_count_lazy_update(cis_lll, lazy);

	/* Start setting up of Radio h/w */
	radio_reset();

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
	radio_tx_power_set(conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
	radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

	phy = cis_lll->tx.phy;
	radio_phy_set(phy, cis_lll->tx.phy_flags);
	radio_aa_set(cis_lll->access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(conn_lll->crc_init));
	lll_chan_set(data_chan_use);

	/* Get ISO data PDU; when the Tx burst is exhausted, transmit a NULL
	 * PDU (NPI) instead.
	 */
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
		pdu_tx->cie = (cis_lll->rx.bn_curr > cis_lll->rx.bn);
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *node_tx;
		memq_link_t *link;

		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

		/* Drain stale Tx payloads (already elapsed payload counter)
		 * as acknowledgements, stop at the payload to send now.
		 */
		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail,
					 (void **)&node_tx);
			if (!link) {
				break;
			}

			if (node_tx->payload_count < payload_count) {
				memq_dequeue(cis_lll->memq_tx.tail,
					     &cis_lll->memq_tx.head,
					     NULL);

				node_tx->next = link;
				ull_iso_lll_ack_enqueue(cis_lll->handle,
							node_tx);
			} else if (node_tx->payload_count >= (payload_count + cis_lll->tx.bn)) {
				link = NULL;
			} else {
				if (node_tx->payload_count != payload_count) {
					link = NULL;
				}

				break;
			}
		} while (link);

		if (!link) {
			/* No payload for this count: send NULL PDU */
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)node_tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserve bit */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

	radio_isr_set(isr_tx, cis_lll);

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_rx(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_rx(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(cig_lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US +
						 cis_offset_first);

	remainder = p->remainder;
	start_us = radio_tmr_start(1U, ticks_at_start, remainder);

	/* Save radio ready timestamp, use it to schedule next subevent */
	radio_tmr_ready_save(start_us);

	/* capture end of Tx-ed PDU, used to calculate HCTO. */
	radio_tmr_end_capture();

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_gpio_pa_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(phy, PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
	ARG_UNUSED(start_us);
#endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_CONN_ISO_BASE + cig_lll->handle),
				    ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		err = -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	/* Adjust the SN and NESN for skipped CIG events on the remaining
	 * active CISes of the group (the first CIS was handled above).
	 */
	cis_handle = cis_handle_curr;
	do {
		cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle);
		if (cis_lll && cis_lll->active) {
			/* Adjust sn and nesn for skipped CIG events */
			payload_count_lazy_update(cis_lll, lazy);

			/* Adjust sn and nesn for canceled events */
			if (err) {
				payload_count_flush_or_inc_on_close(cis_lll);
			}
		}
	} while (cis_lll);

	/* Return if prepare callback cancelled */
	if (err) {
		return err;
	}

	/* Prepare is done */
	ret = lll_prepare_done(cig_lll);
	LL_ASSERT(!ret);

	DEBUG_RADIO_START_M(1);

	return 0;
}
395
/* Abort an in-progress or pipelined CIG event.
 *
 * @param prepare_param NULL when aborting the currently active event,
 *                      else the prepare being cancelled from the pipeline.
 * @param param         CIG LLL context (active abort) or the prepare's
 *                      context for lll_done() (pipeline abort).
 */
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *cis_lll;
		struct lll_conn_iso_group *cig_lll;

		cis_lll = ull_conn_iso_lll_stream_get(cis_handle_curr);
		cig_lll = param;

		/* Adjust the SN, NESN and payload_count on abort for CISes
		 * that were not yet serviced in this event.
		 */
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll,
									    &cis_handle_curr);
			if (next_cis_lll && next_cis_lll->active) {
				payload_count_flush_or_inc_on_close(next_cis_lll);
			}
		} while (next_cis_lll);

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	lll_done(param);
}
436
/* Radio ISR after a subevent Tx completes: switch to Rx for the peer's
 * response, program the header-complete timeout (HCTO), and pre-compute the
 * channel and start time of the next subevent or of the next CIS.
 *
 * @param param CIS LLL context of the subevent just transmitted.
 */
static void isr_tx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct node_rx_pdu *node_rx;
	uint32_t hcto;

	/* Clear radio tx status and events */
	lll_isr_tx_status_reset();

	/* Close subevent, one tx-rx chain */
	radio_switch_complete_and_disable();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Acquire rx node for reception */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	/* assert if radio packet ptr is not set and radio started rx */
	LL_ASSERT(!radio_is_ready());

	/* +/- 2us active clock jitter, +1 us PPI to timer start compensation */
	hcto = radio_tmr_tifs_base_get() + cis_lll->tifs_us +
	       (EVENT_CLOCK_JITTER_US << 1) + RANGE_DELAY_US +
	       HAL_RADIO_TMR_START_DELAY_US;

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto -= radio_tx_chain_delay_get(cis_lll->tx.phy,
					 cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_chain_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto -= radio_tx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
	defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + cis_lll->tifs_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(cis_lll->tx.phy,
							  cis_lll->tx.phy_flags) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + cis_lll->tifs_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, param);

	/* Schedule next subevent */
	if (se_curr < cis_lll->nse) {
		const struct lll_conn *evt_conn_lll;
		uint16_t data_chan_id;
		uint32_t subevent_us;
		uint32_t start_us;

		/* Next subevent of the same CIS: start time relative to the
		 * radio ready timestamp saved in prepare_cb().
		 */
		subevent_us = radio_tmr_ready_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);

		start_us = radio_tmr_start_us(1U, subevent_us);
		LL_ASSERT(start_us == (subevent_us + 1U));

		/* Get reference to ACL context */
		evt_conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

		/* Calculate the radio channel to use for next subevent */
		data_chan_id = lll_chan_id(cis_lll->access_addr);
		next_chan_use = lll_chan_iso_subevent(data_chan_id,
						      evt_conn_lll->data_chan_map,
						      evt_conn_lll->data_chan_count,
						      &data_chan_prn_s,
						      &data_chan_remap_idx);
	} else {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		struct lll_conn *next_conn_lll;
		struct node_tx_iso *node_tx;
		uint64_t payload_count;
		uint16_t event_counter;
		uint16_t data_chan_id;
		uint32_t subevent_us;
		uint16_t cis_handle;
		uint32_t start_us;
		memq_link_t *link;

		/* Calculate channel for next CIS */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		cis_handle = cis_handle_curr;
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll, &cis_handle);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			return;
		}

		/* Get reference to ACL context */
		next_conn_lll = ull_conn_lll_get(next_cis_lll->acl_handle);

		/* Event counter value,  0-15 bit of cisEventCounter */
		event_counter = next_cis_lll->event_count;

		/* Calculate the radio channel to use for ISO event */
		data_chan_id = lll_chan_id(next_cis_lll->access_addr);
		next_cis_chan = lll_chan_iso_event(event_counter, data_chan_id,
						   next_conn_lll->data_chan_map,
						   next_conn_lll->data_chan_count,
						   &next_cis_chan_prn_s,
						   &next_cis_chan_remap_idx);

		subevent_us = radio_tmr_ready_restore();
		subevent_us += next_cis_lll->offset - cis_offset_first;

		start_us = radio_tmr_start_us(1U, subevent_us);
		LL_ASSERT(start_us == (subevent_us + 1U));

		cis_lll = next_cis_lll;

		/* Tx Ack stale ISO Data: drain payloads whose counter has
		 * elapsed on the next CIS before its first subevent.
		 */
		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail,
					 (void **)&node_tx);
			if (!link) {
				break;
			}

			if (node_tx->payload_count < payload_count) {
				memq_dequeue(cis_lll->memq_tx.tail,
					     &cis_lll->memq_tx.head,
					     NULL);

				node_tx->next = link;
				ull_iso_lll_ack_enqueue(cis_lll->handle,
							node_tx);
			} else if (node_tx->payload_count >=
				   (payload_count + cis_lll->tx.bn)) {
				link = NULL;
			} else {
				if (node_tx->payload_count !=
				    payload_count) {
					link = NULL;
				}

				break;
			}
		} while (link);
	}
}
649
/* Radio ISR after a subevent Rx window: process the received PDU (ACK
 * handling, valid ISO data enqueue, MIC check), decide whether to close the
 * isochronous event, and either prepare the next subevent/CIS or finish.
 *
 * @param param CIS LLL context of the subevent just received.
 */
static void isr_rx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	uint8_t ack_pending;
	uint8_t trx_done;
	uint8_t crc_ok;
	uint8_t cie;

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
	} else {
		crc_ok = 0U;
	}

	/* Clear radio status and events */
	lll_isr_rx_sub_status_reset();

	/* Initialize Close Isochronous Event */
	ack_pending = 0U;
	cie = 0U;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* No Rx */
	if (!trx_done ||
#if defined(CONFIG_TEST_FT_CEN_SKIP_SUBEVENTS)
	    /* Used by test code,
	     * to skip a number of events in every 3 event count when current subevent is less than
	     * or equal to 2 or when current subevent has completed all its NSE number of subevents.
	     * OR
	     * to skip a (number + 1) of events in every 3 event count when current subevent is less
	     * than or equal to 1 or when current subevent has completed all its NSE number of
	     * subevents.
	     */
	    ((((cis_lll->event_count % 3U) < CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 2U))) ||

	     (((cis_lll->event_count % 3U) < (CONFIG_TEST_FT_CEN_SKIP_EVENTS_COUNT + 1U)) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 1U)))) ||
#endif /* CONFIG_TEST_FT_CEN_SKIP_SUBEVENTS */
	    false) {
		payload_count_flush(cis_lll);

		goto isr_rx_next_subevent;
	}

	/* FIXME: Do not call this for every event/subevent */
	ull_conn_iso_lll_cis_established(param);

	/* Set the bit corresponding to CIS index */
	trx_performed_bitmask |= (1U << LL_CIS_IDX_FROM_HANDLE(cis_lll->handle));

	if (crc_ok) {
		struct node_rx_pdu *node_rx;
		struct pdu_cis *pdu_rx;

		/* Get reference to received PDU */
		node_rx = ull_iso_pdu_rx_alloc_peek(1U);
		LL_ASSERT(node_rx);
		pdu_rx = (void *)node_rx->pdu;

		/* Tx ACK: peer's NESN differs from our SN, our payload was
		 * received; advance the Tx burst.
		 */
		if ((pdu_rx->nesn != cis_lll->sn) && (cis_lll->tx.bn_curr <= cis_lll->tx.bn)) {
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if ((cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
			    ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* TODO: Implement early Tx Ack. Currently Tx Ack
			 *       generated as stale Tx Ack when payload count
			 *       has elapsed.
			 */
		}

		/* Handle valid ISO data Rx */
		if (!pdu_rx->npi &&
		    (cis_lll->rx.bn_curr <= cis_lll->rx.bn) &&
		    (pdu_rx->sn == cis_lll->nesn) &&
		    ull_iso_pdu_rx_alloc_peek(2U)) {
			struct lll_conn_iso_group *cig_lll;
			struct node_rx_iso_meta *iso_meta;

			cis_lll->nesn++;

#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* Get reference to ACL context */
			const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

			/* If required, wait for CCM to finish
			 */
			if (pdu_rx->len && conn_lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				if (!radio_ccm_mic_is_valid()) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					goto isr_rx_done;
				}

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue Rx ISO PDU */
			node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
			node_rx->hdr.handle = cis_lll->handle;
			iso_meta = &node_rx->rx_iso_meta;
			iso_meta->payload_number = cis_lll->rx.payload_count +
						   cis_lll->rx.bn_curr - 1U;
			iso_meta->timestamp =
				HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
				radio_tmr_ready_restore();
			cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
			iso_meta->timestamp -= (cis_lll->event_count -
						(cis_lll->rx.payload_count / cis_lll->rx.bn)) *
					       cig_lll->iso_interval_us;
			iso_meta->timestamp %=
				HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
			iso_meta->status = 0U;

			ull_iso_pdu_rx_alloc();
			iso_rx_put(node_rx->hdr.link, node_rx);

#if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
			iso_rx_sched();
#endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */

			cis_lll->rx.bn_curr++;
			if ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
			    ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}

			/* Need to be acked */
			ack_pending = 1U;
		}

		/* Close Isochronous Event */
		cie = cie || pdu_rx->cie;
	}

	payload_count_flush(cis_lll);

	/* Close Isochronous Event: both bursts exhausted and nothing left
	 * to acknowledge.
	 */
	cie = cie || ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
		      (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
		      !ack_pending);

isr_rx_next_subevent:
	if (cie || (se_curr == cis_lll->nse)) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *old_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		struct lll_conn *next_conn_lll;
		uint8_t phy;

		/* Fetch next CIS */
		/* TODO: Use a new ull_conn_iso_lll_stream_get_active_by_group()
		 *       in the future.
		 */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		do {
			next_cis_lll = ull_conn_iso_lll_stream_get_by_group(cig_lll,
									    &cis_handle_curr);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			goto isr_rx_done;
		}

		/* Get reference to ACL context */
		next_conn_lll = ull_conn_lll_get(next_cis_lll->acl_handle);

		/* Calculate CIS channel if not already calculated; isr_tx()
		 * only pre-computes it when the last subevent was reached.
		 */
		if (se_curr < cis_lll->nse) {
			struct node_tx_iso *node_tx;
			uint64_t payload_count;
			uint16_t event_counter;
			uint16_t data_chan_id;
			uint32_t subevent_us;
			uint32_t start_us;
			memq_link_t *link;

			/* Event counter value,  0-15 bit of cisEventCounter */
			event_counter = next_cis_lll->event_count;

			/* Calculate the radio channel to use for ISO event */
			data_chan_id = lll_chan_id(next_cis_lll->access_addr);
			next_cis_chan = lll_chan_iso_event(event_counter, data_chan_id,
							   next_conn_lll->data_chan_map,
							   next_conn_lll->data_chan_count,
							   &next_cis_chan_prn_s,
							   &next_cis_chan_remap_idx);

			subevent_us = radio_tmr_ready_restore();
			subevent_us += next_cis_lll->offset - cis_offset_first;

			start_us = radio_tmr_start_us(1U, subevent_us);
			LL_ASSERT(start_us == (subevent_us + 1U));

			old_cis_lll = cis_lll;
			cis_lll = next_cis_lll;

			/* Drain stale Tx payloads of the next CIS as
			 * acknowledgements.
			 */
			payload_count = cis_lll->tx.payload_count +
					cis_lll->tx.bn_curr - 1U;

			do {
				link = memq_peek(cis_lll->memq_tx.head,
						 cis_lll->memq_tx.tail,
						 (void **)&node_tx);
				if (!link) {
					break;
				}

				if (node_tx->payload_count < payload_count) {
					memq_dequeue(cis_lll->memq_tx.tail,
						     &cis_lll->memq_tx.head,
						     NULL);

					node_tx->next = link;
					ull_iso_lll_ack_enqueue(cis_lll->handle,
								node_tx);
				} else if (node_tx->payload_count >=
					   (payload_count + cis_lll->tx.bn)) {
					link = NULL;
				} else {
					if (node_tx->payload_count !=
					    payload_count) {
						link = NULL;
					}

					break;
				}
			} while (link);

			cis_lll = old_cis_lll;
		}

		payload_count_flush_or_inc_on_close(cis_lll);

		/* Reset indices for the next CIS */
		se_curr = 0U; /* isr_prepare_subevent() will increase se_curr */

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
		radio_tx_power_set(next_conn_lll->tx_pwr_lvl);
#else
		radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif

		phy = next_cis_lll->tx.phy;
		radio_phy_set(phy, next_cis_lll->tx.phy_flags);
		radio_aa_set(next_cis_lll->access_addr);
		radio_crc_configure(PDU_CRC_POLYNOMIAL,
				    sys_get_le24(next_conn_lll->crc_init));

		param = next_cis_lll;
		next_chan_use = next_cis_chan;
		data_chan_prn_s = next_cis_chan_prn_s;
		data_chan_remap_idx = next_cis_chan_remap_idx;
	}

	isr_prepare_subevent(param);

	return;

isr_rx_done:
	radio_isr_set(isr_done, param);
	radio_disable();
}
931
/* Set up the radio to transmit the next subevent of a CIS: pick the ISO
 * payload (or a NULL PDU), configure encryption/packet layout, set the
 * pre-computed channel, and arm the Tx-then-Rx switch.
 *
 * @param param CIS LLL context to transmit for; se_curr is incremented at
 *              the end, making this subevent the current one.
 */
static void isr_prepare_subevent(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct pdu_cis *pdu_tx;
	uint64_t payload_count;
	uint8_t payload_index;
	uint32_t subevent_us;
	uint32_t start_us;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get ISO data PDU */
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
		pdu_tx->cie = (cis_lll->rx.bn_curr > cis_lll->rx.bn);
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *node_tx;
		memq_link_t *link;

		payload_index = cis_lll->tx.bn_curr - 1U;
		payload_count = cis_lll->tx.payload_count + payload_index;

		/* Peek at the expected queue position first; fall back to a
		 * linear scan when the payload is not found there.
		 */
		link = memq_peek_n(cis_lll->memq_tx.head, cis_lll->memq_tx.tail,
				   payload_index, (void **)&node_tx);
		if (!link || (node_tx->payload_count != payload_count)) {
			payload_index = 0U;
			do {
				link = memq_peek_n(cis_lll->memq_tx.head,
						   cis_lll->memq_tx.tail,
						   payload_index,
						   (void **)&node_tx);
				payload_index++;
			} while (link &&
				 (node_tx->payload_count < payload_count));
		}

		if (!link || (node_tx->payload_count != payload_count)) {
			/* No payload for this count: send NULL PDU */
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)node_tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserve bit */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->tx.phy, cis_lll->tx.phy_flags);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE), pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

	/* Channel pre-computed by isr_tx()/isr_rx() */
	lll_chan_set(next_chan_use);

	radio_tmr_rx_disable();
	radio_tmr_tx_enable();

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_rx(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_rx(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	subevent_us = radio_tmr_ready_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);

	/* Compensate for the 1 us added by radio_tmr_start_us() */
	start_us = subevent_us + 1U;

	/* capture end of Tx-ed PDU, used to calculate HCTO. */
	radio_tmr_end_capture();

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_gpio_pa_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_tx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !HAL_RADIO_GPIO_HAVE_PA_PIN */
	ARG_UNUSED(start_us);
#endif /* !HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	LL_ASSERT(!radio_is_ready());

	radio_isr_set(isr_tx, param);

	/* Next subevent */
	se_curr++;
}
1090
/* Final ISR of the CIG event: flush/advance payload counters for the last
 * serviced CIS, fill in the event-done extra node for the ULL, and clean up
 * the LLL/radio state.
 *
 * @param param CIS LLL context that was last serviced.
 */
static void isr_done(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct event_done_extra *e;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	payload_count_flush_or_inc_on_close(cis_lll);

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CIS;
	e->trx_performed_bitmask = trx_performed_bitmask;
	e->crc_valid = 1U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	lll_isr_cleanup(param);
}
1116
/* Flush-timeout handling within an event: when a payload's flush point is
 * reached at the current subevent (u <= se_curr) and the flush timeout
 * expires with this event, advance SN/NESN and the burst indices as if the
 * payload had been exchanged, so both sides stay in sync.
 *
 * NOTE(review): `u` appears to be the last subevent at which the payload
 * may still be exchanged, derived from NSE, BN and the payload's position
 * in the burst — confirm against the flush timeout scheme in the spec.
 *
 * @param cis_lll CIS LLL context whose Tx and Rx counters are flushed.
 */
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		if ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->tx.bn_curr < cis_lll->tx.bn) &&
		      ((cis_lll->tx.payload_count / cis_lll->tx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->tx.bn_curr == cis_lll->tx.bn) &&
		      ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		if ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->rx.bn_curr < cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->rx.bn_curr == cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}
	}
}
1167
payload_count_flush_or_inc_on_close(struct lll_conn_iso_stream * cis_lll)1168 static void payload_count_flush_or_inc_on_close(struct lll_conn_iso_stream *cis_lll)
1169 {
1170 if (cis_lll->tx.bn) {
1171 uint64_t payload_count;
1172 uint8_t u;
1173
1174 if (((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.bn_curr) >
1175 (cis_lll->event_count + cis_lll->tx.bn)) {
1176 cis_lll->tx.payload_count += cis_lll->tx.bn;
1177 cis_lll->tx.bn_curr = 1U;
1178
1179 goto payload_count_flush_or_inc_on_close_rx;
1180 }
1181
1182 payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
1183 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1184 (cis_lll->tx.bn - 1U -
1185 (payload_count % cis_lll->tx.bn)));
1186 while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
1187 (cis_lll->event_count + 1U)) ||
1188 ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
1189 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
1190 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1191 cis_lll->sn++;
1192 cis_lll->tx.bn_curr++;
1193 if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
1194 cis_lll->tx.payload_count += cis_lll->tx.bn;
1195 cis_lll->tx.bn_curr = 1U;
1196 }
1197
1198 payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
1199 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1200 (cis_lll->tx.bn - 1U -
1201 (payload_count % cis_lll->tx.bn)));
1202 }
1203 }
1204
1205 payload_count_flush_or_inc_on_close_rx:
1206 if (cis_lll->rx.bn) {
1207 uint64_t payload_count;
1208 uint8_t u;
1209
1210 if (((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.bn_curr) >
1211 (cis_lll->event_count + cis_lll->rx.bn)) {
1212 cis_lll->rx.payload_count += cis_lll->rx.bn;
1213 cis_lll->rx.bn_curr = 1U;
1214
1215 return;
1216 }
1217
1218 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1219 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1220 (cis_lll->rx.bn - 1U -
1221 (payload_count % cis_lll->rx.bn)));
1222 while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
1223 (cis_lll->event_count + 1U)) ||
1224 ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
1225 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
1226 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1227 cis_lll->nesn++;
1228 cis_lll->rx.bn_curr++;
1229 if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
1230 cis_lll->rx.payload_count += cis_lll->rx.bn;
1231 cis_lll->rx.bn_curr = 1U;
1232 }
1233
1234 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1235 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1236 (cis_lll->rx.bn - 1U -
1237 (payload_count % cis_lll->rx.bn)));
1238 }
1239 }
1240 }
1241
payload_count_lazy_update(struct lll_conn_iso_stream * cis_lll,uint16_t lazy)1242 static void payload_count_lazy_update(struct lll_conn_iso_stream *cis_lll, uint16_t lazy)
1243 {
1244 if (cis_lll->tx.bn) {
1245 uint16_t tx_lazy;
1246
1247 tx_lazy = lazy;
1248 while (tx_lazy--) {
1249 uint64_t payload_count;
1250 uint8_t u;
1251
1252 payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
1253 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1254 (cis_lll->tx.bn - 1U -
1255 (payload_count % cis_lll->tx.bn)));
1256 while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
1257 cis_lll->event_count) ||
1258 ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
1259 cis_lll->event_count) && (u <= cis_lll->nse))) {
1260 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1261 cis_lll->sn++;
1262 cis_lll->tx.bn_curr++;
1263 if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
1264 cis_lll->tx.payload_count += cis_lll->tx.bn;
1265 cis_lll->tx.bn_curr = 1U;
1266 }
1267
1268 payload_count = cis_lll->tx.payload_count +
1269 cis_lll->tx.bn_curr - 1U;
1270 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1271 (cis_lll->tx.bn - 1U -
1272 (payload_count % cis_lll->tx.bn)));
1273 }
1274 }
1275 }
1276
1277 if (cis_lll->rx.bn) {
1278 while (lazy--) {
1279 uint64_t payload_count;
1280 uint8_t u;
1281
1282 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1283 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1284 (cis_lll->rx.bn - 1U -
1285 (payload_count % cis_lll->rx.bn)));
1286 while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
1287 cis_lll->event_count) ||
1288 ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
1289 cis_lll->event_count) && (u <= cis_lll->nse))) {
1290 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1291 cis_lll->nesn++;
1292 cis_lll->rx.bn_curr++;
1293 if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
1294 cis_lll->rx.payload_count += cis_lll->rx.bn;
1295 cis_lll->rx.bn_curr = 1U;
1296 }
1297
1298 payload_count = cis_lll->rx.payload_count +
1299 cis_lll->rx.bn_curr - 1U;
1300 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1301 (cis_lll->rx.bn - 1U -
1302 (payload_count % cis_lll->rx.bn)));
1303 }
1304 }
1305 }
1306 }
1307