1 /*
2 * Copyright (c) 2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8
9 #include <zephyr/sys/byteorder.h>
10
11 #include "hal/ccm.h"
12 #include "hal/radio.h"
13 #include "hal/ticker.h"
14
15 #include "util/util.h"
16 #include "util/mem.h"
17 #include "util/memq.h"
18 #include "util/dbuf.h"
19
20 #include "pdu_df.h"
21 #include "pdu_vendor.h"
22 #include "pdu.h"
23
24 #include "lll.h"
25 #include "lll_clock.h"
26 #include "lll/lll_df_types.h"
27 #include "lll_chan.h"
28 #include "lll_vendor.h"
29 #include "lll_conn.h"
30 #include "lll_conn_iso.h"
31 #include "lll_peripheral_iso.h"
32
33 #include "lll_iso_tx.h"
34
35 #include "lll_internal.h"
36 #include "lll_tim_internal.h"
37
38 #include "ll_feat.h"
39
40 #include "hal/debug.h"
41
42 static int init_reset(void);
43 static int prepare_cb(struct lll_prepare_param *p);
44 static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
45 static void isr_rx(void *param);
46 static void isr_tx(void *param);
47 static void next_cis_prepare(void *param);
48 static void isr_prepare_subevent(void *param);
49 static void isr_prepare_subevent_next_cis(void *param);
50 static void isr_prepare_subevent_common(void *param);
51 static void isr_done(void *param);
52 static void payload_count_flush(struct lll_conn_iso_stream *cis_lll);
53 static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll);
54 static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy);
55
/* Radio channel to use for the next subevent, or for the first subevent of
 * the next CIS (computed in isr_rx/isr_prepare_subevent*).
 */
static uint8_t next_chan_use;
/* Channel identifier derived from the current CIS access address */
static uint16_t data_chan_id;
/* Channel selection pseudo random generator state, updated per subevent */
static uint16_t data_chan_prn_s;
/* Channel selection remapping index state, updated per subevent */
static uint16_t data_chan_remap_idx;

/* Bit per CIS index set when a tx-rx chain was performed in this event */
static uint32_t trx_performed_bitmask;
/* Offset of the first active CIS, used as the event time reference */
static uint16_t cis_offset_first;
/* Handle of the CIS currently being serviced in this ISO event */
static uint16_t cis_handle_curr;
/* Current subevent number, 1-based within the current CIS */
static uint8_t se_curr;

#if defined(CONFIG_BT_CTLR_LE_ENC)
/* MIC verification state (LLL_CONN_MIC_NONE/PASS/FAIL) for event report */
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
69
/* One-time initialization of the peripheral ISO LLL module.
 *
 * Returns 0 on success, else the error from the common reset helper.
 */
int lll_peripheral_iso_init(void)
{
	return init_reset();
}
81
/* Reset the peripheral ISO LLL module state.
 *
 * Returns 0 on success, else the error from the common reset helper.
 */
int lll_peripheral_iso_reset(void)
{
	return init_reset();
}
93
lll_peripheral_iso_prepare(void * param)94 void lll_peripheral_iso_prepare(void *param)
95 {
96 struct lll_conn_iso_group *cig_lll;
97 struct lll_prepare_param *p;
98 uint16_t elapsed;
99 int err;
100
101 /* Initiate HF clock start up */
102 err = lll_hfclock_on();
103 LL_ASSERT(err >= 0);
104
105 /* Instants elapsed */
106 p = param;
107 elapsed = p->lazy + 1U;
108
109 /* Save the (latency + 1) for use in event and/or supervision timeout */
110 cig_lll = p->param;
111 cig_lll->latency_prepare += elapsed;
112
113 /* Accumulate window widening */
114 cig_lll->window_widening_prepare_us_frac +=
115 cig_lll->window_widening_periodic_us_frac * elapsed;
116 if (cig_lll->window_widening_prepare_us_frac >
117 EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
118 cig_lll->window_widening_prepare_us_frac =
119 EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
120 }
121
122 /* Invoke common pipeline handling of prepare */
123 err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
124 LL_ASSERT(!err || err == -EINPROGRESS);
125 }
126
/* Flush hook for a peripheral CIS; no LLL-level resources to release. */
void lll_peripheral_iso_flush(uint16_t handle, struct lll_conn_iso_stream *lll)
{
	ARG_UNUSED(lll);
	ARG_UNUSED(handle);
}
132
/* Common init/reset helper; no module state needs explicit setup here,
 * the file-scope statics are (re)initialized per event in prepare_cb().
 */
static int init_reset(void)
{
	return 0;
}
137
/* Prepare callback run from the LLL prepare pipeline for a CIG event.
 *
 * Finds the first active CIS in the (sorted) CIG, adjusts its payload
 * counters for skipped (lazy) events, programs the radio for reception
 * of the first subevent, and starts the event timer at the first CIS
 * offset. Finally, stale Tx payloads are flushed and lazy adjustment is
 * applied for the remaining active CISes in the group.
 *
 * Returns 0 on success, or -ECANCELED when the preemption overhead
 * check aborts the event before it starts.
 */
static int prepare_cb(struct lll_prepare_param *p)
{
	struct lll_conn_iso_group *cig_lll = p->param;
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	struct node_rx_pdu *node_rx;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	struct node_tx_iso *tx;
	uint64_t payload_count;
	uint16_t event_counter;
	uint8_t data_chan_use;
	struct ull_hdr *ull;
	uint32_t remainder;
	memq_link_t *link;
	uint32_t start_us;
	uint32_t hcto;
	uint16_t lazy;
	uint32_t ret;
	uint8_t phy;
	int err = 0;

	DEBUG_RADIO_START_S(1);

	/* Reset global static variables */
	trx_performed_bitmask = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Get the first CIS: iterate the sorted stream list, skipping
	 * inactive streams.
	 */
	cis_handle_curr = UINT16_MAX;
	do {
		cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle_curr);
	} while (cis_lll && !cis_lll->active);

	LL_ASSERT(cis_lll);

	/* Save first active CIS offset; used as the time reference for all
	 * subevent scheduling in this event.
	 */
	cis_offset_first = cis_lll->offset;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Event counter value, 0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* Store the current event latency */
	cig_lll->latency_event = cig_lll->latency_prepare;
	lazy = cig_lll->latency_prepare - 1U;

	/* Reset accumulated latencies */
	cig_lll->latency_prepare = 0U;

	/* current window widening */
	cig_lll->window_widening_event_us_frac +=
		cig_lll->window_widening_prepare_us_frac;
	cig_lll->window_widening_prepare_us_frac = 0;
	if (cig_lll->window_widening_event_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_event_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	/* Start at the first subevent (1-based) */
	se_curr = 1U;

	/* Adjust sn and nesn for skipped CIG events */
	payload_count_lazy(cis_lll, lazy);

	/* Start setting up of Radio h/w */
	radio_reset();

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
	radio_tx_power_set(conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
	radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

	phy = cis_lll->rx.phy;
	radio_phy_set(phy, PHY_FLAGS_S8);
	radio_aa_set(cis_lll->access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(conn_lll->crc_init));
	lll_chan_set(data_chan_use);

	/* Peek the Rx buffer that will hold the incoming PDU */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_cnt;
		uint8_t pkt_flags;

		/* CCM counter is the payload count of the expected payload */
		payload_cnt = cis_lll->rx.payload_count +
			      cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_cnt;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm, phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_isr_set(isr_rx, cis_lll);

	radio_tmr_tifs_set(EVENT_IFS_US);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Absolute event start, then offset by start overhead plus the
	 * first CIS offset.
	 */
	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(cig_lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US +
						 cis_offset_first);

	remainder = p->remainder;
	start_us = radio_tmr_start(0U, ticks_at_start, remainder);

	/* Save ready timestamp and arm access address capture for anchor
	 * point synchronization.
	 */
	radio_tmr_ready_save(start_us);
	radio_tmr_aa_save(0U);
	radio_tmr_aa_capture();

	/* Header Complete Timeout, use additional EVENT_TICKER_RES_MARGIN_US to
	 * compensate for possible shift in ACL peripheral's anchor point at
	 * the instant the CIS is to be established.
	 *
	 * FIXME: use a one time value in a window member variable to avoid
	 *        using this additional EVENT_TICKER_RES_MARGIN_US window in
	 *        subsequent events once CIS is established.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U) + EVENT_TICKER_RES_MARGIN_US;

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_CONN_ISO_BASE + cig_lll->handle),
				    ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		/* Cancel: still run the CIS loop below (with err set) so
		 * counters stay consistent, then return -ECANCELED.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		err = -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	/* Adjust the SN and NESN for skipped CIG events */
	uint16_t cis_handle = cis_handle_curr;

	do {
		/* Oldest payload that is still current for this CIS */
		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

		/* Dequeue and ack any Tx payloads older than the current
		 * payload count (stale due to skipped events).
		 */
		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail, (void **)&tx);
			if (link) {
				if (tx->payload_count < payload_count) {
					memq_dequeue(cis_lll->memq_tx.tail,
						     &cis_lll->memq_tx.head,
						     NULL);

					/* Reuse the memq link for the ack path */
					tx->next = link;
					ull_iso_lll_ack_enqueue(cis_lll->handle, tx);
				} else {
					break;
				}
			}
		} while (link);

		/* Advance to the next active CIS in the group */
		do {
			cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
		} while (cis_lll && !cis_lll->active);

		if (!cis_lll) {
			break;
		}

		/* Adjust sn and nesn for skipped CIG events */
		payload_count_lazy(cis_lll, lazy);

		/* Adjust sn and nesn for canceled events */
		if (err) {
			payload_count_rx_flush_or_txrx_inc(cis_lll);
		}
	} while (cis_lll);

	/* Return if prepare callback cancelled */
	if (err) {
		return err;
	}

	/* Prepare is done */
	ret = lll_prepare_done(cig_lll);
	LL_ASSERT(!ret);

	DEBUG_RADIO_START_S(1);

	return 0;
}
405
/* Abort callback from the LLL prepare pipeline.
 *
 * When prepare_param is NULL the currently running event is being
 * aborted: payload counters are flushed/advanced for all remaining
 * active CISes, then the radio is disabled with isr_done dispatching
 * the event-done processing. Otherwise a queued (not yet running)
 * prepare is cancelled: release the HF clock request and report done.
 */
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *cis_lll;
		struct lll_conn_iso_group *cig_lll;

		cis_lll = ull_conn_iso_lll_stream_get(cis_handle_curr);
		cig_lll = param;

		/* Adjust the SN, NESN and payload_count on abort for CISes */
		do {
			next_cis_lll =
				ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll,
									    &cis_handle_curr);
			if (next_cis_lll && next_cis_lll->active) {
				payload_count_rx_flush_or_txrx_inc(next_cis_lll);
			}
		} while (next_cis_lll);

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	lll_done(param);
}
447
/* Radio ISR on completion (or timeout) of a subevent reception.
 *
 * On a failed reception the current subevent is abandoned and the next
 * subevent (or next CIS) is prepared. On success: acknowledge Tx via
 * the peer's NESN, accept a new Rx payload via SN/NPI (verifying the
 * MIC when encrypted) and enqueue it towards ULL, then select and set
 * up the Tx PDU for this subevent and schedule the following subevent
 * reception (isr_tx runs after the transmission completes).
 */
static void isr_rx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	struct pdu_cis *pdu_tx;
	uint64_t payload_count;
	uint8_t payload_index;
	uint32_t subevent_us;
	uint32_t start_us;
	uint8_t trx_done;
	uint8_t crc_ok;
	uint8_t cie;

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
	} else {
		crc_ok = 0U;
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* No Rx */
	if (!trx_done ||
#if defined(CONFIG_TEST_FT_PER_SKIP_SUBEVENTS)
	    /* Used by test code,
	     * to skip a number of events in every 3 event count when current subevent is less than
	     * or equal to 2 or when current subevent has completed all its NSE number of subevents.
	     * OR
	     * to skip a (number + 1) of events in every 3 event count when current subevent is less
	     * than or equal to 1 or when current subevent has completed all its NSE number of
	     * subevents.
	     */
	    ((((cis_lll->event_count % 3U) < CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 2U))) ||
	     (((cis_lll->event_count % 3U) < (CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT + 1U)) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 1U)))) ||
#endif
	    false) {
		payload_count_flush(cis_lll);

		/* Next subevent or next CIS */
		if (se_curr < cis_lll->nse) {
			radio_isr_set(isr_prepare_subevent, param);
		} else {
			next_cis_prepare(param);
		}

		radio_disable();

		return;
	}

	/* Initialize Close Isochronous Event */
	cie = 0U;

	/* Save the AA captured for anchor point sync, this could be subsequent
	 * subevent if not synced to the first subevent.
	 */
	if (!radio_tmr_aa_restore()) {
		uint32_t se_offset_us;

		/* Normalize the captured timestamps back to subevent 1 */
		se_offset_us = cis_lll->sub_interval * (se_curr - 1U);
		radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
		radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
	}

	/* Close subevent, one tx-rx chain */
	radio_switch_complete_and_disable();

	/* FIXME: Do not call this for every event/subevent */
	ull_conn_iso_lll_cis_established(param);

	/* Set the bit corresponding to CIS index */
	trx_performed_bitmask |= (1U << LL_CIS_IDX_FROM_HANDLE(cis_lll->handle));

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	if (crc_ok) {
		struct node_rx_pdu *node_rx;
		struct pdu_cis *pdu_rx;

		/* Get reference to received PDU */
		node_rx = ull_iso_pdu_rx_alloc_peek(1U);
		LL_ASSERT(node_rx);

		pdu_rx = (void *)node_rx->pdu;

		/* Tx ACK: peer's NESN differing from our SN acknowledges the
		 * last transmitted payload.
		 */
		if ((pdu_rx->nesn != cis_lll->sn) && (cis_lll->tx.bn_curr <= cis_lll->tx.bn)) {
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if ((cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
			    ((cis_lll->tx.payload_count / cis_lll->tx.bn) <
			     cis_lll->event_count)) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* TODO: Implement early Tx Ack. Currently Tx Ack
			 *       generated as stale Tx Ack when payload count
			 *       has elapsed.
			 */
		}

		/* Handle valid ISO data Rx: not a NULL PDU, burst not yet
		 * complete, in-sequence SN, and a spare Rx buffer available.
		 */
		if (!pdu_rx->npi &&
		    (cis_lll->rx.bn_curr <= cis_lll->rx.bn) &&
		    (pdu_rx->sn == cis_lll->nesn) &&
		    ull_iso_pdu_rx_alloc_peek(2U)) {
			struct lll_conn_iso_group *cig_lll;
			struct node_rx_iso_meta *iso_meta;

			cis_lll->nesn++;

#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* If required, wait for CCM to finish
			 */
			if (pdu_rx->len && conn_lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				if (!radio_ccm_mic_is_valid()) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					/* Close event */
					radio_isr_set(isr_done, param);
					radio_disable();

					return;
				}

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue Rx ISO PDU */
			node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
			node_rx->hdr.handle = cis_lll->handle;
			iso_meta = &node_rx->rx_iso_meta;
			iso_meta->payload_number = cis_lll->rx.payload_count +
						   cis_lll->rx.bn_curr - 1U;
			/* Timestamp from the saved anchor point, rebased to
			 * the CIG event the payload belongs to.
			 */
			iso_meta->timestamp = cis_lll->offset +
				HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
				radio_tmr_aa_restore() - cis_offset_first -
				addr_us_get(cis_lll->rx.phy);
			cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
			iso_meta->timestamp -= (cis_lll->event_count -
						(cis_lll->rx.payload_count / cis_lll->rx.bn)) *
					       cig_lll->iso_interval_us;
			/* Wrap within the ticker counter range */
			iso_meta->timestamp %=
				HAL_TICKER_TICKS_TO_US(BIT(HAL_TICKER_CNTR_MSBIT + 1U));
			iso_meta->status = 0U;

			ull_iso_pdu_rx_alloc();
			iso_rx_put(node_rx->hdr.link, node_rx);

#if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
			iso_rx_sched();
#endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */

			cis_lll->rx.bn_curr++;
			if ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
			    ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}

		/* Close Isochronous Event */
		cie = cie || pdu_rx->cie;
	}

	payload_count_flush(cis_lll);

	/* Close Isochronous Event: both bursts complete before the last
	 * subevent.
	 */
	cie = cie || ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
		      (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
		      (se_curr < cis_lll->nse));

	/* Get ISO data PDU: a NULL PDU when the Tx burst is complete */
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
		pdu_tx->cie = cie;
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *tx;
		memq_link_t *link;

		payload_index = cis_lll->tx.bn_curr - 1U;
		payload_count = cis_lll->tx.payload_count + payload_index;

		/* Look up the payload at its expected queue position; if it
		 * is not there, scan forward for a matching payload count.
		 */
		link = memq_peek_n(cis_lll->memq_tx.head, cis_lll->memq_tx.tail,
				   payload_index, (void **)&tx);
		if (!link || (tx->payload_count != payload_count)) {
			payload_index = 0U;
			do {
				link = memq_peek_n(cis_lll->memq_tx.head,
						   cis_lll->memq_tx.tail,
						   payload_index, (void **)&tx);
				payload_index++;
			} while (link &&
				 (tx->payload_count < payload_count));
		}

		if (!link || (tx->payload_count != payload_count)) {
			/* No payload available: send a NULL PDU instead */
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserve bit */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

	/* PHY */
	radio_phy_set(cis_lll->tx.phy, cis_lll->tx.phy_flags);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	uint32_t pa_lna_enable_us;

	radio_gpio_pa_setup();

	pa_lna_enable_us = radio_tmr_tifs_base_get() + EVENT_IFS_US -
			   HAL_RADIO_GPIO_PA_OFFSET;
#if defined(CONFIG_BT_CTLR_PHY)
	pa_lna_enable_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						     PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	pa_lna_enable_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(pa_lna_enable_us);
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	LL_ASSERT(!radio_is_ready());

	/* Schedule next subevent */
	if (!cie && (se_curr < cis_lll->nse)) {
		/* Calculate the radio channel to use for next subevent
		 */
		next_chan_use = lll_chan_iso_subevent(data_chan_id,
						      conn_lll->data_chan_map,
						      conn_lll->data_chan_count,
						      &data_chan_prn_s,
						      &data_chan_remap_idx);
	} else {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		uint16_t event_counter;
		uint16_t cis_handle;

		/* Check for next active CIS */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		cis_handle = cis_handle_curr;
		do {
			next_cis_lll =
				ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			/* ISO Event Done */
			radio_isr_set(isr_done, param);

			return;
		}

		payload_count_rx_flush_or_txrx_inc(cis_lll);

		cis_handle_curr = cis_handle;

		/* Event counter value, 0-15 bit of cisEventCounter */
		event_counter = next_cis_lll->event_count;

		/* Calculate the radio channel to use for next CIS ISO event */
		data_chan_id = lll_chan_id(next_cis_lll->access_addr);
		next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
						   conn_lll->data_chan_map,
						   conn_lll->data_chan_count,
						   &data_chan_prn_s,
						   &data_chan_remap_idx);

		/* Next CIS, se_curr is incremented in isr_tx() */
		cis_lll = next_cis_lll;
		se_curr = 0U;
	}

	/* Schedule next subevent reception relative to the saved anchor
	 * point, accounting for PHY ready and chain delays.
	 */
	subevent_us = radio_tmr_aa_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);
	subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
	subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
	subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	subevent_us -= radio_rx_ready_delay_get(0U, 0U);
	subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	start_us = radio_tmr_start_us(0U, subevent_us);
	LL_ASSERT(start_us == (subevent_us + 1U));

	radio_isr_set(isr_tx, cis_lll);
}
821
/* Radio ISR after a subevent transmission completes.
 *
 * Re-arms the radio for reception of the next subevent (the timer was
 * already started by isr_rx): configures packet/CCM for Rx, hops to the
 * channel selected in isr_rx, programs the header complete timeout from
 * the saved anchor point, and advances the subevent counter.
 */
static void isr_tx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct lll_conn_iso_group *cig_lll;
	struct node_rx_pdu *node_rx;
	uint32_t subevent_us;
	uint32_t start_us;
	uint32_t hcto;

	lll_isr_tx_sub_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Peek the Rx buffer that will hold the incoming PDU */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM counter is the payload count of the expected payload */
		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_aa_set(cis_lll->access_addr);

	/* Channel selected by isr_rx for this subevent or next CIS */
	lll_chan_set(next_chan_use);

	radio_tmr_tx_disable();
	radio_tmr_rx_enable();

	radio_tmr_tifs_set(EVENT_IFS_US);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

	/* Recompute the subevent start time for the hcto calculation;
	 * the timer itself was already started by isr_rx.
	 */
	subevent_us = radio_tmr_aa_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);
	subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
	subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
	subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	subevent_us -= radio_rx_ready_delay_get(0U, 0U);
	subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Compensate for the 1 us added by radio_tmr_start_us() */
	start_us = subevent_us + 1U;

	/* Header complete timeout widened by jitter, resolution margin and
	 * the accumulated window widening on both sides.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U);

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, cis_lll);

	se_curr++;
}
955
next_cis_prepare(void * param)956 static void next_cis_prepare(void *param)
957 {
958 struct lll_conn_iso_stream *next_cis_lll;
959 struct lll_conn_iso_stream *cis_lll;
960 struct lll_conn_iso_group *cig_lll;
961 uint16_t cis_handle;
962
963 /* Get reference to CIS LLL context */
964 cis_lll = param;
965
966 /* Check for next active CIS */
967 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
968 next_cis_lll = cis_lll;
969 cis_handle = cis_handle_curr;
970 do {
971 next_cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
972 } while (next_cis_lll && !next_cis_lll->active);
973
974 if (!next_cis_lll) {
975 /* ISO Event Done */
976 radio_isr_set(isr_done, param);
977
978 return;
979 }
980
981 cis_handle_curr = cis_handle;
982
983 radio_isr_set(isr_prepare_subevent_next_cis, next_cis_lll);
984 }
985
isr_prepare_subevent(void * param)986 static void isr_prepare_subevent(void *param)
987 {
988 struct lll_conn_iso_stream *cis_lll;
989 const struct lll_conn *conn_lll;
990
991 lll_isr_status_reset();
992
993 /* Get reference to CIS LLL context */
994 cis_lll = param;
995
996 /* Get reference to ACL context */
997 conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
998
999 /* Calculate the radio channel to use for next subevent
1000 */
1001 next_chan_use = lll_chan_iso_subevent(data_chan_id,
1002 conn_lll->data_chan_map,
1003 conn_lll->data_chan_count,
1004 &data_chan_prn_s,
1005 &data_chan_remap_idx);
1006
1007 isr_prepare_subevent_common(param);
1008 }
1009
isr_prepare_subevent_next_cis(void * param)1010 static void isr_prepare_subevent_next_cis(void *param)
1011 {
1012 struct lll_conn_iso_stream *cis_lll;
1013 const struct lll_conn *conn_lll;
1014 uint16_t event_counter;
1015
1016 lll_isr_status_reset();
1017
1018 /* Get reference to CIS LLL context */
1019 cis_lll = param;
1020
1021 /* Get reference to ACL context */
1022 conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
1023
1024 /* Event counter value, 0-15 bit of cisEventCounter */
1025 event_counter = cis_lll->event_count;
1026
1027 /* Calculate the radio channel to use for next CIS ISO event */
1028 data_chan_id = lll_chan_id(cis_lll->access_addr);
1029 next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
1030 conn_lll->data_chan_map,
1031 conn_lll->data_chan_count,
1032 &data_chan_prn_s,
1033 &data_chan_remap_idx);
1034
1035 /* se_curr is incremented in isr_prepare_subevent_common() */
1036 se_curr = 0U;
1037
1038 isr_prepare_subevent_common(param);
1039 }
1040
/* Common subevent preparation: program the radio for reception in the
 * next CIS subevent (the peripheral receives first in each subevent),
 * compute the subevent's absolute start time, configure the header
 * complete timeout (HCTO) window, and arm isr_rx as the next radio ISR.
 *
 * @param param Pointer to the CIS LLL context (struct lll_conn_iso_stream).
 */
static void isr_prepare_subevent_common(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct lll_conn_iso_group *cig_lll;
	struct node_rx_pdu *node_rx;
	uint32_t subevent_us;
	uint32_t start_us;
	uint32_t hcto;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* An Rx PDU buffer must be available before arming the radio */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM nonce counter is the absolute payload counter of the
		 * payload expected in this subevent (bn_curr is 1-based).
		 */
		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		/* Reserve room for the trailing MIC on encrypted Rx */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		/* Route reception through the CCM for in-line decryption */
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_aa_set(cis_lll->access_addr);

	/* Channel for this subevent, pre-computed by the caller */
	lll_chan_set(next_chan_use);

	radio_tmr_tx_disable();
	radio_tmr_rx_enable();

	radio_tmr_tifs_set(EVENT_IFS_US);

	/* After Rx completes, switch the radio to Tx for our response */
#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Anchor point sync-ed */
	if (trx_performed_bitmask) {
		/* Derive the subevent start from the captured access address
		 * timestamp, backing off address and Rx path delays.
		 */
		subevent_us = radio_tmr_aa_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
		subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
		subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
		subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
		subevent_us -= radio_rx_ready_delay_get(0U, 0U);
		subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */
	} else {
		/* No anchor sync yet: schedule relative to the stored radio
		 * ready timestamp instead.
		 */
		subevent_us = radio_tmr_ready_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
	}

	start_us = radio_tmr_start_us(0U, subevent_us);
	LL_ASSERT(!trx_performed_bitmask || (start_us == (subevent_us + 1U)));

	/* If no anchor point sync yet, continue to capture access address
	 * timestamp.
	 */
	if (!radio_tmr_aa_restore()) {
		radio_tmr_aa_capture();
	}

	cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

	/* HCTO window: jitter, ticker resolution margin and accumulated
	 * window widening, applied on both sides (hence the << 1U).
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U);

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, cis_lll);

	/* Advance to the next subevent index */
	se_curr++;
}
1186
/* Event-done ISR: flush/advance payload counters for the CIS just
 * serviced, report the event outcome (trx bitmask, MIC state and, when
 * an anchor point was sync-ed, drift compensation data) to ULL via the
 * event-done extra, then clean up LLL state.
 *
 * @param param Pointer to the CIS LLL context (struct lll_conn_iso_stream).
 */
static void isr_done(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct event_done_extra *e;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	payload_count_rx_flush_or_txrx_inc(cis_lll);

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CIS;
	e->trx_performed_bitmask = trx_performed_bitmask;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Any successful trx in this event means the anchor point was
	 * sync-ed: report actual start-to-address timing so ULL can
	 * compensate for peripheral clock drift.
	 */
	if (trx_performed_bitmask) {
		struct lll_conn_iso_group *cig_lll;
		uint32_t preamble_to_addr_us;

		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

#if defined(CONFIG_BT_CTLR_PHY)
		preamble_to_addr_us = addr_us_get(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
		preamble_to_addr_us = addr_us_get(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

		e->drift.start_to_address_actual_us =
			radio_tmr_aa_restore() - radio_tmr_ready_restore();
		e->drift.window_widening_event_us = EVENT_US_FRAC_TO_US(
			cig_lll->window_widening_event_us_frac);
		e->drift.preamble_to_addr_us = preamble_to_addr_us;

		/* Reset window widening, as anchor point sync-ed */
		cig_lll->window_widening_event_us_frac = 0U;
	}

	lll_isr_cleanup(param);
}
1233
/* Flush payloads whose flush point falls within the current subevent:
 * repeatedly flush expired Tx payloads, and flush at most one Rx
 * payload. Each flush advances the 1-bit sequence number (sn/nesn) and
 * the burst position (bn_curr wraps, payload_count advances by bn).
 *
 * @param cis_lll CIS LLL context whose counters are advanced in place.
 */
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* Absolute counter of the payload at the head of the Tx
		 * burst (bn_curr is 1-based).
		 */
		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		/* NOTE(review): 'u' appears to be the last subevent index in
		 * which this payload may still be transferred, derived from
		 * its position within the burst — confirm against the CIS
		 * scheduling design.
		 */
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		/* Flush while the payload's flush point (its event plus the
		 * flush timeout) is already past, or is the next event with
		 * its last usable subevent before the current one — and the
		 * payload is not ahead of the current event.
		 */
		while (((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
			 (cis_lll->event_count + 1U)) ||
			((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
			  (cis_lll->event_count + 1U)) && (u < se_curr))) &&
		       (((cis_lll->tx.bn_curr < cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) <= cis_lll->event_count)) ||
			((cis_lll->tx.bn_curr == cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* Recompute the flush criterion for the next payload
			 * in the burst.
			 */
			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* Same flush criterion for Rx, but applied at most once per
		 * call ('if', not 'while').
		 */
		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		if ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->rx.bn_curr < cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->rx.bn_curr == cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}
	}
}
1291
payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream * cis_lll)1292 static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll)
1293 {
1294 if (cis_lll->tx.bn) {
1295 if (((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.bn_curr) >
1296 (cis_lll->event_count + cis_lll->tx.bn)) {
1297 cis_lll->tx.payload_count += cis_lll->tx.bn;
1298 cis_lll->tx.bn_curr = 1U;
1299 }
1300 }
1301
1302 if (cis_lll->rx.bn) {
1303 uint64_t payload_count;
1304 uint8_t u;
1305
1306 if (((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.bn_curr) >
1307 (cis_lll->event_count + cis_lll->rx.bn)) {
1308 cis_lll->rx.payload_count += cis_lll->rx.bn;
1309 cis_lll->rx.bn_curr = 1U;
1310
1311 return;
1312 }
1313
1314 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1315 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1316 (cis_lll->rx.bn - 1U -
1317 (payload_count % cis_lll->rx.bn)));
1318 while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
1319 (cis_lll->event_count + 1U)) ||
1320 ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
1321 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
1322 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1323 cis_lll->nesn++;
1324 cis_lll->rx.bn_curr++;
1325 if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
1326 cis_lll->rx.payload_count += cis_lll->rx.bn;
1327 cis_lll->rx.bn_curr = 1U;
1328 }
1329
1330 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1331 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1332 (cis_lll->rx.bn - 1U -
1333 (payload_count % cis_lll->rx.bn)));
1334 }
1335 }
1336 }
1337
payload_count_lazy(struct lll_conn_iso_stream * cis_lll,uint16_t lazy)1338 static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy)
1339 {
1340 if (cis_lll->tx.bn && lazy) {
1341 uint16_t tx_lazy;
1342
1343 tx_lazy = lazy;
1344 while (tx_lazy--) {
1345 uint64_t payload_count;
1346 uint8_t u;
1347
1348 payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
1349 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1350 (cis_lll->tx.bn - 1U -
1351 (payload_count % cis_lll->tx.bn)));
1352 while (((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
1353 (cis_lll->event_count + 1U)) ||
1354 ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
1355 (cis_lll->event_count + 1U)) && (u < (cis_lll->nse + 1U)))) &&
1356 ((cis_lll->tx.payload_count / cis_lll->tx.bn) <
1357 cis_lll->event_count)) {
1358 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1359 cis_lll->sn++;
1360 cis_lll->tx.bn_curr++;
1361 if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
1362 cis_lll->tx.payload_count += cis_lll->tx.bn;
1363 cis_lll->tx.bn_curr = 1U;
1364 }
1365
1366 payload_count = cis_lll->tx.payload_count +
1367 cis_lll->tx.bn_curr - 1U;
1368 u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
1369 (cis_lll->tx.bn - 1U -
1370 (payload_count % cis_lll->tx.bn)));
1371 }
1372 }
1373 }
1374
1375 if (cis_lll->rx.bn) {
1376 while (lazy--) {
1377 uint64_t payload_count;
1378 uint8_t u;
1379
1380 payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
1381 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1382 (cis_lll->rx.bn - 1U -
1383 (payload_count % cis_lll->rx.bn)));
1384 while (((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
1385 (cis_lll->event_count + 1U)) ||
1386 ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
1387 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) &&
1388 ((cis_lll->rx.payload_count / cis_lll->rx.bn) <
1389 cis_lll->event_count)) {
1390 /* sn and nesn are 1-bit, only Least Significant bit is needed */
1391 cis_lll->nesn++;
1392 cis_lll->rx.bn_curr++;
1393 if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
1394 cis_lll->rx.payload_count += cis_lll->rx.bn;
1395 cis_lll->rx.bn_curr = 1U;
1396 }
1397
1398 payload_count = cis_lll->rx.payload_count +
1399 cis_lll->rx.bn_curr - 1U;
1400 u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
1401 (cis_lll->rx.bn - 1U -
1402 (payload_count % cis_lll->rx.bn)));
1403 }
1404 }
1405 }
1406 }
1407