1 /*
2 * Copyright (c) 2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8
9 #include <zephyr/sys/byteorder.h>
10
11 #include "hal/ccm.h"
12 #include "hal/radio.h"
13 #include "hal/ticker.h"
14
15 #include "util/util.h"
16 #include "util/mem.h"
17 #include "util/memq.h"
18 #include "util/dbuf.h"
19
20 #include "pdu_df.h"
21 #include "pdu_vendor.h"
22 #include "pdu.h"
23
24 #include "lll.h"
25 #include "lll_clock.h"
26 #include "lll/lll_df_types.h"
27 #include "lll_chan.h"
28 #include "lll_vendor.h"
29 #include "lll_conn.h"
30 #include "lll_conn_iso.h"
31 #include "lll_peripheral_iso.h"
32
33 #include "lll_iso_tx.h"
34
35 #include "lll_internal.h"
36 #include "lll_tim_internal.h"
37
38 #include "ll_feat.h"
39
40 #include "hal/debug.h"
41
42 static int init_reset(void);
43 static int prepare_cb(struct lll_prepare_param *p);
44 static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
45 static void isr_rx(void *param);
46 static void isr_tx(void *param);
47 static void next_cis_prepare(void *param);
48 static void isr_prepare_subevent(void *param);
49 static void isr_prepare_subevent_next_cis(void *param);
50 static void isr_prepare_subevent_common(void *param);
51 static void isr_done(void *param);
52 static void payload_count_flush(struct lll_conn_iso_stream *cis_lll);
53 static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll);
54 static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy);
55
/* Channel to use for the next subevent or next CIS (set in ISR context) */
static uint8_t next_chan_use;
/* Channel identifier derived from the current CIS access address */
static uint16_t data_chan_id;
/* Pseudo-random number state for the ISO channel selection algorithm */
static uint16_t data_chan_prn_s;
/* Remapping index state for the ISO channel selection algorithm */
static uint16_t data_chan_remap_idx;

/* Bitmask of CIS indices that had a tx-rx chain performed this event */
static uint32_t trx_performed_bitmask;
/* Offset of the first active CIS, used as the event time reference */
static uint16_t cis_offset_first;
/* Handle of the CIS currently being serviced */
static uint16_t cis_handle_curr;
/* Current subevent number, 1-based within the CIS event */
static uint8_t se_curr;

#if defined(CONFIG_BT_CTLR_LE_ENC)
/* MIC validation state of the last decrypted Rx PDU */
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
69
/* Initialize the peripheral ISO LLL module.
 *
 * Return: 0 on success, otherwise the error from init_reset().
 */
int lll_peripheral_iso_init(void)
{
	return init_reset();
}
81
/* Reset the peripheral ISO LLL module.
 *
 * Return: 0 on success, otherwise the error from init_reset().
 */
int lll_peripheral_iso_reset(void)
{
	return init_reset();
}
93
lll_peripheral_iso_prepare(void * param)94 void lll_peripheral_iso_prepare(void *param)
95 {
96 struct lll_conn_iso_group *cig_lll;
97 struct lll_prepare_param *p;
98 int err;
99
100 /* Initiate HF clock start up */
101 err = lll_hfclock_on();
102 LL_ASSERT(err >= 0);
103
104 p = param;
105
106 cig_lll = p->param;
107
108 /* Invoke common pipeline handling of prepare */
109 err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
110 LL_ASSERT(!err || err == -EINPROGRESS);
111 }
112
/* Flush hook for a peripheral CIS; nothing to flush at LLL level. */
void lll_peripheral_iso_flush(uint16_t handle, struct lll_conn_iso_stream *lll)
{
	/* Intentionally empty; parameters referenced to avoid warnings */
	(void)handle;
	(void)lll;
}
118
/* Common init/reset helper; no module state needs setup at present. */
static int init_reset(void)
{
	return 0;
}
123
/* Prepare callback for a peripheral CIG event.
 *
 * Selects the first active CIS in the CIG, computes event latency and
 * window widening, configures the radio for first-subevent reception
 * (including CCM Rx setup when encryption is enabled), flushes stale
 * Tx payloads for all active CISes, and arms the radio start timer
 * with an appropriately widened header-complete timeout.
 *
 * p: prepare parameters; p->param is the CIG LLL context.
 *
 * Return: 0 on success, or -ECANCELED when the preparation overhead
 * check fails and the event is aborted.
 */
static int prepare_cb(struct lll_prepare_param *p)
{
	struct lll_conn_iso_group *cig_lll = p->param;
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	struct node_rx_pdu *node_rx;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	struct node_tx_iso *tx;
	uint64_t payload_count;
	uint16_t event_counter;
	uint8_t data_chan_use;
	struct ull_hdr *ull;
	uint32_t remainder;
	memq_link_t *link;
	uint32_t start_us;
	uint32_t hcto;
	uint32_t ret;
	uint8_t phy;
	int err = 0;

	DEBUG_RADIO_START_S(1);

	/* Reset global static variables */
	trx_performed_bitmask = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Get the first CIS (lowest handle, skipping inactive streams) */
	cis_handle_curr = UINT16_MAX;
	do {
		cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle_curr);
	} while (cis_lll && !cis_lll->active);

	LL_ASSERT(cis_lll);

	/* Save first active CIS offset */
	cis_offset_first = cis_lll->offset;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Pick the event_count calculated in the ULL prepare */
	cis_lll->event_count = cis_lll->event_count_prepare;

	/* Event counter value,  0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* Calculate the current event latency */
	cig_lll->lazy_prepare = p->lazy;
	cig_lll->latency_event = cig_lll->latency_prepare + cig_lll->lazy_prepare;

	/* Reset accumulated latencies */
	cig_lll->latency_prepare = 0U;

	/* Accumulate window widening */
	cig_lll->window_widening_prepare_us_frac +=
	    cig_lll->window_widening_periodic_us_frac * (cig_lll->lazy_prepare + 1U);
	if (cig_lll->window_widening_prepare_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_prepare_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	/* Current window widening, capped at the maximum allowed */
	cig_lll->window_widening_event_us_frac +=
		cig_lll->window_widening_prepare_us_frac;
	cig_lll->window_widening_prepare_us_frac = 0;
	if (cig_lll->window_widening_event_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_event_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	/* First subevent of the event */
	se_curr = 1U;

	/* Adjust sn and nesn for skipped CIG events */
	payload_count_lazy(cis_lll, cig_lll->latency_event);

	/* Start setting up of Radio h/w */
	radio_reset();

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
	radio_tx_power_set(conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
	radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

	phy = cis_lll->rx.phy;
	radio_phy_set(phy, PHY_FLAGS_S8);
	radio_aa_set(cis_lll->access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(conn_lll->crc_init));
	lll_chan_set(data_chan_use);

	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_cnt;
		uint8_t pkt_flags;

		/* CCM counter is the absolute payload number expected */
		payload_cnt = cis_lll->rx.payload_count +
			      cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_cnt;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm, phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_isr_set(isr_rx, cis_lll);

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(cig_lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US +
						 cis_offset_first);

	remainder = p->remainder;
	start_us = radio_tmr_start(0U, ticks_at_start, remainder);

	radio_tmr_ready_save(start_us);
	radio_tmr_aa_save(0U);
	radio_tmr_aa_capture();

	/* Header Complete Timeout, use additional EVENT_TICKER_RES_MARGIN_US to
	 * compensate for possible shift in ACL peripheral's anchor point at
	 * the instant the CIS is to be established.
	 *
	 * FIXME: use a one time value in a window member variable to avoid
	 *        using this additional EVENT_TICKER_RES_MARGIN_US window in
	 *        subsequent events once CIS is established.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U) + EVENT_TICKER_RES_MARGIN_US;

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_CONN_ISO_BASE + cig_lll->handle),
				    ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		/* Defer the return; the stale Tx flush below still runs */
		err = -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	/* Adjust the SN and NESN for skipped CIG events; walk every active
	 * CIS in the group, dequeuing and acknowledging Tx payloads whose
	 * payload number has already elapsed.
	 */
	uint16_t cis_handle = cis_handle_curr;

	do {
		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail, (void **)&tx);
			if (link) {
				if (tx->payload_count < payload_count) {
					memq_dequeue(cis_lll->memq_tx.tail,
						     &cis_lll->memq_tx.head,
						     NULL);

					/* Reuse the memq link for the ack path */
					tx->next = link;
					ull_iso_lll_ack_enqueue(cis_lll->handle, tx);
				} else {
					break;
				}
			}
		} while (link);

		do {
			cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
		} while (cis_lll && !cis_lll->active);

		if (!cis_lll) {
			break;
		}

		/* Pick the event_count calculated in the ULL prepare */
		cis_lll->event_count = cis_lll->event_count_prepare;

		/* Adjust sn and nesn for skipped CIG events */
		payload_count_lazy(cis_lll, cig_lll->latency_event);

		/* Adjust sn and nesn for canceled events */
		if (err) {
			payload_count_rx_flush_or_txrx_inc(cis_lll);
		}
	} while (cis_lll);

	/* Return if prepare callback cancelled */
	if (err) {
		return err;
	}

	/* Prepare is done */
	ret = lll_prepare_done(cig_lll);
	LL_ASSERT(!ret);

	DEBUG_RADIO_START_S(1);

	return 0;
}
405
/* Abort callback for a peripheral CIG event.
 *
 * When prepare_param is NULL the event is already running: flush the
 * SN/NESN/payload counters of every remaining active CIS, then disable
 * the radio and dispatch isr_done. Otherwise a queued prepare is being
 * cancelled: release the HF clock, accumulate latency and window
 * widening for the skipped event, and complete via lll_done().
 */
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct lll_conn_iso_group *cig_lll;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *cis_lll;

		cis_lll = ull_conn_iso_lll_stream_get(cis_handle_curr);
		cig_lll = param;

		/* Adjust the SN, NESN and payload_count on abort for CISes */
		do {
			next_cis_lll =
				ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll,
									    &cis_handle_curr);
			if (next_cis_lll && next_cis_lll->active) {
				payload_count_rx_flush_or_txrx_inc(next_cis_lll);
			}
		} while (next_cis_lll);

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Get reference to CIG LLL context */
	cig_lll = prepare_param->param;

	/* Accumulate the latency as event is aborted while being in pipeline */
	cig_lll->lazy_prepare = prepare_param->lazy;
	cig_lll->latency_prepare += (cig_lll->lazy_prepare + 1U);

	/* Accumulate window widening */
	cig_lll->window_widening_prepare_us_frac +=
		cig_lll->window_widening_periodic_us_frac * (cig_lll->lazy_prepare + 1U);
	if (cig_lll->window_widening_prepare_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_prepare_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	lll_done(param);
}
463
isr_rx(void * param)464 static void isr_rx(void *param)
465 {
466 struct lll_conn_iso_stream *cis_lll;
467 const struct lll_conn *conn_lll;
468 struct pdu_cis *pdu_tx;
469 uint64_t payload_count;
470 uint8_t payload_index;
471 uint8_t trx_done;
472 uint8_t crc_ok;
473 uint8_t cie;
474
475 /* Read radio status and events */
476 trx_done = radio_is_done();
477 if (trx_done) {
478 crc_ok = radio_crc_is_valid();
479 } else {
480 crc_ok = 0U;
481 }
482
483 /* Clear radio rx status and events */
484 lll_isr_rx_status_reset();
485
486 /* Get reference to CIS LLL context */
487 cis_lll = param;
488
489 /* No Rx */
490 if (!trx_done ||
491 #if defined(CONFIG_TEST_FT_PER_SKIP_SUBEVENTS)
492 /* Used by test code,
493 * to skip a number of events in every 3 event count when current subevent is less than
494 * or equal to 2 or when current subevent has completed all its NSE number of subevents.
495 * OR
496 * to skip a (number + 1) of events in every 3 event count when current subevent is less
497 * than or equal to 1 or when current subevent has completed all its NSE number of
498 * subevents.
499 */
500 ((((cis_lll->event_count % 3U) < CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT) &&
501 ((se_curr > cis_lll->nse) || (se_curr <= 2U))) ||
502 (((cis_lll->event_count % 3U) < (CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT + 1U)) &&
503 ((se_curr > cis_lll->nse) || (se_curr <= 1U)))) ||
504 #endif
505 false) {
506 payload_count_flush(cis_lll);
507
508 /* Next subevent or next CIS */
509 if (se_curr < cis_lll->nse) {
510 radio_isr_set(isr_prepare_subevent, param);
511 } else {
512 next_cis_prepare(param);
513 }
514
515 radio_disable();
516
517 return;
518 }
519
520 /* Initialize Close Isochronous Event */
521 cie = 0U;
522
523 /* Save the AA captured for anchor point sync, this could be subsequent
524 * subevent if not synced to the first subevent.
525 */
526 if (!radio_tmr_aa_restore()) {
527 uint32_t se_offset_us;
528
529 se_offset_us = cis_lll->sub_interval * (se_curr - 1U);
530 radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
531 radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
532 }
533
534 /* Close subevent, one tx-rx chain */
535 radio_switch_complete_and_disable();
536
537 /* FIXME: Do not call this for every event/subevent */
538 ull_conn_iso_lll_cis_established(param);
539
540 /* Set the bit corresponding to CIS index */
541 trx_performed_bitmask |= (1U << LL_CIS_IDX_FROM_HANDLE(cis_lll->handle));
542
543 /* Get reference to ACL context */
544 conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
545
546 if (crc_ok) {
547 struct node_rx_pdu *node_rx;
548 struct pdu_cis *pdu_rx;
549
550 /* Get reference to received PDU */
551 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
552 LL_ASSERT(node_rx);
553
554 pdu_rx = (void *)node_rx->pdu;
555
556 /* Tx ACK */
557 if ((pdu_rx->nesn != cis_lll->sn) && (cis_lll->tx.bn_curr <= cis_lll->tx.bn)) {
558 cis_lll->sn++;
559 cis_lll->tx.bn_curr++;
560 if ((cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
561 ((cis_lll->tx.payload_count / cis_lll->tx.bn) <
562 cis_lll->event_count)) {
563 cis_lll->tx.payload_count += cis_lll->tx.bn;
564 cis_lll->tx.bn_curr = 1U;
565 }
566
567 /* TODO: Implement early Tx Ack. Currently Tx Ack
568 * generated as stale Tx Ack when payload count
569 * has elapsed.
570 */
571 }
572
573 /* Handle valid ISO data Rx */
574 if (!pdu_rx->npi &&
575 (cis_lll->rx.bn_curr <= cis_lll->rx.bn) &&
576 (pdu_rx->sn == cis_lll->nesn) &&
577 ull_iso_pdu_rx_alloc_peek(2U)) {
578 struct lll_conn_iso_group *cig_lll;
579 struct node_rx_iso_meta *iso_meta;
580
581 cis_lll->nesn++;
582
583 #if defined(CONFIG_BT_CTLR_LE_ENC)
584 /* If required, wait for CCM to finish
585 */
586 if (pdu_rx->len && conn_lll->enc_rx) {
587 uint32_t done;
588
589 done = radio_ccm_is_done();
590 LL_ASSERT(done);
591
592 if (!radio_ccm_mic_is_valid()) {
593 /* Record MIC invalid */
594 mic_state = LLL_CONN_MIC_FAIL;
595
596 /* Close event */
597 radio_isr_set(isr_done, param);
598 radio_disable();
599
600 return;
601 }
602
603 /* Record MIC valid */
604 mic_state = LLL_CONN_MIC_PASS;
605 }
606 #endif /* CONFIG_BT_CTLR_LE_ENC */
607
608 /* Enqueue Rx ISO PDU */
609 node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
610 node_rx->hdr.handle = cis_lll->handle;
611 iso_meta = &node_rx->rx_iso_meta;
612 iso_meta->payload_number = cis_lll->rx.payload_count +
613 cis_lll->rx.bn_curr - 1U;
614 iso_meta->timestamp = cis_lll->offset +
615 HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
616 radio_tmr_aa_restore() - cis_offset_first -
617 addr_us_get(cis_lll->rx.phy);
618 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
619 iso_meta->timestamp -= (cis_lll->event_count -
620 (cis_lll->rx.payload_count / cis_lll->rx.bn)) *
621 cig_lll->iso_interval_us;
622 iso_meta->timestamp %=
623 HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
624 iso_meta->status = 0U;
625
626 ull_iso_pdu_rx_alloc();
627 iso_rx_put(node_rx->hdr.link, node_rx);
628
629 #if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
630 iso_rx_sched();
631 #endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */
632
633 cis_lll->rx.bn_curr++;
634 if ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
635 ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)) {
636 cis_lll->rx.payload_count += cis_lll->rx.bn;
637 cis_lll->rx.bn_curr = 1U;
638 }
639 }
640
641 /* Close Isochronous Event */
642 cie = cie || pdu_rx->cie;
643 }
644
645 payload_count_flush(cis_lll);
646
647 /* Close Isochronous Event */
648 cie = cie || ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
649 (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
650 (se_curr < cis_lll->nse));
651
652 /* Get ISO data PDU */
653 if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
654 payload_count = 0U;
655
656 cis_lll->npi = 1U;
657
658 pdu_tx = radio_pkt_empty_get();
659 pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
660 pdu_tx->nesn = cis_lll->nesn;
661 pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
662 pdu_tx->cie = cie;
663 pdu_tx->npi = 1U;
664 pdu_tx->len = 0U;
665 } else {
666 struct node_tx_iso *tx;
667 memq_link_t *link;
668
669 payload_index = cis_lll->tx.bn_curr - 1U;
670 payload_count = cis_lll->tx.payload_count + payload_index;
671
672 link = memq_peek_n(cis_lll->memq_tx.head, cis_lll->memq_tx.tail,
673 payload_index, (void **)&tx);
674 if (!link || (tx->payload_count != payload_count)) {
675 payload_index = 0U;
676 do {
677 link = memq_peek_n(cis_lll->memq_tx.head,
678 cis_lll->memq_tx.tail,
679 payload_index, (void **)&tx);
680 payload_index++;
681 } while (link &&
682 (tx->payload_count < payload_count));
683 }
684
685 if (!link || (tx->payload_count != payload_count)) {
686 cis_lll->npi = 1U;
687
688 pdu_tx = radio_pkt_empty_get();
689 pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
690 pdu_tx->nesn = cis_lll->nesn;
691 pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
692 (cis_lll->rx.bn_curr > cis_lll->rx.bn);
693 pdu_tx->len = 0U;
694 pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
695 pdu_tx->npi = 1U;
696 } else {
697 cis_lll->npi = 0U;
698
699 pdu_tx = (void *)tx->pdu;
700 pdu_tx->nesn = cis_lll->nesn;
701 pdu_tx->sn = cis_lll->sn;
702 pdu_tx->cie = 0U;
703 pdu_tx->npi = 0U;
704 }
705 }
706
707 /* Initialize reserve bit */
708 pdu_tx->rfu0 = 0U;
709 pdu_tx->rfu1 = 0U;
710
711 /* PHY */
712 radio_phy_set(cis_lll->tx.phy, cis_lll->tx.phy_flags);
713
714 /* Encryption */
715 if (false) {
716
717 #if defined(CONFIG_BT_CTLR_LE_ENC)
718 } else if (pdu_tx->len && conn_lll->enc_tx) {
719 uint8_t pkt_flags;
720
721 cis_lll->tx.ccm.counter = payload_count;
722
723 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
724 cis_lll->tx.phy,
725 RADIO_PKT_CONF_CTE_DISABLED);
726 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
727 (cis_lll->tx.max_pdu + PDU_MIC_SIZE),
728 pkt_flags);
729 radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
730 RADIO_PKT_CONF_PDU_TYPE_CIS,
731 pdu_tx));
732 #endif /* CONFIG_BT_CTLR_LE_ENC */
733
734 } else {
735 uint8_t pkt_flags;
736
737 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
738 cis_lll->tx.phy,
739 RADIO_PKT_CONF_CTE_DISABLED);
740 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
741 cis_lll->tx.max_pdu, pkt_flags);
742 radio_pkt_tx_set(pdu_tx);
743 }
744
745 #if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
746 uint32_t pa_lna_enable_us;
747
748 radio_gpio_pa_setup();
749
750 pa_lna_enable_us = radio_tmr_tifs_base_get() + cis_lll->tifs_us -
751 HAL_RADIO_GPIO_PA_OFFSET;
752 #if defined(CONFIG_BT_CTLR_PHY)
753 pa_lna_enable_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
754 PHY_FLAGS_S8);
755 #else /* !CONFIG_BT_CTLR_PHY */
756 pa_lna_enable_us -= radio_rx_chain_delay_get(0U, 0U);
757 #endif /* !CONFIG_BT_CTLR_PHY */
758 radio_gpio_pa_lna_enable(pa_lna_enable_us);
759 #endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */
760
761 /* assert if radio packet ptr is not set and radio started tx */
762 LL_ASSERT(!radio_is_ready());
763
764 /* Schedule next subevent */
765 if (!cie && (se_curr < cis_lll->nse)) {
766 /* Calculate the radio channel to use for next subevent
767 */
768 next_chan_use = lll_chan_iso_subevent(data_chan_id,
769 conn_lll->data_chan_map,
770 conn_lll->data_chan_count,
771 &data_chan_prn_s,
772 &data_chan_remap_idx);
773 } else {
774 struct lll_conn_iso_stream *next_cis_lll;
775 struct lll_conn_iso_group *cig_lll;
776 uint16_t event_counter;
777 uint16_t cis_handle;
778
779 /* Check for next active CIS */
780 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
781 cis_handle = cis_handle_curr;
782 do {
783 next_cis_lll =
784 ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
785 } while (next_cis_lll && !next_cis_lll->active);
786
787 if (!next_cis_lll) {
788 /* ISO Event Done */
789 radio_isr_set(isr_done, param);
790
791 return;
792 }
793
794 payload_count_rx_flush_or_txrx_inc(cis_lll);
795
796 cis_handle_curr = cis_handle;
797
798 /* Event counter value, 0-15 bit of cisEventCounter */
799 event_counter = next_cis_lll->event_count;
800
801 /* Calculate the radio channel to use for next CIS ISO event */
802 data_chan_id = lll_chan_id(next_cis_lll->access_addr);
803 next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
804 conn_lll->data_chan_map,
805 conn_lll->data_chan_count,
806 &data_chan_prn_s,
807 &data_chan_remap_idx);
808
809 /* Next CIS, se_curr is incremented in isr_tx() */
810 cis_lll = next_cis_lll;
811 se_curr = 0U;
812 }
813
814 radio_isr_set(isr_tx, cis_lll);
815
816 #if !defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
817 uint32_t subevent_us;
818 uint32_t start_us;
819
820 /* Schedule next subevent reception */
821 subevent_us = radio_tmr_aa_restore();
822 subevent_us += cis_lll->offset - cis_offset_first +
823 (cis_lll->sub_interval * se_curr);
824 subevent_us -= addr_us_get(cis_lll->rx.phy);
825
826 #if defined(CONFIG_BT_CTLR_PHY)
827 subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
828 PHY_FLAGS_S8);
829 subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
830 PHY_FLAGS_S8);
831 #else /* !CONFIG_BT_CTLR_PHY */
832 subevent_us -= radio_rx_ready_delay_get(0U, 0U);
833 subevent_us -= radio_rx_chain_delay_get(0U, 0U);
834 #endif /* !CONFIG_BT_CTLR_PHY */
835
836 start_us = radio_tmr_start_us(0U, subevent_us);
837 LL_ASSERT(start_us == (subevent_us + 1U));
838 #endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */
839 }
840
isr_tx(void * param)841 static void isr_tx(void *param)
842 {
843 struct lll_conn_iso_stream *cis_lll;
844 struct lll_conn_iso_group *cig_lll;
845 struct node_rx_pdu *node_rx;
846 uint32_t subevent_us;
847 uint32_t start_us;
848 uint32_t hcto;
849
850 lll_isr_tx_sub_status_reset();
851
852 /* Get reference to CIS LLL context */
853 cis_lll = param;
854
855 node_rx = ull_iso_pdu_rx_alloc_peek(1U);
856 LL_ASSERT(node_rx);
857
858 #if defined(CONFIG_BT_CTLR_LE_ENC)
859 /* Get reference to ACL context */
860 const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
861 #endif /* CONFIG_BT_CTLR_LE_ENC */
862
863 /* PHY */
864 radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);
865
866 /* Encryption */
867 if (false) {
868
869 #if defined(CONFIG_BT_CTLR_LE_ENC)
870 } else if (conn_lll->enc_rx) {
871 uint64_t payload_count;
872 uint8_t pkt_flags;
873
874 payload_count = cis_lll->rx.payload_count +
875 cis_lll->rx.bn_curr - 1U;
876
877 cis_lll->rx.ccm.counter = payload_count;
878
879 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
880 cis_lll->rx.phy,
881 RADIO_PKT_CONF_CTE_DISABLED);
882 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
883 (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
884 pkt_flags);
885 radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
886 cis_lll->rx.phy,
887 RADIO_PKT_CONF_PDU_TYPE_CIS,
888 node_rx->pdu));
889 #endif /* CONFIG_BT_CTLR_LE_ENC */
890
891 } else {
892 uint8_t pkt_flags;
893
894 pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
895 cis_lll->rx.phy,
896 RADIO_PKT_CONF_CTE_DISABLED);
897 radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
898 cis_lll->rx.max_pdu, pkt_flags);
899 radio_pkt_rx_set(node_rx->pdu);
900 }
901
902 radio_aa_set(cis_lll->access_addr);
903
904 lll_chan_set(next_chan_use);
905
906 radio_tmr_tx_disable();
907 radio_tmr_rx_enable();
908
909 radio_tmr_tifs_set(cis_lll->tifs_us);
910
911 #if defined(CONFIG_BT_CTLR_PHY)
912 radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
913 cis_lll->tx.phy_flags);
914 #else /* !CONFIG_BT_CTLR_PHY */
915 radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
916 #endif /* !CONFIG_BT_CTLR_PHY */
917
918 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
919
920 subevent_us = radio_tmr_aa_restore();
921 subevent_us += cis_lll->offset - cis_offset_first +
922 (cis_lll->sub_interval * se_curr);
923 subevent_us -= addr_us_get(cis_lll->rx.phy);
924
925 #if defined(CONFIG_BT_CTLR_PHY)
926 subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
927 PHY_FLAGS_S8);
928 subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
929 PHY_FLAGS_S8);
930 #else /* !CONFIG_BT_CTLR_PHY */
931 subevent_us -= radio_rx_ready_delay_get(0U, 0U);
932 subevent_us -= radio_rx_chain_delay_get(0U, 0U);
933 #endif /* !CONFIG_BT_CTLR_PHY */
934
935 #if defined(CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER)
936 start_us = radio_tmr_start_us(0U, subevent_us);
937 LL_ASSERT(start_us == (subevent_us + 1U));
938
939 #else /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */
940 /* Compensate for the 1 us added by radio_tmr_start_us() */
941 start_us = subevent_us + 1U;
942 #endif /* !CONFIG_BT_CTLR_SW_SWITCH_SINGLE_TIMER */
943
944 hcto = start_us +
945 ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
946 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
947 1U);
948
949 #if defined(CONFIG_BT_CTLR_PHY)
950 hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
951 hcto += addr_us_get(cis_lll->rx.phy);
952 hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
953 #else /* !CONFIG_BT_CTLR_PHY */
954 hcto += radio_rx_ready_delay_get(0U, 0U);
955 hcto += addr_us_get(0U);
956 hcto += radio_rx_chain_delay_get(0U, 0U);
957 #endif /* !CONFIG_BT_CTLR_PHY */
958
959 radio_tmr_hcto_configure_abs(hcto);
960
961 #if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
962 radio_gpio_lna_setup();
963
964 #if defined(CONFIG_BT_CTLR_PHY)
965 radio_gpio_pa_lna_enable(start_us +
966 radio_rx_ready_delay_get(cis_lll->rx.phy,
967 PHY_FLAGS_S8) -
968 HAL_RADIO_GPIO_LNA_OFFSET);
969 #else /* !CONFIG_BT_CTLR_PHY */
970 radio_gpio_pa_lna_enable(start_us +
971 radio_rx_ready_delay_get(0U, 0U) -
972 HAL_RADIO_GPIO_LNA_OFFSET);
973 #endif /* !CONFIG_BT_CTLR_PHY */
974 #endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */
975
976 radio_isr_set(isr_rx, cis_lll);
977
978 se_curr++;
979 }
980
next_cis_prepare(void * param)981 static void next_cis_prepare(void *param)
982 {
983 struct lll_conn_iso_stream *next_cis_lll;
984 struct lll_conn_iso_stream *cis_lll;
985 struct lll_conn_iso_group *cig_lll;
986 uint16_t cis_handle;
987
988 /* Get reference to CIS LLL context */
989 cis_lll = param;
990
991 /* Check for next active CIS */
992 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
993 next_cis_lll = cis_lll;
994 cis_handle = cis_handle_curr;
995 do {
996 next_cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
997 } while (next_cis_lll && !next_cis_lll->active);
998
999 if (!next_cis_lll) {
1000 /* ISO Event Done */
1001 radio_isr_set(isr_done, param);
1002
1003 return;
1004 }
1005
1006 cis_handle_curr = cis_handle;
1007
1008 radio_isr_set(isr_prepare_subevent_next_cis, next_cis_lll);
1009 }
1010
/* ISR to prepare the next subevent of the current CIS.
 *
 * Advances the channel selection state to get the next subevent
 * channel, then delegates the radio setup to
 * isr_prepare_subevent_common().
 *
 * param: current CIS LLL context.
 */
static void isr_prepare_subevent(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Calculate the radio channel to use for next subevent
	 */
	next_chan_use = lll_chan_iso_subevent(data_chan_id,
					      conn_lll->data_chan_map,
					      conn_lll->data_chan_count,
					      &data_chan_prn_s,
					      &data_chan_remap_idx);

	isr_prepare_subevent_common(param);
}
1034
/* ISR to prepare the first subevent of the next CIS in the CIG.
 *
 * Re-derives the channel identifier from the next CIS's access address
 * and computes its event channel, resets the subevent counter, then
 * delegates the radio setup to isr_prepare_subevent_common().
 *
 * param: LLL context of the next CIS to service.
 */
static void isr_prepare_subevent_next_cis(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	uint16_t event_counter;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Event counter value,  0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for next CIS ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* se_curr is incremented in isr_prepare_subevent_common() */
	se_curr = 0U;

	isr_prepare_subevent_common(param);
}
1065
/* Common subevent prepare: program the radio for reception in the next
 * subevent of the CIS referenced by param.
 *
 * Configures PHY, (optional) CCM decryption, packet buffers, the radio
 * start time relative to the restored anchor point, the header complete
 * timeout (hcto) widened by jitter/window-widening, and LNA GPIO where
 * available. Expects next_chan_use and the channel selection state to have
 * been set by the caller. Increments se_curr on exit.
 */
static void isr_prepare_subevent_common(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct lll_conn_iso_group *cig_lll;
	struct node_rx_pdu *node_rx;
	uint32_t subevent_us;
	uint32_t start_us;
	uint32_t hcto;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Peek a free Rx PDU buffer; reception cannot proceed without one */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM packet counter is the payload count of the payload
		 * expected in this subevent (bn_curr is 1-based).
		 */
		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		/* Max PDU length grows by the MIC when decryption is on */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		/* Route reception through the CCM peripheral into the PDU */
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		/* Unencrypted: receive directly into the PDU buffer */
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_aa_set(cis_lll->access_addr);

	/* Channel selected by the caller (subevent or next-CIS event) */
	lll_chan_set(next_chan_use);

	radio_tmr_tx_disable();
	radio_tmr_rx_enable();

	radio_tmr_tifs_set(cis_lll->tifs_us);

	/* After Rx completes, switch to Tx for the peripheral's response */
#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Anchor point sync-ed */
	if (trx_performed_bitmask) {
		/* Sync achieved this event: base the subevent start on the
		 * captured access address timestamp, backing out on-air
		 * address duration and radio Rx delays.
		 */
		subevent_us = radio_tmr_aa_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
		subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
		subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
		subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
		subevent_us -= radio_rx_ready_delay_get(0U, 0U);
		subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */
	} else {
		/* No sync yet: fall back to the prepared ready timestamp */
		subevent_us = radio_tmr_ready_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
	}

	start_us = radio_tmr_start_us(0U, subevent_us);
	/* When sync-ed, the timer must have been programmed in the future */
	LL_ASSERT(!trx_performed_bitmask || (start_us == (subevent_us + 1U)));

	/* If no anchor point sync yet, continue to capture access address
	 * timestamp.
	 */
	if (!radio_tmr_aa_restore()) {
		radio_tmr_aa_capture();
	}

	cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

	/* Header complete timeout: start + 2 * (jitter + ticker resolution
	 * margin + accumulated window widening), then extended by the Rx
	 * path delays and address duration below.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U);

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure_abs(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	/* Enable the LNA just before the radio is ready to receive */
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	/* Next radio interrupt handles the received (or timed out) PDU */
	radio_isr_set(isr_rx, cis_lll);

	se_curr++;
}
1211
/* Final ISR of the CIS event: flush payload counters, emit the CIS done
 * event (with drift-compensation data when an anchor point was sync-ed)
 * and hand control back to the scheduler via lll_isr_cleanup().
 */
static void isr_done(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct event_done_extra *e;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Advance/flush Rx and Tx payload counters for the closed event */
	payload_count_rx_flush_or_txrx_inc(cis_lll);

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CIS;
	e->trx_performed_bitmask = trx_performed_bitmask;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* At least one subevent had a successful trx: report drift so ULL
	 * can compensate the next event's anchor point.
	 */
	if (trx_performed_bitmask) {
		struct lll_conn_iso_group *cig_lll;
		uint32_t preamble_to_addr_us;

		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

#if defined(CONFIG_BT_CTLR_PHY)
		preamble_to_addr_us = addr_us_get(cis_lll->rx.phy);
#else /* !CONFIG_BT_CTLR_PHY */
		preamble_to_addr_us = addr_us_get(0U);
#endif /* !CONFIG_BT_CTLR_PHY */

		/* Actual on-air delta between radio-ready and AA capture */
		e->drift.start_to_address_actual_us =
			radio_tmr_aa_restore() - radio_tmr_ready_restore();
		e->drift.window_widening_event_us = EVENT_US_FRAC_TO_US(
			cig_lll->window_widening_event_us_frac);
		e->drift.preamble_to_addr_us = preamble_to_addr_us;

		/* Reset window widening, as anchor point sync-ed */
		cig_lll->window_widening_event_us_frac = 0U;
	}

	lll_isr_cleanup(param);
}
1258
/* Flush Tx and Rx payloads whose flush point has been reached within the
 * current event, per the CIS flush timeout (FT) and burst number (BN)
 * rules, advancing sn/nesn and the payload counters accordingly.
 *
 * NOTE(review): `u` appears to be the last subevent index in which the
 * current payload may still be transferred (derived from nse, BN and the
 * payload's position within the burst) — TODO confirm against the Core
 * Spec flush timeout definition.
 */
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* Absolute payload count of the Tx payload in flight
		 * (bn_curr is 1-based).
		 */
		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		/* Flush while the payload's flush point (its event plus FT)
		 * has passed — or is this event with its last usable
		 * subevent already behind se_curr — and the payload still
		 * belongs to the current or an earlier event.
		 */
		while (((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
			 (cis_lll->event_count + 1U)) ||
			((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
			  (cis_lll->event_count + 1U)) && (u < se_curr))) &&
		       (((cis_lll->tx.bn_curr < cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) <= cis_lll->event_count)) ||
			((cis_lll->tx.bn_curr == cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				/* Burst exhausted: move to the next burst */
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* Recompute for the newly-current payload */
			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* Same flush-point computation for the Rx direction; at most
		 * one Rx payload is flushed here (if, not while).
		 */
		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		if ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->rx.bn_curr < cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->rx.bn_curr == cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				/* Burst exhausted: move to the next burst */
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}
	}
}
1316
/* End-of-event payload counter maintenance: advance the Tx payload count
 * when the whole Tx burst for this event was consumed, and either advance
 * or flush-forward the Rx payload counters per the flush timeout rules.
 *
 * Called from isr_done() after the last subevent of the CIS event.
 */
static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		/* All BN Tx payloads of this event handled: advance to the
		 * next burst.
		 */
		if (((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.bn_curr) >
		    (cis_lll->event_count + cis_lll->tx.bn)) {
			cis_lll->tx.payload_count += cis_lll->tx.bn;
			cis_lll->tx.bn_curr = 1U;
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* All BN Rx payloads of this event received: advance to the
		 * next burst and skip the flush loop below.
		 */
		if (((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.bn_curr) >
		    (cis_lll->event_count + cis_lll->rx.bn)) {
			cis_lll->rx.payload_count += cis_lll->rx.bn;
			cis_lll->rx.bn_curr = 1U;

			return;
		}

		/* Otherwise flush-forward Rx payloads whose flush point has
		 * passed. NOTE(review): `u <= (nse + 1U)` looks always true
		 * given u is derived from nse — presumably intentional so
		 * any payload at its flush-point event is flushed here;
		 * verify against upstream history.
		 */
		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
			(cis_lll->event_count + 1U)) ||
		       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
			 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				/* Burst exhausted: move to the next burst */
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}

			/* Recompute for the newly-current payload */
			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
		}
	}
}
1362
/* Catch up payload counters over `lazy` skipped CIS events.
 *
 * For each skipped event, advance sn/nesn and the Tx/Rx payload counters
 * exactly as a flush at that event would have, so the stream state is
 * consistent when the next event actually runs.
 *
 * @param cis_lll CIS LLL context whose counters are advanced
 * @param lazy    Number of events that were skipped
 */
static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy)
{
	if (cis_lll->tx.bn && lazy) {
		uint16_t tx_lazy;

		/* Iterate once per skipped event for the Tx direction */
		tx_lazy = lazy;
		while (tx_lazy--) {
			uint64_t payload_count;
			uint8_t u;

			/* Absolute payload count in flight (bn_curr is
			 * 1-based); `u` appears to be its last usable
			 * subevent index — see payload_count_flush().
			 */
			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
			while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
				 cis_lll->event_count) && (u < cis_lll->nse))) {
				/* sn and nesn are 1-bit, only Least Significant bit is needed */
				cis_lll->sn++;
				cis_lll->tx.bn_curr++;
				if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
					/* Burst exhausted: next burst */
					cis_lll->tx.payload_count += cis_lll->tx.bn;
					cis_lll->tx.bn_curr = 1U;
				}

				/* Recompute for the newly-current payload */
				payload_count = cis_lll->tx.payload_count +
						cis_lll->tx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
						    (cis_lll->tx.bn - 1U -
						     (payload_count % cis_lll->tx.bn)));
			}
		}
	}

	if (cis_lll->rx.bn) {
		/* Same per-skipped-event catch-up for the Rx direction */
		while (lazy--) {
			uint64_t payload_count;
			uint8_t u;

			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
			while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
				 cis_lll->event_count) && (u <= cis_lll->nse))) {
				/* sn and nesn are 1-bit, only Least Significant bit is needed */
				cis_lll->nesn++;
				cis_lll->rx.bn_curr++;
				if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
					/* Burst exhausted: next burst */
					cis_lll->rx.payload_count += cis_lll->rx.bn;
					cis_lll->rx.bn_curr = 1U;
				}

				/* Recompute for the newly-current payload */
				payload_count = cis_lll->rx.payload_count +
						cis_lll->rx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
						    (cis_lll->rx.bn - 1U -
						     (payload_count % cis_lll->rx.bn)));
			}
		}
	}
}
1428