1 /*
2 * Copyright (c) 2021 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8
9 #include <zephyr/sys/byteorder.h>
10
11 #include "hal/ccm.h"
12 #include "hal/radio.h"
13 #include "hal/ticker.h"
14
15 #include "util/util.h"
16 #include "util/mem.h"
17 #include "util/memq.h"
18 #include "util/dbuf.h"
19
20 #include "pdu_df.h"
21 #include "pdu_vendor.h"
22 #include "pdu.h"
23
24 #include "lll.h"
25 #include "lll_clock.h"
26 #include "lll/lll_df_types.h"
27 #include "lll_chan.h"
28 #include "lll_vendor.h"
29 #include "lll_conn.h"
30 #include "lll_conn_iso.h"
31 #include "lll_peripheral_iso.h"
32
33 #include "lll_iso_tx.h"
34
35 #include "lll_internal.h"
36 #include "lll_tim_internal.h"
37
38 #include "ll_feat.h"
39
40 #include "hal/debug.h"
41
/* Forward declarations of the per-event state machine handlers */
static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *p);
static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
static void isr_rx(void *param);
static void isr_tx(void *param);
static void next_cis_prepare(void *param);
static void isr_prepare_subevent(void *param);
static void isr_prepare_subevent_next_cis(void *param);
static void isr_prepare_subevent_common(void *param);
static void isr_done(void *param);
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll);
static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll);
static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy);

/* Channel selection state carried between subevents/CISes of the current
 * ISO event (inputs/outputs of lll_chan_iso_event()/lll_chan_iso_subevent()).
 */
static uint8_t next_chan_use;
static uint16_t data_chan_id;
static uint16_t data_chan_prn_s;
static uint16_t data_chan_remap_idx;

/* Per-ISO-event bookkeeping: bit set per CIS index on successful trx
 * (see isr_rx()), offset of the first active CIS, handle of the CIS
 * currently being serviced, and the 1-based current subevent number.
 */
static uint32_t trx_performed_bitmask;
static uint16_t cis_offset_first;
static uint16_t cis_handle_curr;
static uint8_t se_curr;

#if defined(CONFIG_BT_CTLR_LE_ENC)
/* MIC verification result of the last decrypted Rx PDU */
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
69
/* One-time initialization of the peripheral ISO LLL module.
 *
 * Returns 0 on success, else the error from the common init/reset path.
 */
int lll_peripheral_iso_init(void)
{
	/* Shared with lll_peripheral_iso_reset(); returns 0 on success */
	return init_reset();
}
81
/* Reset the peripheral ISO LLL module state.
 *
 * Returns 0 on success, else the error from the common init/reset path.
 */
int lll_peripheral_iso_reset(void)
{
	/* Shared with lll_peripheral_iso_init(); returns 0 on success */
	return init_reset();
}
93
lll_peripheral_iso_prepare(void * param)94 void lll_peripheral_iso_prepare(void *param)
95 {
96 struct lll_conn_iso_group *cig_lll;
97 struct lll_prepare_param *p;
98 int err;
99
100 /* Initiate HF clock start up */
101 err = lll_hfclock_on();
102 LL_ASSERT(err >= 0);
103
104 p = param;
105
106 cig_lll = p->param;
107
108 /* Invoke common pipeline handling of prepare */
109 err = lll_prepare(lll_is_abort_cb, abort_cb, prepare_cb, 0U, param);
110 LL_ASSERT(!err || err == -EINPROGRESS);
111 }
112
/* Flush hook for a CIS; intentionally a no-op on this LLL
 * implementation (nothing is buffered at this layer to flush).
 *
 * @param handle CIS handle (unused)
 * @param lll    CIS LLL context (unused)
 */
void lll_peripheral_iso_flush(uint16_t handle, struct lll_conn_iso_stream *lll)
{
	ARG_UNUSED(lll);
	ARG_UNUSED(handle);
}
118
/* Common init/reset helper; no module state needs (re)initialization
 * here, so this always succeeds.
 *
 * Returns 0.
 */
static int init_reset(void)
{
	return 0;
}
123
/* Prepare callback: set up the radio for the first subevent of the first
 * active CIS in the CIG, program the anchor-point timer and header
 * complete timeout, and reconcile Tx/Rx payload counters for any CIG
 * events skipped (lazy) before this prepare.
 *
 * @param p Prepare parameters (CIG LLL context in p->param).
 *
 * Returns 0 on success, -ECANCELED if the preparation overhead check
 * cancelled the event (payload counters are still advanced in that case).
 */
static int prepare_cb(struct lll_prepare_param *p)
{
	struct lll_conn_iso_group *cig_lll = p->param;
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	struct node_rx_pdu *node_rx;
	uint32_t ticks_at_event;
	uint32_t ticks_at_start;
	struct node_tx_iso *tx;
	uint64_t payload_count;
	uint16_t event_counter;
	uint8_t data_chan_use;
	struct ull_hdr *ull;
	uint32_t remainder;
	memq_link_t *link;
	uint32_t start_us;
	uint32_t hcto;
	uint32_t ret;
	uint8_t phy;
	int err = 0;

	DEBUG_RADIO_START_S(1);

	/* Reset global static variables */
	trx_performed_bitmask = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* Get the first CIS: iterate the sorted CIS list from UINT16_MAX
	 * (sentinel meaning "start of list"), skipping inactive streams.
	 */
	cis_handle_curr = UINT16_MAX;
	do {
		cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle_curr);
	} while (cis_lll && !cis_lll->active);

	LL_ASSERT(cis_lll);

	/* Save first active CIS offset */
	cis_offset_first = cis_lll->offset;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Event counter value, 0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	data_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* Calculate the current event latency */
	cig_lll->lazy_prepare = p->lazy;
	cig_lll->latency_event = cig_lll->latency_prepare + cig_lll->lazy_prepare;

	/* Reset accumulated latencies */
	cig_lll->latency_prepare = 0U;

	/* Accumulate window widening, clamped to the maximum */
	cig_lll->window_widening_prepare_us_frac +=
	    cig_lll->window_widening_periodic_us_frac * (cig_lll->lazy_prepare + 1U);
	if (cig_lll->window_widening_prepare_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_prepare_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	/* Current window widening */
	cig_lll->window_widening_event_us_frac +=
		cig_lll->window_widening_prepare_us_frac;
	cig_lll->window_widening_prepare_us_frac = 0;
	if (cig_lll->window_widening_event_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_event_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	/* First subevent of the event (se_curr is 1-based) */
	se_curr = 1U;

	/* Adjust sn and nesn for skipped CIG events */
	payload_count_lazy(cis_lll, cig_lll->lazy_prepare);

	/* Start setting up of Radio h/w */
	radio_reset();

#if defined(CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL)
	radio_tx_power_set(conn_lll->tx_pwr_lvl);
#else /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */
	radio_tx_power_set(RADIO_TXP_DEFAULT);
#endif /* !CONFIG_BT_CTLR_TX_PWR_DYNAMIC_CONTROL */

	phy = cis_lll->rx.phy;
	radio_phy_set(phy, PHY_FLAGS_S8);
	radio_aa_set(cis_lll->access_addr);
	radio_crc_configure(PDU_CRC_POLYNOMIAL, sys_get_le24(conn_lll->crc_init));
	lll_chan_set(data_chan_use);

	/* Peek an Rx node; allocation happens only after a valid Rx */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_cnt;
		uint8_t pkt_flags;

		/* CCM counter is the payload count of the next expected
		 * Rx payload in the burst.
		 */
		payload_cnt = cis_lll->rx.payload_count +
			      cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_cnt;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm, phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_isr_set(isr_rx, cis_lll);

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	ticks_at_event = p->ticks_at_expire;
	ull = HDR_LLL2ULL(cig_lll);
	ticks_at_event += lll_event_offset_get(ull);

	ticks_at_start = ticks_at_event;
	ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US +
						 cis_offset_first);

	remainder = p->remainder;
	start_us = radio_tmr_start(0U, ticks_at_start, remainder);

	radio_tmr_ready_save(start_us);
	radio_tmr_aa_save(0U);
	radio_tmr_aa_capture();

	/* Header Complete Timeout, use additional EVENT_TICKER_RES_MARGIN_US to
	 * compensate for possible shift in ACL peripheral's anchor point at
	 * the instant the CIS is to be established.
	 *
	 * FIXME: use a one time value in a window member variable to avoid
	 *        using this additional EVENT_TICKER_RES_MARGIN_US window in
	 *        subsequent events once CIS is established.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U) + EVENT_TICKER_RES_MARGIN_US;

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
	(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
	uint32_t overhead;

	overhead = lll_preempt_calc(ull, (TICKER_ID_CONN_ISO_BASE + cig_lll->handle),
				    ticks_at_event);
	/* check if preempt to start has changed */
	if (overhead) {
		LL_ASSERT_OVERHEAD(overhead);

		/* Too late to run this event; disable the radio and
		 * dispatch done from isr_done, but continue below so
		 * payload counters still advance for the lost event.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		err = -ECANCELED;
	}
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */

	/* Adjust the SN and NESN for skipped CIG events */
	uint16_t cis_handle = cis_handle_curr;

	do {
		payload_count = cis_lll->tx.payload_count +
				cis_lll->tx.bn_curr - 1U;

		/* Dequeue and ack any queued Tx payloads that are older
		 * than the current payload count (stale after lazy/skip).
		 */
		do {
			link = memq_peek(cis_lll->memq_tx.head,
					 cis_lll->memq_tx.tail, (void **)&tx);
			if (link) {
				if (tx->payload_count < payload_count) {
					memq_dequeue(cis_lll->memq_tx.tail,
						     &cis_lll->memq_tx.head,
						     NULL);

					tx->next = link;
					ull_iso_lll_ack_enqueue(cis_lll->handle, tx);
				} else {
					break;
				}
			}
		} while (link);

		/* Next active CIS in the CIG, if any */
		do {
			cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
		} while (cis_lll && !cis_lll->active);

		if (!cis_lll) {
			break;
		}

		/* Adjust sn and nesn for skipped CIG events */
		payload_count_lazy(cis_lll, cig_lll->lazy_prepare);

		/* Adjust sn and nesn for canceled events */
		if (err) {
			payload_count_rx_flush_or_txrx_inc(cis_lll);
		}
	} while (cis_lll);

	/* Return if prepare callback cancelled */
	if (err) {
		return err;
	}

	/* Prepare is done */
	ret = lll_prepare_done(cig_lll);
	LL_ASSERT(!ret);

	DEBUG_RADIO_START_S(1);

	return 0;
}
399
/* Abort callback: two distinct paths.
 *
 * - prepare_param == NULL: an active event is being aborted. Flush the
 *   SN/NESN/payload counters for the remaining active CISes of the CIG,
 *   then disable the radio and let isr_done() dispatch event done.
 * - prepare_param != NULL: a prepare still in the pipeline is being
 *   cancelled. Release the HF clock request, accumulate the event
 *   latency and window widening for the lost event(s), and signal done.
 */
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct lll_conn_iso_group *cig_lll;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_stream *cis_lll;

		cis_lll = ull_conn_iso_lll_stream_get(cis_handle_curr);
		cig_lll = param;

		/* Adjust the SN, NESN and payload_count on abort for CISes
		 * (iterates the sorted list onwards from cis_handle_curr).
		 */
		do {
			next_cis_lll =
				ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll,
									    &cis_handle_curr);
			if (next_cis_lll && next_cis_lll->active) {
				payload_count_rx_flush_or_txrx_inc(next_cis_lll);
			}
		} while (next_cis_lll);

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, cis_lll);
		radio_disable();

		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Get reference to CIG LLL context */
	cig_lll = prepare_param->param;

	/* Accumulate the latency as event is aborted while being in pipeline */
	cig_lll->lazy_prepare = prepare_param->lazy;
	cig_lll->latency_prepare += (cig_lll->lazy_prepare + 1U);

	/* Accumulate window widening, clamped to the maximum */
	cig_lll->window_widening_prepare_us_frac +=
	    cig_lll->window_widening_periodic_us_frac * (cig_lll->lazy_prepare + 1U);
	if (cig_lll->window_widening_prepare_us_frac >
	    EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us)) {
		cig_lll->window_widening_prepare_us_frac =
			EVENT_US_TO_US_FRAC(cig_lll->window_widening_max_us);
	}

	lll_done(param);
}
457
/* Rx ISR: runs after a subevent reception window closes.
 *
 * Handles: Rx status/CRC evaluation, anchor point capture, Tx ACK
 * bookkeeping, valid ISO Rx PDU enqueue towards ULL, selection and
 * setup of the Tx response PDU (real payload or NULL/NPI PDU), and
 * scheduling of the next subevent or the next CIS of the CIG.
 *
 * @param param Current CIS LLL context (struct lll_conn_iso_stream *).
 */
static void isr_rx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	struct pdu_cis *pdu_tx;
	uint64_t payload_count;
	uint8_t payload_index;
	uint32_t subevent_us;
	uint32_t start_us;
	uint8_t trx_done;
	uint8_t crc_ok;
	uint8_t cie;

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
	} else {
		crc_ok = 0U;
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* No Rx */
	if (!trx_done ||
#if defined(CONFIG_TEST_FT_PER_SKIP_SUBEVENTS)
	    /* Used by test code,
	     * to skip a number of events in every 3 event count when current subevent is less than
	     * or equal to 2 or when current subevent has completed all its NSE number of subevents.
	     * OR
	     * to skip a (number + 1) of events in every 3 event count when current subevent is less
	     * than or equal to 1 or when current subevent has completed all its NSE number of
	     * subevents.
	     */
	    ((((cis_lll->event_count % 3U) < CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 2U))) ||
	     (((cis_lll->event_count % 3U) < (CONFIG_TEST_FT_PER_SKIP_EVENTS_COUNT + 1U)) &&
	      ((se_curr > cis_lll->nse) || (se_curr <= 1U)))) ||
#endif
	    false) {
		payload_count_flush(cis_lll);

		/* Next subevent or next CIS */
		if (se_curr < cis_lll->nse) {
			radio_isr_set(isr_prepare_subevent, param);
		} else {
			next_cis_prepare(param);
		}

		radio_disable();

		return;
	}

	/* Initialize Close Isochronous Event */
	cie = 0U;

	/* Save the AA captured for anchor point sync, this could be subsequent
	 * subevent if not synced to the first subevent.
	 */
	if (!radio_tmr_aa_restore()) {
		uint32_t se_offset_us;

		/* Back-compute the anchor point from the current subevent
		 * index (se_curr is 1-based).
		 */
		se_offset_us = cis_lll->sub_interval * (se_curr - 1U);
		radio_tmr_aa_save(radio_tmr_aa_get() - se_offset_us);
		radio_tmr_ready_save(radio_tmr_ready_get() - se_offset_us);
	}

	/* Close subevent, one tx-rx chain */
	radio_switch_complete_and_disable();

	/* FIXME: Do not call this for every event/subevent */
	ull_conn_iso_lll_cis_established(param);

	/* Set the bit corresponding to CIS index */
	trx_performed_bitmask |= (1U << LL_CIS_IDX_FROM_HANDLE(cis_lll->handle));

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	if (crc_ok) {
		struct node_rx_pdu *node_rx;
		struct pdu_cis *pdu_rx;

		/* Get reference to received PDU */
		node_rx = ull_iso_pdu_rx_alloc_peek(1U);
		LL_ASSERT(node_rx);

		pdu_rx = (void *)node_rx->pdu;

		/* Tx ACK: peer's nesn differing from our sn acknowledges
		 * the previously transmitted payload.
		 */
		if ((pdu_rx->nesn != cis_lll->sn) && (cis_lll->tx.bn_curr <= cis_lll->tx.bn)) {
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if ((cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
			    ((cis_lll->tx.payload_count / cis_lll->tx.bn) <
			     cis_lll->event_count)) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* TODO: Implement early Tx Ack. Currently Tx Ack
			 *       generated as stale Tx Ack when payload count
			 *       has elapsed.
			 */
		}

		/* Handle valid ISO data Rx: not a NULL PDU, burst not yet
		 * complete, in-sequence, and a spare Rx node is available.
		 */
		if (!pdu_rx->npi &&
		    (cis_lll->rx.bn_curr <= cis_lll->rx.bn) &&
		    (pdu_rx->sn == cis_lll->nesn) &&
		    ull_iso_pdu_rx_alloc_peek(2U)) {
			struct lll_conn_iso_group *cig_lll;
			struct node_rx_iso_meta *iso_meta;

			cis_lll->nesn++;

#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* If required, wait for CCM to finish
			 */
			if (pdu_rx->len && conn_lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				if (!radio_ccm_mic_is_valid()) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					/* Close event */
					radio_isr_set(isr_done, param);
					radio_disable();

					return;
				}

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue Rx ISO PDU */
			node_rx->hdr.type = NODE_RX_TYPE_ISO_PDU;
			node_rx->hdr.handle = cis_lll->handle;
			iso_meta = &node_rx->rx_iso_meta;
			iso_meta->payload_number = cis_lll->rx.payload_count +
						   cis_lll->rx.bn_curr - 1U;
			iso_meta->timestamp = cis_lll->offset +
					      HAL_TICKER_TICKS_TO_US(radio_tmr_start_get()) +
					      radio_tmr_aa_restore() - cis_offset_first -
					      addr_us_get(cis_lll->rx.phy);
			cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
			iso_meta->timestamp -= (cis_lll->event_count -
						(cis_lll->rx.payload_count / cis_lll->rx.bn)) *
					       cig_lll->iso_interval_us;
			iso_meta->timestamp %=
				HAL_TICKER_TICKS_TO_US_64BIT(BIT64(HAL_TICKER_CNTR_MSBIT + 1U));
			iso_meta->status = 0U;

			ull_iso_pdu_rx_alloc();
			iso_rx_put(node_rx->hdr.link, node_rx);

#if !defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
			iso_rx_sched();
#endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */

			cis_lll->rx.bn_curr++;
			if ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
			    ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}

		/* Close Isochronous Event */
		cie = cie || pdu_rx->cie;
	}

	payload_count_flush(cis_lll);

	/* Close Isochronous Event: both bursts complete before the last
	 * subevent.
	 */
	cie = cie || ((cis_lll->rx.bn_curr > cis_lll->rx.bn) &&
		      (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
		      (se_curr < cis_lll->nse));

	/* Get ISO data PDU; fall back to a NULL (NPI) PDU when the Tx
	 * burst is complete or no matching payload is queued.
	 */
	if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
		payload_count = 0U;

		cis_lll->npi = 1U;

		pdu_tx = radio_pkt_empty_get();
		pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
		pdu_tx->nesn = cis_lll->nesn;
		pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
		pdu_tx->cie = cie;
		pdu_tx->npi = 1U;
		pdu_tx->len = 0U;
	} else {
		struct node_tx_iso *tx;
		memq_link_t *link;

		payload_index = cis_lll->tx.bn_curr - 1U;
		payload_count = cis_lll->tx.payload_count + payload_index;

		link = memq_peek_n(cis_lll->memq_tx.head, cis_lll->memq_tx.tail,
				   payload_index, (void **)&tx);
		if (!link || (tx->payload_count != payload_count)) {
			/* Scan the queue from the start for the payload
			 * with the expected payload count.
			 */
			payload_index = 0U;
			do {
				link = memq_peek_n(cis_lll->memq_tx.head,
						   cis_lll->memq_tx.tail,
						   payload_index, (void **)&tx);
				payload_index++;
			} while (link &&
				 (tx->payload_count < payload_count));
		}

		if (!link || (tx->payload_count != payload_count)) {
			cis_lll->npi = 1U;

			pdu_tx = radio_pkt_empty_get();
			pdu_tx->ll_id = PDU_CIS_LLID_START_CONTINUE;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->cie = (cis_lll->tx.bn_curr > cis_lll->tx.bn) &&
				      (cis_lll->rx.bn_curr > cis_lll->rx.bn);
			pdu_tx->len = 0U;
			pdu_tx->sn = 0U; /* reserved RFU for NULL PDU */
			pdu_tx->npi = 1U;
		} else {
			cis_lll->npi = 0U;

			pdu_tx = (void *)tx->pdu;
			pdu_tx->nesn = cis_lll->nesn;
			pdu_tx->sn = cis_lll->sn;
			pdu_tx->cie = 0U;
			pdu_tx->npi = 0U;
		}
	}

	/* Initialize reserve bit */
	pdu_tx->rfu0 = 0U;
	pdu_tx->rfu1 = 0U;

	/* PHY */
	radio_phy_set(cis_lll->tx.phy, cis_lll->tx.phy_flags);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (pdu_tx->len && conn_lll->enc_tx) {
		uint8_t pkt_flags;

		cis_lll->tx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->tx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_tx_set(radio_ccm_iso_tx_pkt_set(&cis_lll->tx.ccm,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  pdu_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->tx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->tx.max_pdu, pkt_flags);
		radio_pkt_tx_set(pdu_tx);
	}

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	uint32_t pa_lna_enable_us;

	radio_gpio_pa_setup();

	pa_lna_enable_us = radio_tmr_tifs_base_get() + cis_lll->tifs_us -
			   HAL_RADIO_GPIO_PA_OFFSET;
#if defined(CONFIG_BT_CTLR_PHY)
	pa_lna_enable_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						     PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	pa_lna_enable_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(pa_lna_enable_us);
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	LL_ASSERT(!radio_is_ready());

	/* Schedule next subevent */
	if (!cie && (se_curr < cis_lll->nse)) {
		/* Calculate the radio channel to use for next subevent
		 */
		next_chan_use = lll_chan_iso_subevent(data_chan_id,
						      conn_lll->data_chan_map,
						      conn_lll->data_chan_count,
						      &data_chan_prn_s,
						      &data_chan_remap_idx);
	} else {
		struct lll_conn_iso_stream *next_cis_lll;
		struct lll_conn_iso_group *cig_lll;
		uint16_t event_counter;
		uint16_t cis_handle;

		/* Check for next active CIS */
		cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
		cis_handle = cis_handle_curr;
		do {
			next_cis_lll =
				ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
		} while (next_cis_lll && !next_cis_lll->active);

		if (!next_cis_lll) {
			/* ISO Event Done */
			radio_isr_set(isr_done, param);

			return;
		}

		payload_count_rx_flush_or_txrx_inc(cis_lll);

		cis_handle_curr = cis_handle;

		/* Event counter value, 0-15 bit of cisEventCounter */
		event_counter = next_cis_lll->event_count;

		/* Calculate the radio channel to use for next CIS ISO event */
		data_chan_id = lll_chan_id(next_cis_lll->access_addr);
		next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
						   conn_lll->data_chan_map,
						   conn_lll->data_chan_count,
						   &data_chan_prn_s,
						   &data_chan_remap_idx);

		/* Next CIS, se_curr is incremented in isr_tx() */
		cis_lll = next_cis_lll;
		se_curr = 0U;
	}

	/* Schedule next subevent reception relative to the saved anchor
	 * point, compensating for address duration and radio delays.
	 */
	subevent_us = radio_tmr_aa_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);
	subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
	subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
	subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	subevent_us -= radio_rx_ready_delay_get(0U, 0U);
	subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	start_us = radio_tmr_start_us(0U, subevent_us);
	LL_ASSERT(start_us == (subevent_us + 1U));

	radio_isr_set(isr_tx, cis_lll);
}
831
/* Tx ISR: runs after the Tx of a subevent completes; configures the
 * radio for the following subevent's reception (packet buffers, CCM for
 * encrypted Rx, channel, tIFS switching, HCTO) and increments se_curr.
 *
 * @param param CIS LLL context for the subevent being prepared.
 */
static void isr_tx(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct lll_conn_iso_group *cig_lll;
	struct node_rx_pdu *node_rx;
	uint32_t subevent_us;
	uint32_t start_us;
	uint32_t hcto;

	lll_isr_tx_sub_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM counter for the next expected Rx payload */
		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_aa_set(cis_lll->access_addr);

	/* Channel computed by the previous ISR (isr_rx()) */
	lll_chan_set(next_chan_use);

	radio_tmr_tx_disable();
	radio_tmr_rx_enable();

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

	/* Subevent start relative to the saved anchor point */
	subevent_us = radio_tmr_aa_restore();
	subevent_us += cis_lll->offset - cis_offset_first +
		       (cis_lll->sub_interval * se_curr);
	subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
	subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
	subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
						PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	subevent_us -= radio_rx_ready_delay_get(0U, 0U);
	subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Compensate for the 1 us added by radio_tmr_start_us() */
	start_us = subevent_us + 1U;

	/* Header Complete Timeout widened by jitter, ticker resolution
	 * margin and the accumulated window widening.
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U);

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, cis_lll);

	/* Advance to the next subevent */
	se_curr++;
}
965
next_cis_prepare(void * param)966 static void next_cis_prepare(void *param)
967 {
968 struct lll_conn_iso_stream *next_cis_lll;
969 struct lll_conn_iso_stream *cis_lll;
970 struct lll_conn_iso_group *cig_lll;
971 uint16_t cis_handle;
972
973 /* Get reference to CIS LLL context */
974 cis_lll = param;
975
976 /* Check for next active CIS */
977 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
978 next_cis_lll = cis_lll;
979 cis_handle = cis_handle_curr;
980 do {
981 next_cis_lll = ull_conn_iso_lll_stream_sorted_get_by_group(cig_lll, &cis_handle);
982 } while (next_cis_lll && !next_cis_lll->active);
983
984 if (!next_cis_lll) {
985 /* ISO Event Done */
986 radio_isr_set(isr_done, param);
987
988 return;
989 }
990
991 cis_handle_curr = cis_handle;
992
993 radio_isr_set(isr_prepare_subevent_next_cis, next_cis_lll);
994 }
995
/* ISR: prepare the next subevent of the SAME CIS after a failed or
 * skipped reception — compute the subevent hop channel, then fall into
 * the common subevent setup path.
 *
 * @param param CIS LLL context (struct lll_conn_iso_stream *).
 */
static void isr_prepare_subevent(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Calculate the radio channel to use for next subevent
	 */
	next_chan_use = lll_chan_iso_subevent(data_chan_id,
					      conn_lll->data_chan_map,
					      conn_lll->data_chan_count,
					      &data_chan_prn_s,
					      &data_chan_remap_idx);

	isr_prepare_subevent_common(param);
}
1019
/* ISR: prepare the FIRST subevent of the next CIS in the CIG — reseed
 * the channel selection from the new CIS access address, reset se_curr,
 * then fall into the common subevent setup path.
 *
 * @param param Next CIS LLL context (struct lll_conn_iso_stream *).
 */
static void isr_prepare_subevent_next_cis(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	const struct lll_conn *conn_lll;
	uint16_t event_counter;

	lll_isr_status_reset();

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* Get reference to ACL context */
	conn_lll = ull_conn_lll_get(cis_lll->acl_handle);

	/* Event counter value, 0-15 bit of cisEventCounter */
	event_counter = cis_lll->event_count;

	/* Calculate the radio channel to use for next CIS ISO event */
	data_chan_id = lll_chan_id(cis_lll->access_addr);
	next_chan_use = lll_chan_iso_event(event_counter, data_chan_id,
					   conn_lll->data_chan_map,
					   conn_lll->data_chan_count,
					   &data_chan_prn_s,
					   &data_chan_remap_idx);

	/* se_curr is incremented in isr_prepare_subevent_common() */
	se_curr = 0U;

	isr_prepare_subevent_common(param);
}
1050
/* Common radio programming to receive a CIS subevent PDU from the central.
 *
 * Configures PHY, optional CCM decryption, access address, data channel
 * (pre-selected in next_chan_use by the caller) and the tIFS Rx-to-Tx
 * switch, computes the subevent start time from the stored anchor (or
 * event-ready) timestamp, programs the header-complete timeout (hcto)
 * window and the LNA, then arms isr_rx() as the next radio ISR.
 * Post-increments the file-scope se_curr subevent index.
 */
static void isr_prepare_subevent_common(void *param)
{
	struct lll_conn_iso_stream *cis_lll;
	struct lll_conn_iso_group *cig_lll;
	struct node_rx_pdu *node_rx;
	uint32_t subevent_us;
	uint32_t start_us;
	uint32_t hcto;

	/* Get reference to CIS LLL context */
	cis_lll = param;

	/* An Rx buffer must already be available for this PDU */
	node_rx = ull_iso_pdu_rx_alloc_peek(1U);
	LL_ASSERT(node_rx);

#if defined(CONFIG_BT_CTLR_LE_ENC)
	/* Get reference to ACL context */
	const struct lll_conn *conn_lll = ull_conn_lll_get(cis_lll->acl_handle);
#endif /* CONFIG_BT_CTLR_LE_ENC */

	/* PHY */
	radio_phy_set(cis_lll->rx.phy, PHY_FLAGS_S8);

	/* Encryption: the if (false) arm keeps the encrypted and plaintext
	 * paths as a single if-else chain across conditional compilation.
	 */
	if (false) {

#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (conn_lll->enc_rx) {
		uint64_t payload_count;
		uint8_t pkt_flags;

		/* CCM packet counter is the payload counter of the Rx
		 * payload in flight (bn_curr is 1-based within the burst).
		 */
		payload_count = cis_lll->rx.payload_count +
				cis_lll->rx.bn_curr - 1U;

		cis_lll->rx.ccm.counter = payload_count;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		/* Max PDU length grows by the MIC appended under CCM */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (cis_lll->rx.max_pdu + PDU_MIC_SIZE),
				    pkt_flags);
		radio_pkt_rx_set(radio_ccm_iso_rx_pkt_set(&cis_lll->rx.ccm,
							  cis_lll->rx.phy,
							  RADIO_PKT_CONF_PDU_TYPE_CIS,
							  node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */

	} else {
		uint8_t pkt_flags;

		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_CIS,
						 cis_lll->rx.phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    cis_lll->rx.max_pdu, pkt_flags);
		radio_pkt_rx_set(node_rx->pdu);
	}

	radio_aa_set(cis_lll->access_addr);

	/* Channel chosen by the caller (next subevent or next CIS) */
	lll_chan_set(next_chan_use);

	radio_tmr_tx_disable();
	radio_tmr_rx_enable();

	radio_tmr_tifs_set(cis_lll->tifs_us);

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(cis_lll->rx.phy, 0U, cis_lll->tx.phy,
				     cis_lll->tx.phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0U, 0U, 0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	/* Anchor point sync-ed */
	if (trx_performed_bitmask) {
		/* Derive the subevent start from the captured access
		 * address timestamp, compensating for the preamble/address
		 * duration and the radio Rx ready and chain delays.
		 */
		subevent_us = radio_tmr_aa_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
		subevent_us -= addr_us_get(cis_lll->rx.phy);

#if defined(CONFIG_BT_CTLR_PHY)
		subevent_us -= radio_rx_ready_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
		subevent_us -= radio_rx_chain_delay_get(cis_lll->rx.phy,
							PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
		subevent_us -= radio_rx_ready_delay_get(0U, 0U);
		subevent_us -= radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */
	} else {
		/* No anchor sync yet: base on the event-ready timestamp */
		subevent_us = radio_tmr_ready_restore();
		subevent_us += cis_lll->offset - cis_offset_first +
			       (cis_lll->sub_interval * se_curr);
	}

	start_us = radio_tmr_start_us(0U, subevent_us);
	/* Once sync-ed, the timer start must land exactly on request + 1 */
	LL_ASSERT(!trx_performed_bitmask || (start_us == (subevent_us + 1U)));

	/* If no anchor point sync yet, continue to capture access address
	 * timestamp.
	 */
	if (!radio_tmr_aa_restore()) {
		radio_tmr_aa_capture();
	}

	cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);

	/* Header complete timeout: widen the Rx window by event jitter,
	 * ticker resolution margin and accumulated window widening, applied
	 * on both sides of the nominal start (hence the shift by one).
	 */
	hcto = start_us +
	       ((EVENT_JITTER_US + EVENT_TICKER_RES_MARGIN_US +
		 EVENT_US_FRAC_TO_US(cig_lll->window_widening_event_us_frac)) <<
		1U);

#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_ready_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
	hcto += addr_us_get(cis_lll->rx.phy);
	hcto += radio_rx_chain_delay_get(cis_lll->rx.phy, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_ready_delay_get(0U, 0U);
	hcto += addr_us_get(0U);
	hcto += radio_rx_chain_delay_get(0U, 0U);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();

#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(cis_lll->rx.phy,
							  PHY_FLAGS_S8) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(start_us +
				 radio_rx_ready_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(isr_rx, cis_lll);

	se_curr++;
}
1196
isr_done(void * param)1197 static void isr_done(void *param)
1198 {
1199 struct lll_conn_iso_stream *cis_lll;
1200 struct event_done_extra *e;
1201
1202 lll_isr_status_reset();
1203
1204 /* Get reference to CIS LLL context */
1205 cis_lll = param;
1206
1207 payload_count_rx_flush_or_txrx_inc(cis_lll);
1208
1209 e = ull_event_done_extra_get();
1210 LL_ASSERT(e);
1211
1212 e->type = EVENT_DONE_EXTRA_TYPE_CIS;
1213 e->trx_performed_bitmask = trx_performed_bitmask;
1214
1215 #if defined(CONFIG_BT_CTLR_LE_ENC)
1216 e->mic_state = mic_state;
1217 #endif /* CONFIG_BT_CTLR_LE_ENC */
1218
1219 if (trx_performed_bitmask) {
1220 struct lll_conn_iso_group *cig_lll;
1221 uint32_t preamble_to_addr_us;
1222
1223 cig_lll = ull_conn_iso_lll_group_get_by_stream(cis_lll);
1224
1225 #if defined(CONFIG_BT_CTLR_PHY)
1226 preamble_to_addr_us = addr_us_get(cis_lll->rx.phy);
1227 #else /* !CONFIG_BT_CTLR_PHY */
1228 preamble_to_addr_us = addr_us_get(0U);
1229 #endif /* !CONFIG_BT_CTLR_PHY */
1230
1231 e->drift.start_to_address_actual_us =
1232 radio_tmr_aa_restore() - radio_tmr_ready_restore();
1233 e->drift.window_widening_event_us = EVENT_US_FRAC_TO_US(
1234 cig_lll->window_widening_event_us_frac);
1235 e->drift.preamble_to_addr_us = preamble_to_addr_us;
1236
1237 /* Reset window widening, as anchor point sync-ed */
1238 cig_lll->window_widening_event_us_frac = 0U;
1239 }
1240
1241 lll_isr_cleanup(param);
1242 }
1243
/* Flush payload counters whose flush timeout (FT) is expiring.
 *
 * Skips past Tx payloads that can no longer be delivered within their
 * flush timeout, stepping the 1-bit sn and the burst/payload counters as
 * if they had been acknowledged; likewise steps past at most one expired
 * Rx payload, advancing nesn.
 *
 * NOTE(review): `u` appears to be the subevent-index threshold up to
 * which the current payload (its position in the burst given by
 * payload_count % bn) may still be serviced within this event, derived
 * from nse and bn -- confirm against the CIS flush timeout scheduling in
 * BT Core Vol 6, Part B, Section 4.5.13.
 */
static void payload_count_flush(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		uint64_t payload_count;
		uint8_t u;

		/* Payload counter of the in-flight Tx payload (bn_curr is
		 * 1-based within the burst).
		 */
		payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
				    (cis_lll->tx.bn - 1U -
				     (payload_count % cis_lll->tx.bn)));
		/* Flush while the payload's FT expires before the next event
		 * (or expires at it with its last usable subevent already
		 * past), and the payload belongs to this or an earlier event.
		 */
		while (((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
			 (cis_lll->event_count + 1U)) ||
			((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
			  (cis_lll->event_count + 1U)) && (u < se_curr))) &&
		       (((cis_lll->tx.bn_curr < cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) <= cis_lll->event_count)) ||
			((cis_lll->tx.bn_curr == cis_lll->tx.bn) &&
			 ((cis_lll->tx.payload_count / cis_lll->tx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->sn++;
			cis_lll->tx.bn_curr++;
			if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
				cis_lll->tx.payload_count += cis_lll->tx.bn;
				cis_lll->tx.bn_curr = 1U;
			}

			/* Re-evaluate for the next payload in sequence */
			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		/* Single-step flush of one Rx payload whose FT expires at
		 * the next event and whose last usable subevent has passed.
		 */
		if ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
		     (cis_lll->event_count + 1U)) && (u <= se_curr) &&
		    (((cis_lll->rx.bn_curr < cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) <= cis_lll->event_count)) ||
		     ((cis_lll->rx.bn_curr == cis_lll->rx.bn) &&
		      ((cis_lll->rx.payload_count / cis_lll->rx.bn) < cis_lll->event_count)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}
		}
	}
}
1301
/* At event done: roll payload counters over into the next ISO event, or
 * flush Rx payloads whose flush timeout has expired.
 *
 * If the burst for the next event was already fully stepped through
 * (bn_curr walked past the burst during the event), advance payload_count
 * by one burst and reset bn_curr. Otherwise, for Rx, skip (nesn++) every
 * payload whose FT expires before the next event.
 */
static void payload_count_rx_flush_or_txrx_inc(struct lll_conn_iso_stream *cis_lll)
{
	if (cis_lll->tx.bn) {
		/* Whole Tx burst consumed: advance to the next burst */
		if (((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.bn_curr) >
		    (cis_lll->event_count + cis_lll->tx.bn)) {
			cis_lll->tx.payload_count += cis_lll->tx.bn;
			cis_lll->tx.bn_curr = 1U;
		}
	}

	if (cis_lll->rx.bn) {
		uint64_t payload_count;
		uint8_t u;

		if (((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.bn_curr) >
		    (cis_lll->event_count + cis_lll->rx.bn)) {
			cis_lll->rx.payload_count += cis_lll->rx.bn;
			cis_lll->rx.bn_curr = 1U;

			/* Burst fully received; nothing left to flush */
			return;
		}

		payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
		u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
				    (cis_lll->rx.bn - 1U -
				     (payload_count % cis_lll->rx.bn)));
		/* NOTE(review): since u never exceeds nse, the
		 * (u <= (nse + 1U)) clause is always true, so the FT
		 * equality alone decides the second disjunct -- confirm this
		 * is intentional (it differs from the stricter subevent
		 * checks in payload_count_flush()).
		 */
		while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
			(cis_lll->event_count + 1U)) ||
		       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
			 (cis_lll->event_count + 1U)) && (u <= (cis_lll->nse + 1U)))) {
			/* sn and nesn are 1-bit, only Least Significant bit is needed */
			cis_lll->nesn++;
			cis_lll->rx.bn_curr++;
			if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
				cis_lll->rx.payload_count += cis_lll->rx.bn;
				cis_lll->rx.bn_curr = 1U;
			}

			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
		}
	}
}
1347
/* Catch payload counters up after `lazy` skipped (unserviced) CIS events.
 *
 * For each skipped event, steps sn/nesn and the Tx/Rx burst and payload
 * counters past every payload whose flush timeout expired while the
 * event went unserviced, mirroring payload_count_flush() but bounded by
 * the current event_count instead of the next one.
 */
static void payload_count_lazy(struct lll_conn_iso_stream *cis_lll, uint16_t lazy)
{
	if (cis_lll->tx.bn && lazy) {
		uint16_t tx_lazy;

		/* Iterate on a copy; `lazy` is reused by the Rx path below */
		tx_lazy = lazy;
		while (tx_lazy--) {
			uint64_t payload_count;
			uint8_t u;

			/* In-flight Tx payload counter (bn_curr is 1-based) */
			payload_count = cis_lll->tx.payload_count + cis_lll->tx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
					    (cis_lll->tx.bn - 1U -
					     (payload_count % cis_lll->tx.bn)));
			/* Skip every Tx payload whose FT expired in or before
			 * the current event.
			 */
			while ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->tx.payload_count / cis_lll->tx.bn) + cis_lll->tx.ft) ==
				 cis_lll->event_count) && (u < cis_lll->nse))) {
				/* sn and nesn are 1-bit, only Least Significant bit is needed */
				cis_lll->sn++;
				cis_lll->tx.bn_curr++;
				if (cis_lll->tx.bn_curr > cis_lll->tx.bn) {
					cis_lll->tx.payload_count += cis_lll->tx.bn;
					cis_lll->tx.bn_curr = 1U;
				}

				payload_count = cis_lll->tx.payload_count +
						cis_lll->tx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->tx.bn) *
						    (cis_lll->tx.bn - 1U -
						     (payload_count % cis_lll->tx.bn)));
			}
		}
	}

	if (cis_lll->rx.bn) {
		/* NOTE(review): unlike the Tx path, this runs even when lazy
		 * is 0 (the while body then executes zero times) -- the
		 * asymmetry is cosmetic, behavior is unchanged.
		 */
		while (lazy--) {
			uint64_t payload_count;
			uint8_t u;

			payload_count = cis_lll->rx.payload_count + cis_lll->rx.bn_curr - 1U;
			u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
					    (cis_lll->rx.bn - 1U -
					     (payload_count % cis_lll->rx.bn)));
			/* Skip every Rx payload whose FT expired in or before
			 * the current event.
			 */
			while ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) <
				cis_lll->event_count) ||
			       ((((cis_lll->rx.payload_count / cis_lll->rx.bn) + cis_lll->rx.ft) ==
				 cis_lll->event_count) && (u <= cis_lll->nse))) {
				/* sn and nesn are 1-bit, only Least Significant bit is needed */
				cis_lll->nesn++;
				cis_lll->rx.bn_curr++;
				if (cis_lll->rx.bn_curr > cis_lll->rx.bn) {
					cis_lll->rx.payload_count += cis_lll->rx.bn;
					cis_lll->rx.bn_curr = 1U;
				}

				payload_count = cis_lll->rx.payload_count +
						cis_lll->rx.bn_curr - 1U;
				u = cis_lll->nse - ((cis_lll->nse / cis_lll->rx.bn) *
						    (cis_lll->rx.bn - 1U -
						     (payload_count % cis_lll->rx.bn)));
			}
		}
	}
}
1413