1 /*
2 * Copyright (c) 2018-2020 Nordic Semiconductor ASA
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <stdint.h>
8 #include <stdbool.h>
9 #include <stddef.h>
10
11 #include <zephyr/toolchain.h>
12 #include <soc.h>
13
14 #include <zephyr/sys/util.h>
15
16 #include "hal/cpu.h"
17 #include "hal/ccm.h"
18 #include "hal/radio.h"
19 #include "hal/radio_df.h"
20
21 #include "util/util.h"
22 #include "util/mem.h"
23 #include "util/memq.h"
24 #include "util/mfifo.h"
25 #include "util/dbuf.h"
26
27 #include "pdu_df.h"
28 #include "pdu_vendor.h"
29 #include "pdu.h"
30
31 #include "lll.h"
32 #include "lll_clock.h"
33 #include "lll_df_types.h"
34 #include "lll_df.h"
35 #include "lll_conn.h"
36
37 #include "lll_internal.h"
38 #include "lll_df_internal.h"
39 #include "lll_tim_internal.h"
40 #include "lll_prof_internal.h"
41
42 #include <zephyr/bluetooth/hci_types.h>
43
44 #include "hal/debug.h"
45
46 static int init_reset(void);
47 static void isr_done(void *param);
48 static inline int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
49 uint8_t *is_rx_enqueue,
50 struct node_tx **tx_release, uint8_t *is_done);
51
52 #if defined(CONFIG_BT_CTLR_TX_DEFER)
53 static void isr_tx_deferred_set(void *param);
54 #endif /* CONFIG_BT_CTLR_TX_DEFER */
55
56 static void empty_tx_init(void);
57
58 #if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
59 static inline bool create_iq_report(struct lll_conn *lll, uint8_t rssi_ready,
60 uint8_t packet_status);
61 #endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
62
63 #if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
64 static struct pdu_data *get_last_tx_pdu(struct lll_conn *lll);
65 #endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
66
/* Per-connection-event bookkeeping, reset in lll_conn_prepare_reset() and
 * reported via the event done extra in isr_done().
 */
static uint8_t crc_expire; /* countdown of consecutive CRC errors before event close */
static uint8_t crc_valid;  /* set when at least one CRC-ok PDU received in event */
static uint8_t is_aborted; /* set when the event was aborted before completion */
static uint16_t tx_cnt;    /* PDUs transmitted in current event */
static uint16_t trx_cnt;   /* trx (receive) exchanges in current event */

#if defined(CONFIG_BT_CTLR_LE_ENC)
/* MIC verification outcome for the current event (LLL_CONN_MIC_*) */
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

#if defined(CONFIG_BT_CTLR_FORCE_MD_COUNT) && \
	(CONFIG_BT_CTLR_FORCE_MD_COUNT > 0)
#if defined(CONFIG_BT_CTLR_FORCE_MD_AUTO)
/* Runtime-adjustable reload value, set via lll_conn_force_md_cnt_set() */
static uint8_t force_md_cnt_reload;
#define BT_CTLR_FORCE_MD_COUNT force_md_cnt_reload
#else
#define BT_CTLR_FORCE_MD_COUNT CONFIG_BT_CTLR_FORCE_MD_COUNT
#endif
/* Remaining number of PDUs on which the MD (more data) bit is forced */
static uint8_t force_md_cnt;

/* Reset the forced-MD countdown at connection reset */
#define FORCE_MD_CNT_INIT() \
	{ \
		force_md_cnt = 0U; \
	}

/* Consume one forced-MD credit (on each acknowledged Tx) */
#define FORCE_MD_CNT_DEC() \
	do { \
		if (force_md_cnt) { \
			force_md_cnt--; \
		} \
	} while (false)

#define FORCE_MD_CNT_GET() force_md_cnt

/* Re-arm the forced-MD countdown while credits remain or when the Tx
 * buffer pool is near exhaustion (trx count reached buffer count - 1).
 */
#define FORCE_MD_CNT_SET() \
	do { \
		if (force_md_cnt || \
		    (trx_cnt >= ((CONFIG_BT_BUF_ACL_TX_COUNT) - 1))) { \
			force_md_cnt = BT_CTLR_FORCE_MD_COUNT; \
		} \
	} while (false)

#else /* !CONFIG_BT_CTLR_FORCE_MD_COUNT */
#define FORCE_MD_CNT_INIT()
#define FORCE_MD_CNT_DEC()
#define FORCE_MD_CNT_GET() 0
#define FORCE_MD_CNT_SET()
#endif /* !CONFIG_BT_CTLR_FORCE_MD_COUNT */
115
/* Initialize the connection LLL module.
 *
 * Performs the common reset and prepares the pre-canned empty PDU used
 * when no Tx data is queued.
 *
 * Returns 0 on success, else the error from init_reset().
 */
int lll_conn_init(void)
{
	int ret = init_reset();

	if (ret) {
		return ret;
	}

	empty_tx_init();

	return 0;
}
129
/* Reset the connection LLL module.
 *
 * Performs the common reset and re-initializes the forced-MD countdown.
 *
 * Returns 0 on success, else the error from init_reset().
 */
int lll_conn_reset(void)
{
	int ret = init_reset();

	if (ret) {
		return ret;
	}

	FORCE_MD_CNT_INIT();

	return 0;
}
143
/* Flush per-connection LLL resources on connection termination.
 *
 * No LLL-level resources are held per connection at present, so this is
 * a no-op kept for interface symmetry with other LLL roles.
 */
void lll_conn_flush(uint16_t handle, struct lll_conn *lll)
{
}
148
lll_conn_prepare_reset(void)149 void lll_conn_prepare_reset(void)
150 {
151 tx_cnt = 0U;
152 trx_cnt = 0U;
153 crc_valid = 0U;
154 crc_expire = 0U;
155 is_aborted = 0U;
156
157 #if defined(CONFIG_BT_CTLR_LE_ENC)
158 mic_state = LLL_CONN_MIC_NONE;
159 #endif /* CONFIG_BT_CTLR_LE_ENC */
160 }
161
162 #if defined(CONFIG_BT_CENTRAL)
/* Decide whether the current central connection event may be aborted.
 *
 * Returns 0 to refuse abort (near supervision timeout), -EBUSY to defer
 * a same-event preemption until one trx completes, else -ECANCELED to
 * permit the abort.
 */
int lll_conn_central_is_abort_cb(void *next, void *curr,
				 lll_prepare_cb_t *resume_cb)
{
	const struct lll_conn *lll = curr;

	/* Forced events run close to supervision timeout; never abort */
	if (lll->forced) {
		return 0;
	}

	/* A same-event preemption must wait until the central has exchanged
	 * at least a single trx.
	 */
	if ((trx_cnt < 1U) && (next == curr)) {
		return -EBUSY;
	}

	return -ECANCELED;
}
182 #endif /* CONFIG_BT_CENTRAL */
183
184 #if defined(CONFIG_BT_PERIPHERAL)
/* Decide whether the current peripheral connection event may be aborted.
 *
 * Returns 0 to refuse abort (near supervision timeout), -EBUSY to defer
 * a same-event preemption until one PDU has been transmitted, else
 * -ECANCELED to permit the abort.
 */
int lll_conn_peripheral_is_abort_cb(void *next, void *curr,
				    lll_prepare_cb_t *resume_cb)
{
	const struct lll_conn *lll = curr;

	/* Forced events run close to supervision timeout; never abort */
	if (lll->forced) {
		return 0;
	}

	/* A same-event preemption must wait until the peripheral has
	 * transmitted at least a single PDU.
	 */
	if ((tx_cnt < 1U) && (next == curr)) {
		return -EBUSY;
	}

	return -ECANCELED;
}
204 #endif /* CONFIG_BT_PERIPHERAL */
205
/* Abort a connection event.
 *
 * Two cases:
 * - prepare_param == NULL: the event is already on air; the radio is
 *   disabled and isr_done dispatches the event done.
 * - prepare_param != NULL: a prepare still in the pipeline is cancelled;
 *   latency and (for peripheral) window widening are accumulated, and an
 *   extra done event is generated so ULL can check supervision timeout.
 */
void lll_conn_abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct event_done_extra *e;
	struct lll_conn *lll;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		/* Get reference to LLL connection context */
		lll = param;

		/* For a peripheral role, ensure at least one PDU is tx-ed
		 * back to central, otherwise let the supervision timeout
		 * countdown be started.
		 */
		if ((lll->role == BT_HCI_ROLE_PERIPHERAL) && (tx_cnt < 1U)) {
			is_aborted = 1U;
		}

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, param);
		radio_disable();
		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Get reference to LLL connection context */
	lll = prepare_param->param;

	/* Accumulate the latency as event is aborted while being in pipeline */
	lll->lazy_prepare = prepare_param->lazy;
	lll->latency_prepare += (lll->lazy_prepare + 1U);

#if defined(CONFIG_BT_PERIPHERAL)
	if (lll->role == BT_HCI_ROLE_PERIPHERAL) {
		/* Accumulate window widening */
		lll->periph.window_widening_prepare_us +=
			lll->periph.window_widening_periodic_us *
			(prepare_param->lazy + 1);
		if (lll->periph.window_widening_prepare_us >
		    lll->periph.window_widening_max_us) {
			lll->periph.window_widening_prepare_us =
				lll->periph.window_widening_max_us;
		}
	}
#endif /* CONFIG_BT_PERIPHERAL */

	/* Extra done event, to check supervision timeout */
	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CONN;
	e->trx_cnt = 0U;
	e->crc_valid = 0U;
	e->is_aborted = 1U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	lll_done(param);
}
276
/* Radio ISR on reception (end of Rx) within a connection event.
 *
 * Processes the received PDU (ack/flow-control via sn/nesn, CRC and MIC
 * checking), prepares and sets up the next Tx PDU, decides whether the
 * event continues or closes, and hands completed Tx acks and Rx PDUs up
 * to ULL. Runs in radio ISR context; ordering of radio register accesses
 * is timing critical.
 */
void lll_conn_isr_rx(void *param)
{
	uint8_t is_empty_pdu_tx_retry;
	struct pdu_data *pdu_data_rx;
	struct pdu_data *pdu_data_tx;
	struct node_rx_pdu *node_rx;
	struct node_tx *tx_release;
#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	uint32_t pa_lna_enable_us;
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */
	uint8_t is_rx_enqueue;
	struct lll_conn *lll;
	uint8_t rssi_ready;
	bool is_iq_report;
	uint8_t is_ull_rx;
	uint8_t trx_done;
	uint8_t is_done;
	uint8_t cte_len;
	uint8_t crc_ok;
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	bool cte_ready;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
		rssi_ready = radio_rssi_is_ready();
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		cte_ready = radio_df_cte_ready();

#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
	} else {
		crc_ok = rssi_ready = 0U;
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		cte_ready = 0U;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	/* No Rx */
	if (!trx_done) {
		radio_isr_set(isr_done, param);
		radio_disable();

		return;
	}

	trx_cnt++;

	is_done = 0U;
	tx_release = NULL;
	is_rx_enqueue = 0U;

	lll = param;

	/* Peek (do not yet consume) the Rx buffer; consumed below only if
	 * the PDU is actually enqueued.
	 */
	node_rx = ull_pdu_rx_alloc_peek(1);
	LL_ASSERT(node_rx);

	pdu_data_rx = (void *)node_rx->pdu;

	if (crc_ok) {
		uint32_t err;

		err = isr_rx_pdu(lll, pdu_data_rx, &is_rx_enqueue, &tx_release,
				 &is_done);
		if (err) {
			/* Disable radio trx switch on MIC failure for both
			 * central and peripheral, and close the radio event.
			 */
			radio_isr_set(isr_done, param);
			radio_disable();

			/* assert if radio started tx before being disabled */
			LL_ASSERT(!radio_is_ready());

			goto lll_conn_isr_rx_exit;
		}

		/* Reset CRC expiry counter */
		crc_expire = 0U;

		/* CRC valid flag used to detect supervision timeout */
		crc_valid = 1U;
	} else {
		/* Start CRC error countdown, if not already started */
		if (crc_expire == 0U) {
			crc_expire = 2U;
		}

		/* CRC error countdown */
		crc_expire--;
		is_done = (crc_expire == 0U);
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX) && defined(CONFIG_BT_CTLR_LE_ENC)
	/* With encrypted Rx the cte_info byte is only present in the radio
	 * scratch buffer; copy it into the decrypted PDU when CP is set.
	 */
	if (lll->enc_rx) {
		struct pdu_data *pdu_scratch;

		pdu_scratch = (struct pdu_data *)radio_pkt_scratch_get();

		if (pdu_scratch->cp) {
			(void)memcpy((void *)&pdu_data_rx->octet3.cte_info,
				     (void *)&pdu_scratch->octet3.cte_info,
				     sizeof(pdu_data_rx->octet3.cte_info));
		}
	}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX && defined(CONFIG_BT_CTLR_LE_ENC) */

	/* prepare tx packet */
	is_empty_pdu_tx_retry = lll->empty;
	lll_conn_pdu_tx_prep(lll, &pdu_data_tx);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	if (pdu_data_tx->cp) {
		cte_len = CTE_LEN_US(pdu_data_tx->octet3.cte_info.time);

		lll_df_cte_tx_configure(pdu_data_tx->octet3.cte_info.type,
					pdu_data_tx->octet3.cte_info.time,
					lll->df_tx_cfg.ant_sw_len,
					lll->df_tx_cfg.ant_ids);
	} else
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	{
		cte_len = 0U;
	}

	/* Decide on event continuation and hence Radio Shorts to use */
	is_done = is_done || ((crc_ok) &&
			      (pdu_data_rx->md == 0) &&
			      (pdu_data_tx->md == 0) &&
			      (pdu_data_tx->len == 0));

	if (is_done) {
		radio_isr_set(isr_done, param);

		if (0) {
#if defined(CONFIG_BT_CENTRAL)
		/* Event done for central */
		} else if (!lll->role) {
			radio_disable();

			/* assert if radio packet ptr is not set and radio
			 * started tx.
			 */
			LL_ASSERT(!radio_is_ready());

			/* Restore state if last transmitted was empty PDU */
			lll->empty = is_empty_pdu_tx_retry;

			goto lll_conn_isr_rx_exit;
#endif /* CONFIG_BT_CENTRAL */
#if defined(CONFIG_BT_PERIPHERAL)
		/* Event done for peripheral */
		} else {
			radio_switch_complete_and_disable();
#endif /* CONFIG_BT_PERIPHERAL */
		}
	} else {
		radio_tmr_tifs_set(lll->tifs_rx_us);

#if defined(CONFIG_BT_CTLR_PHY)
		radio_switch_complete_and_rx(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
		radio_switch_complete_and_rx(0);
#endif /* !CONFIG_BT_CTLR_PHY */

		radio_isr_set(lll_conn_isr_tx, param);

		/* capture end of Tx-ed PDU, used to calculate HCTO. */
		radio_tmr_end_capture();
	}

	/* Fill sn and nesn */
	pdu_data_tx->sn = lll->sn;
	pdu_data_tx->nesn = lll->nesn;

	/* setup the radio tx packet buffer */
	lll_conn_tx_pkt_set(lll, pdu_data_tx);

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	/* PA enable is overwriting packet end used in ISR profiling, hence
	 * back it up for later use.
	 */
	lll_prof_radio_end_backup();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */

	radio_gpio_pa_setup();

	pa_lna_enable_us =
		radio_tmr_tifs_base_get() + lll->tifs_tx_us - cte_len - HAL_RADIO_GPIO_PA_OFFSET;
#if defined(CONFIG_BT_CTLR_PHY)
	pa_lna_enable_us -= radio_rx_chain_delay_get(lll->phy_rx, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	pa_lna_enable_us -= radio_rx_chain_delay_get(0, PHY_FLAGS_S2);
#endif /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(pa_lna_enable_us);
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		LL_ASSERT_MSG(!radio_is_address(), "%s: Radio ISR latency: %u", __func__,
			      lll_prof_latency_get());
	} else {
		LL_ASSERT(!radio_is_address());
	}

#if defined(CONFIG_BT_CTLR_TX_DEFER)
	if (!is_empty_pdu_tx_retry && (pdu_data_tx->len == 0U)) {
		uint32_t tx_defer_us;
		uint32_t defer_us;

		/* Restore state if transmission setup for empty PDU */
		lll->empty = 0U;

		/* Setup deferred tx packet set */
		tx_defer_us = radio_tmr_tifs_base_get() + lll->tifs_tx_us -
			      HAL_RADIO_TMR_DEFERRED_TX_DELAY_US;
		/* NOTE(review): returned defer_us is currently unused here —
		 * presumably informational; confirm against radio_tmr_isr_set
		 * contract.
		 */
		defer_us = radio_tmr_isr_set(tx_defer_us, isr_tx_deferred_set,
					     param);
	}
#endif /* CONFIG_BT_CTLR_TX_DEFER */

lll_conn_isr_rx_exit:
	/* Save the AA captured for the first Rx in connection event */
	if (!radio_tmr_aa_restore()) {
		radio_tmr_aa_save(radio_tmr_aa_get());
	}

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	lll_prof_cputime_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */

	is_ull_rx = 0U;

	if (tx_release) {
		LL_ASSERT(lll->handle != 0xFFFF);

		ull_conn_lll_ack_enqueue(lll->handle, tx_release);

		is_ull_rx = 1U;
	}

	if (is_rx_enqueue) {
#if defined(CONFIG_SOC_NRF52832) && \
	defined(CONFIG_BT_CTLR_LE_ENC) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
		/* nRF52832 workaround: decrypted payload resides in a
		 * separate buffer; copy it into the Rx node's PDU.
		 */
		if (lll->enc_rx) {
			uint8_t *pkt_decrypt_data;

			pkt_decrypt_data = (uint8_t *)radio_pkt_decrypt_get() +
					   offsetof(struct pdu_data, lldata);
			memcpy((void *)pdu_data_rx->lldata,
			       (void *)pkt_decrypt_data, pdu_data_rx->len);
		}
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#endif
		/* Now actually consume the Rx buffer peeked above */
		ull_pdu_rx_alloc();

		node_rx->hdr.type = NODE_RX_TYPE_DC_PDU;
		node_rx->hdr.handle = lll->handle;

		ull_rx_put(node_rx->hdr.link, node_rx);
		is_ull_rx = 1U;
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	if (cte_ready) {
		is_iq_report =
			create_iq_report(lll, rssi_ready,
					 (crc_ok == true ? BT_HCI_LE_CTE_CRC_OK :
							   BT_HCI_LE_CTE_CRC_ERR_CTE_BASED_TIME));
	} else {
#else
	if (1) {
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
		is_iq_report = false;
	}

	if (is_ull_rx || is_iq_report) {
		ull_rx_sched();
	}

#if defined(CONFIG_BT_CTLR_CONN_RSSI)
	/* Collect RSSI for connection */
	if (rssi_ready) {
		uint8_t rssi = radio_rssi_get();

		lll->rssi_latest = rssi;

#if defined(CONFIG_BT_CTLR_CONN_RSSI_EVENT)
		if (((lll->rssi_reported - rssi) & 0xFF) >
		    LLL_CONN_RSSI_THRESHOLD) {
			if (lll->rssi_sample_count) {
				lll->rssi_sample_count--;
			}
		} else {
			lll->rssi_sample_count = LLL_CONN_RSSI_SAMPLE_COUNT;
		}
#endif /* CONFIG_BT_CTLR_CONN_RSSI_EVENT */
	}
#else /* !CONFIG_BT_CTLR_CONN_RSSI */
	ARG_UNUSED(rssi_ready);
#endif /* !CONFIG_BT_CTLR_CONN_RSSI */

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	lll_prof_send();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
}
597
/* Radio ISR on transmission end within a connection event.
 *
 * Configures the tIFS switch back to Rx (with optional CTE Rx and PHYEND
 * delay compensation), sets up the Rx packet buffer, computes and arms the
 * header-complete timeout (HCTO), and re-arms lll_conn_isr_rx.
 */
void lll_conn_isr_tx(void *param)
{
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	/* NOTE(review): static storage here looks intentional (survives
	 * until next ISR) — confirm; an automatic variable would appear
	 * sufficient for the uses below.
	 */
	static struct pdu_data *pdu_tx;
	uint8_t cte_len;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	struct lll_conn *lll;
	uint32_t hcto;

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Clear radio tx status and events */
	lll_isr_tx_status_reset();

	tx_cnt++;

	lll = param;

	/* setup tIFS switching */
	radio_tmr_tifs_set(lll->tifs_tx_us);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
	enum radio_end_evt_delay_state end_evt_delay;
#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */

#if defined(CONFIG_BT_CTLR_PHY)
	/* CTE reception is not supported on the Coded PHY */
	if (lll->phy_rx != PHY_CODED) {
#else
	if (1) {
#endif /* CONFIG_BT_CTLR_PHY */
		struct lll_df_conn_rx_params *df_rx_params;
		struct lll_df_conn_rx_cfg *df_rx_cfg;

		df_rx_cfg = &lll->df_rx_cfg;
		/* Get last swapped CTE RX configuration. Do not swap it again here.
		 * It should remain unchanged for connection event duration.
		 */
		df_rx_params = dbuf_curr_get(&df_rx_cfg->hdr);

		if (df_rx_params->is_enabled) {
			(void)lll_df_conf_cte_rx_enable(df_rx_params->slot_durations,
						df_rx_params->ant_sw_len, df_rx_params->ant_ids,
						df_rx_cfg->chan, CTE_INFO_IN_S1_BYTE,
						lll->phy_rx);
		} else {
			lll_df_conf_cte_info_parsing_enable();
		}
#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
		end_evt_delay = END_EVT_DELAY_ENABLED;
	} else {
		end_evt_delay = END_EVT_DELAY_DISABLED;
#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */
	}

#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
	/* Use special API for SOC that requires compensation for PHYEND event delay. */

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_with_delay_compensation_and_tx(lll->phy_rx, 0, lll->phy_tx,
							     lll->phy_flags, end_evt_delay);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_with_delay_compensation_and_tx(0, 0, 0, 0, end_evt_delay);
#endif /* !CONFIG_BT_CTLR_PHY */

#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

	/* Use regular API for cases when:
	 * - CTE RX is not enabled,
	 * - SOC does not require compensation for PHYEND event delay.
	 */
#if !defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(lll->phy_rx, 0, lll->phy_tx, lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0, 0, 0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* !CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */

	lll_conn_rx_pkt_set(lll);

	/* assert if radio packet ptr is not set and radio started rx */
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		LL_ASSERT_MSG(!radio_is_address(), "%s: Radio ISR latency: %u", __func__,
			      lll_prof_latency_get());
	} else {
		LL_ASSERT(!radio_is_address());
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	pdu_tx = get_last_tx_pdu(lll);
	LL_ASSERT(pdu_tx);

	if (pdu_tx->cp) {
		cte_len = CTE_LEN_US(pdu_tx->octet3.cte_info.time);
	} else {
		cte_len = 0U;
	}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */

#if !defined(CONFIG_BOARD_NRF52_BSIM) && \
	!defined(CONFIG_BOARD_NRF5340BSIM_NRF5340_CPUNET) && \
	!defined(CONFIG_BOARD_NRF54L15BSIM_NRF54L15_CPUAPP)

	/* +/- 2us active clock jitter, +1 us PPI to timer start compensation */
	hcto = radio_tmr_tifs_base_get() + lll->tifs_hcto_us +
	       (EVENT_CLOCK_JITTER_US << 1) + RANGE_DELAY_US +
	       HAL_RADIO_TMR_START_DELAY_US;

#else /* FIXME: Why different for BabbleSIM? */
	/* HACK: Have exact 150 us */
	hcto = radio_tmr_tifs_base_get() + lll->tifs_hcto_us;

	/* HACK: Could wrong MODE register value (next in tIFS switching) being
	 *       use for Rx Chain Delay in BabbleSIM? or is there a bug in
	 *       target implementation?
	 */
	hcto += radio_rx_chain_delay_get(lll->phy_tx, PHY_FLAGS_S8);
#endif /* FIXME: Why different for BabbleSIM? */

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	/* Extend timeout by the just-transmitted CTE duration */
	hcto += cte_len;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_chain_delay_get(lll->phy_rx, 1);
	hcto += addr_us_get(lll->phy_rx);
	hcto -= radio_tx_chain_delay_get(lll->phy_tx, lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_chain_delay_get(0, 0);
	hcto += addr_us_get(0);
	hcto -= radio_tx_chain_delay_get(0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	if (true) {
#elif defined(CONFIG_BT_CENTRAL) && defined(CONFIG_BT_CTLR_CONN_RSSI)
	/* Measure RSSI only on the first Rx of a central's event */
	if (!trx_cnt && !lll->role) {
#else
	if (false) {
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

		radio_rssi_measure();
	}

#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
	defined(CONFIG_BT_CTLR_TX_DEFER) || \
	defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR ||
	* CONFIG_BT_CTLR_TX_DEFER ||
	* HAL_RADIO_GPIO_HAVE_PA_PIN
	*/

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();
#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + lll->tifs_rx_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(lll->phy_tx,
							  lll->phy_flags) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + lll->tifs_rx_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(lll_conn_isr_rx, param);

#if defined(CONFIG_BT_CTLR_LOW_LAT)
	ull_conn_lll_tx_demux_sched(lll);
#endif /* CONFIG_BT_CTLR_LOW_LAT */
}
778
/* Configure the radio Rx packet buffer for the next reception.
 *
 * Picks the effective maximum Rx payload (data length extension aware),
 * configures the radio packet structure for the Rx PHY, and routes Rx
 * either via CCM decryption (encrypted link) or directly into the Rx
 * node's PDU buffer.
 */
void lll_conn_rx_pkt_set(struct lll_conn *lll)
{
	struct pdu_data *pdu_data_rx;
	struct node_rx_pdu *node_rx;
	uint16_t max_rx_octets;
	uint8_t phy;

	node_rx = ull_pdu_rx_alloc_peek(1);
	LL_ASSERT(node_rx);

	/* In case of ISR latencies, if packet pointer has not been set on time
	 * then we do not want to check uninitialized length in rx buffer that
	 * did not get used by Radio DMA. This would help us in detecting radio
	 * ready event being set? We can not detect radio ready if it happens
	 * twice before Radio ISR executes after latency.
	 */
	pdu_data_rx = (void *)node_rx->pdu;
	pdu_data_rx->len = 0U;

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
	max_rx_octets = lll->dle.eff.max_rx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
	max_rx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */

	/* Always allow room for the largest LL control PDU even if the
	 * effective data length is smaller.
	 */
	if ((PDU_DC_CTRL_RX_SIZE_MAX > PDU_DC_PAYLOAD_SIZE_MIN) &&
	    (max_rx_octets < PDU_DC_CTRL_RX_SIZE_MAX)) {
		max_rx_octets = PDU_DC_CTRL_RX_SIZE_MAX;
	}

#if defined(CONFIG_BT_CTLR_PHY)
	phy = lll->phy_rx;
#else /* !CONFIG_BT_CTLR_PHY */
	phy = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_phy_set(phy, 0);

	if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (lll->enc_rx) {
		/* Encrypted Rx: reserve space for the trailing MIC */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, (max_rx_octets + PDU_MIC_SIZE),
				    RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
							 RADIO_PKT_CONF_CTE_DISABLED));

#if defined(CONFIG_SOC_NRF52832) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
		/* nRF52832 workaround: decrypt into a dedicated buffer */
		radio_pkt_rx_set(radio_ccm_rx_pkt_set(&lll->ccm_rx, phy,
						      radio_pkt_decrypt_get()));
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#else
		radio_pkt_rx_set(radio_ccm_rx_pkt_set(&lll->ccm_rx, phy,
						      pdu_data_rx));
#endif
#endif /* CONFIG_BT_CTLR_LE_ENC */
	} else {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, max_rx_octets,
				    RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
							 RADIO_PKT_CONF_CTE_DISABLED));

		radio_pkt_rx_set(pdu_data_rx);
	}
}
845
/* Configure the radio Tx packet buffer for the given PDU.
 *
 * Picks the effective maximum Tx payload (data length extension aware),
 * enables CTE in the radio packet configuration when the PDU carries one,
 * and routes Tx either through CCM encryption (encrypted link) or directly
 * from the PDU buffer.
 */
void lll_conn_tx_pkt_set(struct lll_conn *lll, struct pdu_data *pdu_data_tx)
{
	uint8_t phy, flags, pkt_flags;
	uint16_t max_tx_octets;

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
	max_tx_octets = lll->dle.eff.max_tx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
	max_tx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */

	/* Always allow room for the largest LL control PDU even if the
	 * effective data length is smaller.
	 */
	if ((PDU_DC_CTRL_TX_SIZE_MAX > PDU_DC_PAYLOAD_SIZE_MIN) &&
	    (max_tx_octets < PDU_DC_CTRL_TX_SIZE_MAX)) {
		max_tx_octets = PDU_DC_CTRL_TX_SIZE_MAX;
	}

#if defined(CONFIG_BT_CTLR_PHY)
	phy = lll->phy_tx;
	flags = lll->phy_flags;
#else /* !CONFIG_BT_CTLR_PHY */
	phy = 0U;
	flags = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_phy_set(phy, flags);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	if (pdu_data_tx->cp) {
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
						 RADIO_PKT_CONF_CTE_ENABLED);
	} else
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	{
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
	}

	if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (lll->enc_tx) {
		/* Encrypted Tx: reserve space for the trailing MIC */
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, (max_tx_octets + PDU_MIC_SIZE),
				    pkt_flags);

		radio_pkt_tx_set(radio_ccm_tx_pkt_set(&lll->ccm_tx, pdu_data_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */
	} else {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, max_tx_octets, pkt_flags);

		radio_pkt_tx_set(pdu_data_tx);
	}
}
897
/* Prepare the next Tx PDU for the connection event.
 *
 * Returns (via pdu_data_tx) either the pre-canned empty PDU — when no Tx
 * is queued or an empty PDU retry is pending — or the head of the Tx memq,
 * fragmented to the effective max Tx octets. Sets the MD (more data) bit
 * when further data is queued, a fragment remains, or MD is being forced.
 */
void lll_conn_pdu_tx_prep(struct lll_conn *lll, struct pdu_data **pdu_data_tx)
{
	struct node_tx *tx;
	struct pdu_data *p;
	memq_link_t *link;

	link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail, (void **)&tx);
	if (lll->empty || !link) {
		/* No queued Tx (or retrying empty PDU): use the empty PDU */
		lll->empty = 1U;

		p = (void *)radio_pkt_empty_get();
		if (link || FORCE_MD_CNT_GET()) {
			p->md = 1U;
		} else {
			p->md = 0U;
		}
	} else {
		uint16_t max_tx_octets;

		/* Resume at the current fragmentation offset of the head PDU */
		p = (void *)(tx->pdu + lll->packet_tx_head_offset);

		if (!lll->packet_tx_head_len) {
			lll->packet_tx_head_len = p->len;
		}

		if (lll->packet_tx_head_offset) {
			p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX) || defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
			/* BT 5.3 Core Spec does not define handling of CP bit
			 * for PDUs fragmented by Controller, hence the CP bit
			 * is set to zero. The CTE should not be transmitted
			 * with CONTINUE PDUs if fragmentation is performed.
			 */
			p->cp = 0U;
			p->octet3.resv[0] = 0U;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX || CONFIG_BT_CTLR_DF_CONN_CTE_RX */
		}

		p->len = lll->packet_tx_head_len - lll->packet_tx_head_offset;

		max_tx_octets = ull_conn_lll_max_tx_octets_get(lll);

		/* Fragment data PDUs (never LL control PDUs) that exceed the
		 * effective max Tx octets; a fragment implies more data.
		 */
		if (((PDU_DC_CTRL_TX_SIZE_MAX <= PDU_DC_PAYLOAD_SIZE_MIN) ||
		     (p->ll_id != PDU_DATA_LLID_CTRL)) &&
		    (p->len > max_tx_octets)) {
			p->len = max_tx_octets;
			p->md = 1U;
		} else if ((link->next != lll->memq_tx.tail) ||
			   FORCE_MD_CNT_GET()) {
			p->md = 1U;
		} else {
			p->md = 0U;
		}

		p->rfu = 0U;

#if !defined(CONFIG_BT_CTLR_DATA_LENGTH_CLEAR)
#if !defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX) && !defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		/* Initialize only if vendor PDU octet3 present */
		if (sizeof(p->octet3.resv)) {
			p->octet3.resv[0] = 0U;
		}
#endif /* !CONFIG_BT_CTLR_DF_CONN_CTE_TX && !CONFIG_BT_CTLR_DF_CONN_CTE_RX */
#endif /* CONFIG_BT_CTLR_DATA_LENGTH_CLEAR */
	}

	*pdu_data_tx = p;
}
967
968 #if defined(CONFIG_BT_CTLR_FORCE_MD_AUTO)
969 uint8_t lll_conn_force_md_cnt_set(uint8_t reload_cnt)
970 {
971 uint8_t previous;
972
973 previous = force_md_cnt_reload;
974 force_md_cnt_reload = reload_cnt;
975
976 return previous;
977 }
978 #endif /* CONFIG_BT_CTLR_FORCE_MD_AUTO */
979
/* Common initialization/reset hook shared by lll_conn_init() and
 * lll_conn_reset(); nothing to do at present.
 */
static int init_reset(void)
{
	return 0;
}
984
/* Radio ISR when the connection event is done (or aborted).
 *
 * Generates the event done extra carrying trx/CRC/abort (and MIC) state
 * for ULL supervision handling, and for peripheral role reports anchor
 * point drift so ULL can compensate window widening.
 */
static void isr_done(void *param)
{
	struct event_done_extra *e;

	lll_isr_status_reset();

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CONN;
	e->trx_cnt = trx_cnt;
	e->crc_valid = crc_valid;
	e->is_aborted = is_aborted;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

#if defined(CONFIG_BT_PERIPHERAL)
	if (trx_cnt) {
		struct lll_conn *lll = param;

		if (lll->role) {
			uint32_t preamble_to_addr_us;

#if defined(CONFIG_BT_CTLR_PHY)
			preamble_to_addr_us =
				addr_us_get(lll->periph.phy_rx_event);
#else /* !CONFIG_BT_CTLR_PHY */
			preamble_to_addr_us =
				addr_us_get(0);
#endif /* !CONFIG_BT_CTLR_PHY */

			/* Actual anchor point offset, from radio ready to the
			 * captured access address timestamp.
			 */
			e->drift.start_to_address_actual_us =
				radio_tmr_aa_restore() - radio_tmr_ready_get();
			e->drift.window_widening_event_us =
				lll->periph.window_widening_event_us;
			e->drift.preamble_to_addr_us = preamble_to_addr_us;

			/* Reset window widening, as anchor point sync-ed */
			lll->periph.window_widening_event_us = 0;
			lll->periph.window_size_event_us = 0;
		}
	}
#endif /* CONFIG_BT_PERIPHERAL */

	lll_isr_cleanup(param);
}
1033
1034 static inline bool ctrl_pdu_len_check(uint8_t len)
1035 {
1036 return len <= (offsetof(struct pdu_data, llctrl) +
1037 sizeof(struct pdu_data_llctrl));
1038
1039 }
1040
/**
 * @brief Process a received Data Channel PDU in the radio ISR.
 *
 * Performs Link Layer acknowledgment handling (NESN vs. local SN), advances
 * or dequeues the head of the Tx queue accordingly, and decides whether the
 * received PDU shall be enqueued towards the upper link layer.
 *
 * @param lll           Connection LLL context.
 * @param pdu_data_rx   Received PDU (radio Rx buffer).
 * @param is_rx_enqueue Output: set to 1 when the received non-empty PDU shall
 *                      be enqueued towards ULL.
 * @param tx_release    Output: acknowledged Tx node to be released upstream;
 *                      left untouched when no whole PDU was acknowledged.
 * @param is_done       Output: set for a Central when neither side has more
 *                      data this event (both local and remote MD bits clear).
 *
 * @return 0 on success, -EINVAL on MIC failure.
 */
static inline int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
			     uint8_t *is_rx_enqueue,
			     struct node_tx **tx_release, uint8_t *is_done)
{
#if defined(CONFIG_SOC_NRF52832) && \
	defined(CONFIG_BT_CTLR_LE_ENC) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
	/* nRF52832 CCM decrypt workaround: copy the PDU header (everything
	 * before lldata) from the HAL decrypt buffer back into the Rx buffer
	 * so the rest of this function can use pdu_data_rx uniformly.
	 * NOTE(review): header-only copy — the payload is presumably placed
	 * in the Rx buffer by the CCM HW itself; confirm against the HAL.
	 */
	if (lll->enc_rx) {
		uint8_t *pkt_decrypt;

		pkt_decrypt = radio_pkt_decrypt_get();
		memcpy((void *)pdu_data_rx, (void *)pkt_decrypt,
		       offsetof(struct pdu_data, lldata));
	}
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#endif

	/* Ack for tx-ed data: peer's NESN differing from our SN means our
	 * last transmitted PDU was acknowledged.
	 */
	if (pdu_data_rx->nesn != lll->sn) {
		struct pdu_data *pdu_data_tx;
		struct node_tx *tx;
		memq_link_t *link;

		/* Increment sequence number */
		lll->sn++;

#if defined(CONFIG_BT_PERIPHERAL)
		/* First ack (and redundantly any other ack) enable use of
		 * peripheral latency.
		 */
		if (lll->role) {
			lll->periph.latency_enabled = 1;
		}
#endif /* CONFIG_BT_PERIPHERAL */

		/* One PDU acknowledged; decrement the forced-MD budget */
		FORCE_MD_CNT_DEC();

		if (!lll->empty) {
			/* The acknowledged PDU is the head of the Tx queue */
			link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail,
					 (void **)&tx);
		} else {
			/* The acknowledged PDU was the locally generated
			 * empty PDU; nothing to dequeue or release.
			 */
			lll->empty = 0;

			pdu_data_tx = (void *)radio_pkt_empty_get();
			if (IS_ENABLED(CONFIG_BT_CENTRAL) && !lll->role &&
			    !pdu_data_rx->md) {
				/* Central: event done when both MD bits are
				 * clear (no more data either direction).
				 */
				*is_done = !pdu_data_tx->md;
			}

			link = NULL;
		}

		if (link) {
			uint8_t pdu_data_tx_len;
			uint8_t offset;

			/* Head PDU, at the fragment offset last transmitted */
			pdu_data_tx = (void *)(tx->pdu +
					       lll->packet_tx_head_offset);

			pdu_data_tx_len = pdu_data_tx->len;
#if defined(CONFIG_BT_CTLR_LE_ENC)
			if (pdu_data_tx_len != 0U) {
				/* if encrypted increment tx counter */
				if (lll->enc_tx) {
					lll->ccm_tx.counter++;
				}
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Advance past the acknowledged fragment; a queued
			 * PDU may be transmitted in multiple fragments.
			 */
			offset = lll->packet_tx_head_offset + pdu_data_tx_len;
			if (offset < lll->packet_tx_head_len) {
				/* More fragments of this PDU remain queued */
				lll->packet_tx_head_offset = offset;
			} else if (offset == lll->packet_tx_head_len) {
				/* Whole PDU acknowledged: dequeue it and hand
				 * the node back for upstream release.
				 */
				lll->packet_tx_head_len = 0;
				lll->packet_tx_head_offset = 0;

				memq_dequeue(lll->memq_tx.tail,
					     &lll->memq_tx.head, NULL);

				/* TX node UPSTREAM, i.e. Tx node ack path */
				link->next = tx->next; /* Indicates ctrl or data
							* pool.
							*/
				tx->next = link;

				*tx_release = tx;

				FORCE_MD_CNT_SET();
			} else {
				/* Offset must never run past the PDU length */
				LL_ASSERT(0);
			}

			if (IS_ENABLED(CONFIG_BT_CENTRAL) && !lll->role &&
			    !pdu_data_rx->md) {
				/* Central: event done when both MD bits are
				 * clear (no more data either direction).
				 */
				*is_done = !pdu_data_tx->md;
			}
		}
	}

	/* process received data: new PDU when the peer's SN matches our NESN */
	if ((pdu_data_rx->sn == lll->nesn) &&
	    /* check so that we will NEVER use the rx buffer reserved for empty
	     * packet and internal control enqueue
	     */
	    (ull_pdu_rx_alloc_peek(3) != 0)) {
		/* Increment next expected serial number */
		lll->nesn++;

		if (pdu_data_rx->len != 0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* If required, wait for CCM to finish
			 */
			if (lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				bool mic_failure = !radio_ccm_mic_is_valid();

				if (mic_failure &&
				    lll->ccm_rx.counter == 0 &&
				    (pdu_data_rx->ll_id ==
				     PDU_DATA_LLID_CTRL)) {
					/* Received an LL control packet in the
					 * middle of the LL encryption procedure
					 * with MIC failure.
					 * This could be an unencrypted packet
					 */
					struct pdu_data *scratch_pkt =
						radio_pkt_scratch_get();

					if (ctrl_pdu_len_check(
						scratch_pkt->len)) {
						/* Take the raw (undecrypted)
						 * PDU from the scratch buffer.
						 */
						memcpy(pdu_data_rx,
						       scratch_pkt,
						       scratch_pkt->len +
						       offsetof(struct pdu_data,
							llctrl));
						mic_failure = false;
						/* Pre-compensate the
						 * unconditional increment
						 * below so the Rx packet
						 * counter stays at zero for
						 * this unencrypted PDU.
						 */
						lll->ccm_rx.counter--;
					}
				}

				if (mic_failure) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					return -EINVAL;
				}

				/* Increment counter */
				lll->ccm_rx.counter++;

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue non-empty PDU */
			*is_rx_enqueue = 1U;
		}
	}

	return 0;
}
1210
#if defined(CONFIG_BT_CTLR_TX_DEFER)
/* Deferred Tx setup: prepare and load the Tx PDU late, so that a non-empty
 * PDU enqueued after the radio was armed can still be picked up.
 */
static void isr_tx_deferred_set(void *param)
{
	struct lll_conn *lll = param;
	struct pdu_data *tx_pdu;

	/* Pick the PDU to transmit now (queued data or the empty PDU) */
	lll_conn_pdu_tx_prep(lll, &tx_pdu);

	/* Stamp the current sequence numbers into the PDU header */
	tx_pdu->sn = lll->sn;
	tx_pdu->nesn = lll->nesn;

	/* Load the PDU into the radio packet buffer */
	lll_conn_tx_pkt_set(lll, tx_pdu);
}
#endif /* CONFIG_BT_CTLR_TX_DEFER */
1229
1230 static void empty_tx_init(void)
1231 {
1232 struct pdu_data *p;
1233
1234 p = (void *)radio_pkt_empty_get();
1235 p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;
1236
1237 /* cp, rfu, and resv fields in the empty PDU buffer is statically
1238 * zero initialized at power up and these values in this buffer are
1239 * not modified at runtime.
1240 */
1241 }
1242
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
/**
 * @brief Create and enqueue a connection IQ sample report node.
 *
 * Allocates an IQ report node, fills it with CTE sampling meta data for the
 * just-received PDU and enqueues it towards ULL.
 *
 * @param lll           Connection LLL context.
 * @param rssi_ready    Non-zero when the radio holds a valid RSSI measurement.
 * @param packet_status Packet status value to store in the report.
 *
 * @return true when a report was created and enqueued, false when sampling is
 *         disabled or not applicable for the current PHY.
 */
static inline bool create_iq_report(struct lll_conn *lll, uint8_t rssi_ready, uint8_t packet_status)
{
	struct lll_df_conn_rx_params *rx_params;
	struct lll_df_conn_rx_cfg *rx_cfg;

#if defined(CONFIG_BT_CTLR_PHY)
	/* No IQ sampling on the Coded PHY */
	if (lll->phy_rx == PHY_CODED) {
		return false;
	}
#endif /* CONFIG_BT_CTLR_PHY */

	rx_cfg = &lll->df_rx_cfg;

	rx_params = dbuf_curr_get(&rx_cfg->hdr);

	if (rx_params->is_enabled) {
		struct node_rx_iq_report *iq_report;
		struct node_rx_ftr *ftr;
		uint8_t cte_info;
		uint8_t ant;

		cte_info = radio_df_cte_status_get();
		ant = radio_df_pdu_antenna_switch_pattern_get();
		iq_report = ull_df_iq_report_alloc();
		/* Fix: never dereference a failed allocation in ISR context;
		 * ULL must have reserved free report nodes before enabling
		 * sampling.
		 */
		LL_ASSERT(iq_report);

		iq_report->rx.hdr.type = NODE_RX_TYPE_CONN_IQ_SAMPLE_REPORT;
		iq_report->sample_count = radio_df_iq_samples_amount_get();
		iq_report->packet_status = packet_status;
		iq_report->rssi_ant_id = ant;
		/* Reinterpret the raw CTEInfo byte as its bit-field view */
		iq_report->cte_info = *(struct pdu_cte_info *)&cte_info;
		iq_report->local_slot_durations = rx_params->slot_durations;
		/* Event counter is updated to next value during event preparation, hence
		 * it has to be subtracted to store actual event counter value.
		 */
		iq_report->event_counter = lll->event_counter - 1;

		ftr = &iq_report->rx.rx_ftr;
		ftr->param = lll;
		ftr->rssi = ((rssi_ready) ? radio_rssi_get() : BT_HCI_LE_RSSI_NOT_AVAILABLE);

		ull_rx_put(iq_report->rx.hdr.link, iq_report);

		return true;
	}

	return false;
}

#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
1293
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
/**
 * @brief Get latest transmitted pdu_data instance
 *
 * @param lll Pointer to lll_conn object
 *
 * @return Return pointer to latest pdu_data instance
 */
static struct pdu_data *get_last_tx_pdu(struct lll_conn *lll)
{
	memq_link_t *link;
	struct node_tx *tx;

	link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail, (void **)&tx);
	if (!lll->empty && link) {
		/* Head of the Tx queue, at the current transmit offset */
		return (void *)(tx->pdu + lll->packet_tx_head_offset);
	}

	/* The empty PDU was transmitted */
	return radio_pkt_empty_get();
}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
1318