/*
 * Copyright (c) 2018-2020 Nordic Semiconductor ASA
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>

#include <zephyr/toolchain.h>
#include <soc.h>

#include <zephyr/sys/util.h>

#include "hal/cpu.h"
#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/radio_df.h"

#include "util/util.h"
#include "util/mem.h"
#include "util/memq.h"
#include "util/mfifo.h"
#include "util/dbuf.h"

#include "pdu_df.h"
#include "pdu_vendor.h"
#include "pdu.h"

#include "lll.h"
#include "lll_clock.h"
#include "lll_df_types.h"
#include "lll_df.h"
#include "lll_conn.h"

#include "lll_internal.h"
#include "lll_df_internal.h"
#include "lll_tim_internal.h"
#include "lll_prof_internal.h"

#include <zephyr/bluetooth/hci_types.h>

#include "hal/debug.h"

static int init_reset(void);
static void isr_done(void *param);
static inline int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
			     uint8_t *is_rx_enqueue,
			     struct node_tx **tx_release, uint8_t *is_done);

#if defined(CONFIG_BT_CTLR_TX_DEFER)
static void isr_tx_deferred_set(void *param);
#endif /* CONFIG_BT_CTLR_TX_DEFER */

static void empty_tx_init(void);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
static inline bool create_iq_report(struct lll_conn *lll, uint8_t rssi_ready,
				    uint8_t packet_status);
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
static struct pdu_data *get_last_tx_pdu(struct lll_conn *lll);
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */

static uint8_t crc_expire;
static uint8_t crc_valid;
static uint8_t is_aborted;
static uint16_t trx_cnt;

#if defined(CONFIG_BT_CTLR_LE_ENC)
static uint8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

#if defined(CONFIG_BT_CTLR_FORCE_MD_COUNT) && \
	(CONFIG_BT_CTLR_FORCE_MD_COUNT > 0)
#if defined(CONFIG_BT_CTLR_FORCE_MD_AUTO)
static uint8_t force_md_cnt_reload;
#define BT_CTLR_FORCE_MD_COUNT force_md_cnt_reload
#else
#define BT_CTLR_FORCE_MD_COUNT CONFIG_BT_CTLR_FORCE_MD_COUNT
#endif
static uint8_t force_md_cnt;

#define FORCE_MD_CNT_INIT() \
	{ \
		force_md_cnt = 0U; \
	}

#define FORCE_MD_CNT_DEC() \
	do { \
		if (force_md_cnt) { \
			force_md_cnt--; \
		} \
	} while (false)

#define FORCE_MD_CNT_GET() force_md_cnt

#define FORCE_MD_CNT_SET() \
	do { \
		if (force_md_cnt || \
		    (trx_cnt >= ((CONFIG_BT_BUF_ACL_TX_COUNT) - 1))) { \
			force_md_cnt = BT_CTLR_FORCE_MD_COUNT; \
		} \
	} while (false)

#else /* !CONFIG_BT_CTLR_FORCE_MD_COUNT */
#define FORCE_MD_CNT_INIT()
#define FORCE_MD_CNT_DEC()
#define FORCE_MD_CNT_GET() 0
#define FORCE_MD_CNT_SET()
#endif /* !CONFIG_BT_CTLR_FORCE_MD_COUNT */
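
/* NOTE: Summary of the force-MD mechanism as read from this file (not
 * normative): FORCE_MD_CNT_SET() reloads force_md_cnt when a Tx node is
 * fully acknowledged while the event has already carried at least
 * (CONFIG_BT_BUF_ACL_TX_COUNT - 1) exchanges, or while an override is
 * already active. lll_conn_pdu_tx_prep() then keeps the MD bit set while
 * FORCE_MD_CNT_GET() is non-zero, presumably to hold the connection
 * event open while the host refills the Tx queue, and FORCE_MD_CNT_DEC()
 * counts the override down on each acknowledgement.
 */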

int lll_conn_init(void)
{
	int err;

	err = init_reset();
	if (err) {
		return err;
	}

	empty_tx_init();

	return 0;
}

int lll_conn_reset(void)
{
	int err;

	err = init_reset();
	if (err) {
		return err;
	}

	FORCE_MD_CNT_INIT();

	return 0;
}

void lll_conn_flush(uint16_t handle, struct lll_conn *lll)
{
	/* Nothing to be flushed */
}

void lll_conn_prepare_reset(void)
{
	trx_cnt = 0U;
	crc_valid = 0U;
	crc_expire = 0U;
	is_aborted = 0U;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */
}

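/* NOTE: Return value convention shared by the is_abort_cb
 * implementations below, as read from this file: 0 lets the current
 * event continue because it was forced when near the supervision
 * timeout, -EBUSY defers an abort by the same event until at least one
 * trx has been exchanged with the peer, and -ECANCELED permits the
 * abort.
 */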
#if defined(CONFIG_BT_CENTRAL)
int lll_conn_central_is_abort_cb(void *next, void *curr,
				 lll_prepare_cb_t *resume_cb)
{
	struct lll_conn *lll = curr;

	/* Do not abort if near supervision timeout */
	if (lll->forced) {
		return 0;
	}

	/* Do not be aborted by same event if a single central trx has not been
	 * exchanged.
	 */
	if ((next == curr) && (trx_cnt < 1U)) {
		return -EBUSY;
	}

	return -ECANCELED;
}
#endif /* CONFIG_BT_CENTRAL */

#if defined(CONFIG_BT_PERIPHERAL)
int lll_conn_peripheral_is_abort_cb(void *next, void *curr,
				    lll_prepare_cb_t *resume_cb)
{
	struct lll_conn *lll = curr;

	/* Do not abort if near supervision timeout */
	if (lll->forced) {
		return 0;
	}

	/* Do not be aborted by same event if a single peripheral trx has not
	 * been exchanged.
	 */
	if ((next == curr) && (trx_cnt <= 1U)) {
		return -EBUSY;
	}

	return -ECANCELED;
}
#endif /* CONFIG_BT_PERIPHERAL */

void lll_conn_abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
	struct event_done_extra *e;
	struct lll_conn *lll;
	int err;

	/* NOTE: This is not a prepare being cancelled */
	if (!prepare_param) {
		/* Get reference to LLL connection context */
		lll = param;

		/* For a peripheral role, ensure at least one PDU is tx-ed
		 * back to central, otherwise let the supervision timeout
		 * countdown be started.
		 */
		if ((lll->role == BT_HCI_ROLE_PERIPHERAL) && (trx_cnt <= 1U)) {
			is_aborted = 1U;
		}

		/* Perform event abort here.
		 * After event has been cleanly aborted, clean up resources
		 * and dispatch event done.
		 */
		radio_isr_set(isr_done, param);
		radio_disable();
		return;
	}

	/* NOTE: Else clean the top half preparations of the aborted event
	 * currently in preparation pipeline.
	 */
	err = lll_hfclock_off();
	LL_ASSERT(err >= 0);

	/* Get reference to LLL connection context */
	lll = prepare_param->param;

	/* Accumulate the latency as event is aborted while being in pipeline */
	lll->lazy_prepare = prepare_param->lazy;
	lll->latency_prepare += (lll->lazy_prepare + 1U);

#if defined(CONFIG_BT_PERIPHERAL)
	if (lll->role == BT_HCI_ROLE_PERIPHERAL) {
		/* Accumulate window widening */
		lll->periph.window_widening_prepare_us +=
			lll->periph.window_widening_periodic_us *
			(prepare_param->lazy + 1);
		if (lll->periph.window_widening_prepare_us >
		    lll->periph.window_widening_max_us) {
			lll->periph.window_widening_prepare_us =
				lll->periph.window_widening_max_us;
		}
	}
#endif /* CONFIG_BT_PERIPHERAL */

	/* Extra done event, to check supervision timeout */
	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CONN;
	e->trx_cnt = 0U;
	e->crc_valid = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */

	lll_done(param);
}

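/* ISR on reception of a PDU: validates CRC, processes acknowledgement
 * and flow control via isr_rx_pdu(), prepares the next Tx PDU, and
 * decides whether the connection event continues (tIFS switch to
 * lll_conn_isr_tx) or is done (isr_done).
 */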
void lll_conn_isr_rx(void *param)
{
	uint8_t is_empty_pdu_tx_retry;
	struct pdu_data *pdu_data_rx;
	struct pdu_data *pdu_data_tx;
	struct node_rx_pdu *node_rx;
	struct node_tx *tx_release;
#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	uint32_t pa_lna_enable_us;
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */
	uint8_t is_rx_enqueue;
	struct lll_conn *lll;
	uint8_t rssi_ready;
	bool is_iq_report;
	uint8_t is_ull_rx;
	uint8_t trx_done;
	uint8_t is_done;
	uint8_t cte_len;
	uint8_t crc_ok;
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	bool cte_ready;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Read radio status and events */
	trx_done = radio_is_done();
	if (trx_done) {
		crc_ok = radio_crc_is_valid();
		rssi_ready = radio_rssi_is_ready();
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		cte_ready = radio_df_cte_ready();
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
	} else {
		crc_ok = rssi_ready = 0U;
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		cte_ready = false;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
	}

	/* Clear radio rx status and events */
	lll_isr_rx_status_reset();

	/* No Rx */
	if (!trx_done) {
		radio_isr_set(isr_done, param);
		radio_disable();

		return;
	}

	trx_cnt++;

	is_done = 0U;
	tx_release = NULL;
	is_rx_enqueue = 0U;

	lll = param;

	node_rx = ull_pdu_rx_alloc_peek(1);
	LL_ASSERT(node_rx);

	pdu_data_rx = (void *)node_rx->pdu;

	if (crc_ok) {
		uint32_t err;

		err = isr_rx_pdu(lll, pdu_data_rx, &is_rx_enqueue, &tx_release,
				 &is_done);
		if (err) {
			/* Disable radio trx switch on MIC failure for both
			 * central and peripheral, and close the radio event.
			 */
			radio_isr_set(isr_done, param);
			radio_disable();

			/* assert if radio started tx before being disabled */
			LL_ASSERT(!radio_is_ready());

			goto lll_conn_isr_rx_exit;
		}

		/* Reset CRC expiry counter */
		crc_expire = 0U;

		/* CRC valid flag used to detect supervision timeout */
		crc_valid = 1U;
	} else {
		/* Start CRC error countdown, if not already started */
		if (crc_expire == 0U) {
			crc_expire = 2U;
		}

		/* CRC error countdown; the event is closed after two
		 * consecutive CRC errors.
		 */
		crc_expire--;
		is_done = (crc_expire == 0U);
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX) && defined(CONFIG_BT_CTLR_LE_ENC)
	if (lll->enc_rx) {
		struct pdu_data *pdu_scratch;

		pdu_scratch = (struct pdu_data *)radio_pkt_scratch_get();

		if (pdu_scratch->cp) {
			(void)memcpy((void *)&pdu_data_rx->octet3.cte_info,
				     (void *)&pdu_scratch->octet3.cte_info,
				     sizeof(pdu_data_rx->octet3.cte_info));
		}
	}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX && CONFIG_BT_CTLR_LE_ENC */

	/* prepare tx packet */
	is_empty_pdu_tx_retry = lll->empty;
	lll_conn_pdu_tx_prep(lll, &pdu_data_tx);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	if (pdu_data_tx->cp) {
		cte_len = CTE_LEN_US(pdu_data_tx->octet3.cte_info.time);

		lll_df_cte_tx_configure(pdu_data_tx->octet3.cte_info.type,
					pdu_data_tx->octet3.cte_info.time,
					lll->df_tx_cfg.ant_sw_len,
					lll->df_tx_cfg.ant_ids);
	} else
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	{
		cte_len = 0U;
	}

	/* Decide on event continuation and hence Radio Shorts to use */
	is_done = is_done || ((crc_ok) &&
			      (pdu_data_rx->md == 0) &&
			      (pdu_data_tx->md == 0) &&
			      (pdu_data_tx->len == 0));

	if (is_done) {
		radio_isr_set(isr_done, param);

		if (0) {
#if defined(CONFIG_BT_CENTRAL)
		/* Event done for central */
		} else if (!lll->role) {
			radio_disable();

			/* assert if radio packet ptr is not set and radio
			 * started tx.
			 */
			LL_ASSERT(!radio_is_ready());

			/* Restore state if last transmitted was empty PDU */
			lll->empty = is_empty_pdu_tx_retry;

			goto lll_conn_isr_rx_exit;
#endif /* CONFIG_BT_CENTRAL */
#if defined(CONFIG_BT_PERIPHERAL)
		/* Event done for peripheral */
		} else {
			radio_switch_complete_and_disable();
#endif /* CONFIG_BT_PERIPHERAL */
		}
	} else {
		radio_tmr_tifs_set(lll->tifs_rx_us);

#if defined(CONFIG_BT_CTLR_PHY)
		radio_switch_complete_and_rx(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
		radio_switch_complete_and_rx(0);
#endif /* !CONFIG_BT_CTLR_PHY */

		radio_isr_set(lll_conn_isr_tx, param);

		/* capture end of Tx-ed PDU, used to calculate HCTO. */
		radio_tmr_end_capture();
	}

	/* Fill sn and nesn */
	pdu_data_tx->sn = lll->sn;
	pdu_data_tx->nesn = lll->nesn;

	/* setup the radio tx packet buffer */
	lll_conn_tx_pkt_set(lll, pdu_data_tx);

#if defined(HAL_RADIO_GPIO_HAVE_PA_PIN)

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	/* PA enable is overwriting packet end used in ISR profiling, hence
	 * back it up for later use.
	 */
	lll_prof_radio_end_backup();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */

	radio_gpio_pa_setup();

	pa_lna_enable_us =
		radio_tmr_tifs_base_get() + lll->tifs_tx_us - cte_len - HAL_RADIO_GPIO_PA_OFFSET;
#if defined(CONFIG_BT_CTLR_PHY)
	pa_lna_enable_us -= radio_rx_chain_delay_get(lll->phy_rx, PHY_FLAGS_S8);
#else /* !CONFIG_BT_CTLR_PHY */
	pa_lna_enable_us -= radio_rx_chain_delay_get(0, PHY_FLAGS_S2);
#endif /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(pa_lna_enable_us);
#endif /* HAL_RADIO_GPIO_HAVE_PA_PIN */

	/* assert if radio packet ptr is not set and radio started tx */
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		LL_ASSERT_MSG(!radio_is_address(), "%s: Radio ISR latency: %u",
			      __func__, lll_prof_latency_get());
	} else {
		LL_ASSERT(!radio_is_address());
	}

#if defined(CONFIG_BT_CTLR_TX_DEFER)
	if (!is_empty_pdu_tx_retry && (pdu_data_tx->len == 0U)) {
		uint32_t tx_defer_us;
		uint32_t defer_us;

		/* Restore state if transmission setup for empty PDU */
		lll->empty = 0U;

		/* Setup deferred tx packet set */
		tx_defer_us = radio_tmr_tifs_base_get() + lll->tifs_tx_us -
			      HAL_RADIO_TMR_DEFERRED_TX_DELAY_US;
		defer_us = radio_tmr_isr_set(tx_defer_us, isr_tx_deferred_set,
					     param);
	}
#endif /* CONFIG_BT_CTLR_TX_DEFER */

lll_conn_isr_rx_exit:
	/* Save the AA captured for the first Rx in connection event */
	if (!radio_tmr_aa_restore()) {
		radio_tmr_aa_save(radio_tmr_aa_get());
	}

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	lll_prof_cputime_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */

	is_ull_rx = 0U;

	if (tx_release) {
		LL_ASSERT(lll->handle != 0xFFFF);

		ull_conn_lll_ack_enqueue(lll->handle, tx_release);

		is_ull_rx = 1U;
	}

	if (is_rx_enqueue) {
#if defined(CONFIG_SOC_NRF52832) && \
	defined(CONFIG_BT_CTLR_LE_ENC) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
		if (lll->enc_rx) {
			uint8_t *pkt_decrypt_data;

			pkt_decrypt_data = (uint8_t *)radio_pkt_decrypt_get() +
					   offsetof(struct pdu_data, lldata);
			memcpy((void *)pdu_data_rx->lldata,
			       (void *)pkt_decrypt_data, pdu_data_rx->len);
		}
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#endif
		ull_pdu_rx_alloc();

		node_rx->hdr.type = NODE_RX_TYPE_DC_PDU;
		node_rx->hdr.handle = lll->handle;

		ull_rx_put(node_rx->hdr.link, node_rx);
		is_ull_rx = 1U;
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	if (cte_ready) {
		is_iq_report =
			create_iq_report(lll, rssi_ready,
					 (crc_ok ? BT_HCI_LE_CTE_CRC_OK :
						   BT_HCI_LE_CTE_CRC_ERR_CTE_BASED_TIME));
	} else {
#else
	if (1) {
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */
		is_iq_report = false;
	}

	if (is_ull_rx || is_iq_report) {
		ull_rx_sched();
	}

#if defined(CONFIG_BT_CTLR_CONN_RSSI)
	/* Collect RSSI for connection */
	if (rssi_ready) {
		uint8_t rssi = radio_rssi_get();

		lll->rssi_latest = rssi;

#if defined(CONFIG_BT_CTLR_CONN_RSSI_EVENT)
		if (((lll->rssi_reported - rssi) & 0xFF) >
		    LLL_CONN_RSSI_THRESHOLD) {
			if (lll->rssi_sample_count) {
				lll->rssi_sample_count--;
			}
		} else {
			lll->rssi_sample_count = LLL_CONN_RSSI_SAMPLE_COUNT;
		}
#endif /* CONFIG_BT_CTLR_CONN_RSSI_EVENT */
	}
#else /* !CONFIG_BT_CTLR_CONN_RSSI */
	ARG_UNUSED(rssi_ready);
#endif /* !CONFIG_BT_CTLR_CONN_RSSI */

#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
	lll_prof_send();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
}

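/* ISR after a PDU transmission completes: sets up the tIFS switch back
 * to Rx (with optional CTE Rx configuration and, on some SoCs, PHYEND
 * event delay compensation), configures the Rx packet buffer, programs
 * the header complete timeout (hcto) for the awaited reception, and
 * optionally arms the LNA GPIO.
 */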
void lll_conn_isr_tx(void *param)
{
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	static struct pdu_data *pdu_tx;
	uint8_t cte_len;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	struct lll_conn *lll;
	uint32_t hcto;

	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		lll_prof_latency_capture();
	}

	/* Clear radio tx status and events */
	lll_isr_tx_status_reset();

	lll = param;

	/* setup tIFS switching */
	radio_tmr_tifs_set(lll->tifs_tx_us);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
	enum radio_end_evt_delay_state end_evt_delay;
#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */

#if defined(CONFIG_BT_CTLR_PHY)
	if (lll->phy_rx != PHY_CODED) {
#else
	if (1) {
#endif /* CONFIG_BT_CTLR_PHY */
		struct lll_df_conn_rx_params *df_rx_params;
		struct lll_df_conn_rx_cfg *df_rx_cfg;

		df_rx_cfg = &lll->df_rx_cfg;
		/* Get last swapped CTE RX configuration. Do not swap it again
		 * here. It should remain unchanged for connection event
		 * duration.
		 */
		df_rx_params = dbuf_curr_get(&df_rx_cfg->hdr);

		if (df_rx_params->is_enabled) {
			(void)lll_df_conf_cte_rx_enable(df_rx_params->slot_durations,
							df_rx_params->ant_sw_len,
							df_rx_params->ant_ids,
							df_rx_cfg->chan,
							CTE_INFO_IN_S1_BYTE,
							lll->phy_rx);
		} else {
			lll_df_conf_cte_info_parsing_enable();
		}
#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
		end_evt_delay = END_EVT_DELAY_ENABLED;
	} else {
		end_evt_delay = END_EVT_DELAY_DISABLED;
#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */
	}

#if defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
	/* Use special API for SOC that requires compensation for PHYEND event delay. */

#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_with_delay_compensation_and_tx(lll->phy_rx, 0, lll->phy_tx,
							     lll->phy_flags, end_evt_delay);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_with_delay_compensation_and_tx(0, 0, 0, 0, end_evt_delay);
#endif /* !CONFIG_BT_CTLR_PHY */

#endif /* CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

	/* Use regular API for cases when:
	 * - CTE RX is not enabled,
	 * - SOC does not require compensation for PHYEND event delay.
	 */
#if !defined(CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE)
#if defined(CONFIG_BT_CTLR_PHY)
	radio_switch_complete_and_tx(lll->phy_rx, 0, lll->phy_tx, lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_switch_complete_and_tx(0, 0, 0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* !CONFIG_BT_CTLR_DF_PHYEND_OFFSET_COMPENSATION_ENABLE */

	lll_conn_rx_pkt_set(lll);

	/* assert if radio packet ptr is not set and radio started rx */
	if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
		LL_ASSERT_MSG(!radio_is_address(), "%s: Radio ISR latency: %u",
			      __func__, lll_prof_latency_get());
	} else {
		LL_ASSERT(!radio_is_address());
	}

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	pdu_tx = get_last_tx_pdu(lll);
	LL_ASSERT(pdu_tx);

	if (pdu_tx->cp) {
		cte_len = CTE_LEN_US(pdu_tx->octet3.cte_info.time);
	} else {
		cte_len = 0U;
	}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */

	/* Set the header complete timeout (hcto) for the awaited Rx: tIFS
	 * base plus +/- 2us active clock jitter, range delay, and +1 us
	 * PPI to timer start compensation.
	 */
	hcto = radio_tmr_tifs_base_get() + lll->tifs_hcto_us +
	       (EVENT_CLOCK_JITTER_US << 1) + RANGE_DELAY_US +
	       HAL_RADIO_TMR_START_DELAY_US;
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	hcto += cte_len;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
#if defined(CONFIG_BT_CTLR_PHY)
	hcto += radio_rx_chain_delay_get(lll->phy_rx, 1);
	hcto += addr_us_get(lll->phy_rx);
	hcto -= radio_tx_chain_delay_get(lll->phy_tx, lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
	hcto += radio_rx_chain_delay_get(0, 0);
	hcto += addr_us_get(0);
	hcto -= radio_tx_chain_delay_get(0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_tmr_hcto_configure(hcto);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
	if (true) {
#elif defined(CONFIG_BT_CENTRAL) && defined(CONFIG_BT_CTLR_CONN_RSSI)
	if (!trx_cnt && !lll->role) {
#else
	if (false) {
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

		radio_rssi_measure();
	}

#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
	defined(CONFIG_BT_CTLR_TX_DEFER) || \
	defined(HAL_RADIO_GPIO_HAVE_PA_PIN)
	radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR ||
	* CONFIG_BT_CTLR_TX_DEFER ||
	* HAL_RADIO_GPIO_HAVE_PA_PIN
	*/

#if defined(HAL_RADIO_GPIO_HAVE_LNA_PIN)
	radio_gpio_lna_setup();
#if defined(CONFIG_BT_CTLR_PHY)
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + lll->tifs_rx_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(lll->phy_tx,
							  lll->phy_flags) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
	radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + lll->tifs_rx_us -
				 (EVENT_CLOCK_JITTER_US << 1) -
				 radio_tx_chain_delay_get(0U, 0U) -
				 HAL_RADIO_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* HAL_RADIO_GPIO_HAVE_LNA_PIN */

	radio_isr_set(lll_conn_isr_rx, param);

#if defined(CONFIG_BT_CTLR_LOW_LAT_ULL)
	ull_conn_lll_tx_demux_sched(lll);
#endif /* CONFIG_BT_CTLR_LOW_LAT_ULL */
}

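/* Configure the radio packet structure and Rx buffer for the next
 * reception: PHY, maximum Rx length (effective data length, with room
 * for control PDUs and, when encrypted, the MIC), and the destination
 * buffer, which is routed through CCM when Rx encryption is enabled.
 */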
void lll_conn_rx_pkt_set(struct lll_conn *lll)
{
	struct pdu_data *pdu_data_rx;
	struct node_rx_pdu *node_rx;
	uint16_t max_rx_octets;
	uint8_t phy;

	node_rx = ull_pdu_rx_alloc_peek(1);
	LL_ASSERT(node_rx);

	/* In case of ISR latencies, if the packet pointer has not been
	 * set on time then we do not want to check an uninitialized
	 * length in an rx buffer that did not get used by the Radio
	 * DMA; hence the length is reset here. Note that a radio ready
	 * event cannot be detected if it happens twice before the Radio
	 * ISR executes after the latency.
	 */
	pdu_data_rx = (void *)node_rx->pdu;
	pdu_data_rx->len = 0U;

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
	max_rx_octets = lll->dle.eff.max_rx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
	max_rx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */

	if ((PDU_DC_CTRL_RX_SIZE_MAX > PDU_DC_PAYLOAD_SIZE_MIN) &&
	    (max_rx_octets < PDU_DC_CTRL_RX_SIZE_MAX)) {
		max_rx_octets = PDU_DC_CTRL_RX_SIZE_MAX;
	}

#if defined(CONFIG_BT_CTLR_PHY)
	phy = lll->phy_rx;
#else /* !CONFIG_BT_CTLR_PHY */
	phy = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_phy_set(phy, 0);

	if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (lll->enc_rx) {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (max_rx_octets + PDU_MIC_SIZE),
				    RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
							 RADIO_PKT_CONF_CTE_DISABLED));

#if defined(CONFIG_SOC_NRF52832) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
		radio_pkt_rx_set(radio_ccm_rx_pkt_set(&lll->ccm_rx, phy,
						      radio_pkt_decrypt_get()));
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#else
		radio_pkt_rx_set(radio_ccm_rx_pkt_set(&lll->ccm_rx, phy,
						      pdu_data_rx));
#endif
#endif /* CONFIG_BT_CTLR_LE_ENC */
	} else {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, max_rx_octets,
				    RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
							 RADIO_PKT_CONF_CTE_DISABLED));

		radio_pkt_rx_set(pdu_data_rx);
	}
}

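/* Configure the radio packet structure and Tx buffer for the next
 * transmission: PHY and flags, maximum Tx length, the CTE packet flag
 * when a CTE is to be appended, and the source buffer, routed through
 * CCM when Tx encryption is enabled.
 */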
void lll_conn_tx_pkt_set(struct lll_conn *lll, struct pdu_data *pdu_data_tx)
{
	uint8_t phy, flags, pkt_flags;
	uint16_t max_tx_octets;

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
	max_tx_octets = lll->dle.eff.max_tx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
	max_tx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */

	if ((PDU_DC_CTRL_TX_SIZE_MAX > PDU_DC_PAYLOAD_SIZE_MIN) &&
	    (max_tx_octets < PDU_DC_CTRL_TX_SIZE_MAX)) {
		max_tx_octets = PDU_DC_CTRL_TX_SIZE_MAX;
	}

#if defined(CONFIG_BT_CTLR_PHY)
	phy = lll->phy_tx;
	flags = lll->phy_flags;
#else /* !CONFIG_BT_CTLR_PHY */
	phy = 0U;
	flags = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */

	radio_phy_set(phy, flags);

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
	if (pdu_data_tx->cp) {
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
						 RADIO_PKT_CONF_CTE_ENABLED);
	} else
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */
	{
		pkt_flags = RADIO_PKT_CONF_FLAGS(RADIO_PKT_CONF_PDU_TYPE_DC, phy,
						 RADIO_PKT_CONF_CTE_DISABLED);
	}

	if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
	} else if (lll->enc_tx) {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT,
				    (max_tx_octets + PDU_MIC_SIZE), pkt_flags);

		radio_pkt_tx_set(radio_ccm_tx_pkt_set(&lll->ccm_tx, pdu_data_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */
	} else {
		radio_pkt_configure(RADIO_PKT_CONF_LENGTH_8BIT, max_tx_octets,
				    pkt_flags);

		radio_pkt_tx_set(pdu_data_tx);
	}
}

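/* Prepare the next Tx PDU: either the shared empty PDU (when the Tx
 * memq is empty, or an empty PDU is being retransmitted) or the head of
 * the Tx memq adjusted for the current fragmentation offset. The MD bit
 * is set when more data remains queued or while FORCE_MD_CNT_GET() is
 * non-zero. Typical call sequence in this file:
 *
 *   lll_conn_pdu_tx_prep(lll, &pdu_data_tx);
 *   pdu_data_tx->sn = lll->sn;
 *   pdu_data_tx->nesn = lll->nesn;
 *   lll_conn_tx_pkt_set(lll, pdu_data_tx);
 */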
void lll_conn_pdu_tx_prep(struct lll_conn *lll, struct pdu_data **pdu_data_tx)
{
	struct node_tx *tx;
	struct pdu_data *p;
	memq_link_t *link;

	link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail, (void **)&tx);
	if (lll->empty || !link) {
		lll->empty = 1U;

		p = (void *)radio_pkt_empty_get();
		if (link || FORCE_MD_CNT_GET()) {
			p->md = 1U;
		} else {
			p->md = 0U;
		}
	} else {
		uint16_t max_tx_octets;

		p = (void *)(tx->pdu + lll->packet_tx_head_offset);

		if (!lll->packet_tx_head_len) {
			lll->packet_tx_head_len = p->len;
		}

		if (lll->packet_tx_head_offset) {
			p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX) || defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
			/* BT 5.3 Core Spec does not define handling of CP bit
			 * for PDUs fragmented by Controller, hence the CP bit
			 * is set to zero. The CTE should not be transmitted
			 * with CONTINUE PDUs if fragmentation is performed.
			 */
			p->cp = 0U;
			p->octet3.resv[0] = 0U;
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX || CONFIG_BT_CTLR_DF_CONN_CTE_RX */
		}

		p->len = lll->packet_tx_head_len - lll->packet_tx_head_offset;

		max_tx_octets = ull_conn_lll_max_tx_octets_get(lll);

		if (((PDU_DC_CTRL_TX_SIZE_MAX <= PDU_DC_PAYLOAD_SIZE_MIN) ||
		     (p->ll_id != PDU_DATA_LLID_CTRL)) &&
		    (p->len > max_tx_octets)) {
			p->len = max_tx_octets;
			p->md = 1U;
		} else if ((link->next != lll->memq_tx.tail) ||
			   FORCE_MD_CNT_GET()) {
			p->md = 1U;
		} else {
			p->md = 0U;
		}

		p->rfu = 0U;

#if !defined(CONFIG_BT_CTLR_DATA_LENGTH_CLEAR)
#if !defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX) && !defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
		/* Initialize only if vendor PDU octet3 present */
		if (sizeof(p->octet3.resv)) {
			p->octet3.resv[0] = 0U;
		}
#endif /* !CONFIG_BT_CTLR_DF_CONN_CTE_TX && !CONFIG_BT_CTLR_DF_CONN_CTE_RX */
#endif /* CONFIG_BT_CTLR_DATA_LENGTH_CLEAR */
	}

	*pdu_data_tx = p;
}

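/* Runtime update of the force-MD reload value used by
 * FORCE_MD_CNT_SET() when CONFIG_BT_CTLR_FORCE_MD_AUTO is enabled.
 * Returns the previous reload value so that callers may restore it.
 */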
#if defined(CONFIG_BT_CTLR_FORCE_MD_AUTO)
uint8_t lll_conn_force_md_cnt_set(uint8_t reload_cnt)
{
	uint8_t previous;

	previous = force_md_cnt_reload;
	force_md_cnt_reload = reload_cnt;

	return previous;
}
#endif /* CONFIG_BT_CTLR_FORCE_MD_AUTO */

static int init_reset(void)
{
	return 0;
}

static void isr_done(void *param)
{
	struct event_done_extra *e;

	lll_isr_status_reset();

	e = ull_event_done_extra_get();
	LL_ASSERT(e);

	e->type = EVENT_DONE_EXTRA_TYPE_CONN;
	e->trx_cnt = trx_cnt;
	e->crc_valid = crc_valid;
	e->is_aborted = is_aborted;

#if defined(CONFIG_BT_CTLR_LE_ENC)
	e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */

#if defined(CONFIG_BT_PERIPHERAL)
	if (trx_cnt) {
		struct lll_conn *lll = param;

		if (lll->role) {
			uint32_t preamble_to_addr_us;

#if defined(CONFIG_BT_CTLR_PHY)
			preamble_to_addr_us = addr_us_get(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
			preamble_to_addr_us = addr_us_get(0);
#endif /* !CONFIG_BT_CTLR_PHY */

			e->drift.start_to_address_actual_us =
				radio_tmr_aa_restore() - radio_tmr_ready_get();
			e->drift.window_widening_event_us =
				lll->periph.window_widening_event_us;
			e->drift.preamble_to_addr_us = preamble_to_addr_us;

			/* Reset window widening, as anchor point sync-ed */
			lll->periph.window_widening_event_us = 0;
			lll->periph.window_size_event_us = 0;
		}
	}
#endif /* CONFIG_BT_PERIPHERAL */

	lll_isr_cleanup(param);
}

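/* Sanity check that a control PDU length fits within the llctrl union;
 * used in isr_rx_pdu() below when a MIC failure may in fact be an
 * unencrypted LL control PDU received during encryption setup.
 */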
static inline bool ctrl_pdu_len_check(uint8_t len)
{
	return len <= (offsetof(struct pdu_data, llctrl) +
		       sizeof(struct pdu_data_llctrl));
}

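/* Process a received PDU with valid CRC. Per the LE acknowledgement
 * scheme, the peer acknowledges our last Tx PDU by advancing its NESN:
 * when the received nesn differs from our sn, the Tx PDU (or the
 * current fragment of it) is acknowledged and can be released.
 * Conversely, a received PDU is new, and not a retransmission, only
 * when its sn equals our nesn.
 */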
static inline int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
			     uint8_t *is_rx_enqueue,
			     struct node_tx **tx_release, uint8_t *is_done)
{
#if defined(CONFIG_SOC_NRF52832) && \
	defined(CONFIG_BT_CTLR_LE_ENC) && \
	defined(HAL_RADIO_PDU_LEN_MAX) && \
	(!defined(CONFIG_BT_CTLR_DATA_LENGTH_MAX) || \
	 (CONFIG_BT_CTLR_DATA_LENGTH_MAX < (HAL_RADIO_PDU_LEN_MAX - 4)))
	if (lll->enc_rx) {
		uint8_t *pkt_decrypt;

		pkt_decrypt = radio_pkt_decrypt_get();
		memcpy((void *)pdu_data_rx, (void *)pkt_decrypt,
		       offsetof(struct pdu_data, lldata));
	}
#elif !defined(HAL_RADIO_PDU_LEN_MAX)
#error "Undefined HAL_RADIO_PDU_LEN_MAX."
#endif

	/* Ack for tx-ed data */
	if (pdu_data_rx->nesn != lll->sn) {
		struct pdu_data *pdu_data_tx;
		struct node_tx *tx;
		memq_link_t *link;

		/* Increment sequence number */
		lll->sn++;

#if defined(CONFIG_BT_PERIPHERAL)
		/* The first ack (and, redundantly, any subsequent ack)
		 * enables the use of peripheral latency.
		 */
		if (lll->role) {
			lll->periph.latency_enabled = 1;
		}
#endif /* CONFIG_BT_PERIPHERAL */

		FORCE_MD_CNT_DEC();

		if (!lll->empty) {
			link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail,
					 (void **)&tx);
		} else {
			lll->empty = 0;

			pdu_data_tx = (void *)radio_pkt_empty_get();
			if (IS_ENABLED(CONFIG_BT_CENTRAL) && !lll->role &&
			    !pdu_data_rx->md) {
				*is_done = !pdu_data_tx->md;
			}

			link = NULL;
		}

		if (link) {
			uint8_t pdu_data_tx_len;
			uint8_t offset;

			pdu_data_tx = (void *)(tx->pdu +
					       lll->packet_tx_head_offset);

			pdu_data_tx_len = pdu_data_tx->len;
#if defined(CONFIG_BT_CTLR_LE_ENC)
			if (pdu_data_tx_len != 0U) {
				/* if encrypted increment tx counter */
				if (lll->enc_tx) {
					lll->ccm_tx.counter++;
				}
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			offset = lll->packet_tx_head_offset + pdu_data_tx_len;
			if (offset < lll->packet_tx_head_len) {
				lll->packet_tx_head_offset = offset;
			} else if (offset == lll->packet_tx_head_len) {
				lll->packet_tx_head_len = 0;
				lll->packet_tx_head_offset = 0;

				memq_dequeue(lll->memq_tx.tail,
					     &lll->memq_tx.head, NULL);

				/* TX node UPSTREAM, i.e. Tx node ack path */
				link->next = tx->next; /* Indicates ctrl or data
							* pool.
							*/
				tx->next = link;

				*tx_release = tx;

				FORCE_MD_CNT_SET();
			} else {
				LL_ASSERT(0);
			}

			if (IS_ENABLED(CONFIG_BT_CENTRAL) && !lll->role &&
			    !pdu_data_rx->md) {
				*is_done = !pdu_data_tx->md;
			}
		}
	}

	/* process received data */
	if ((pdu_data_rx->sn == lll->nesn) &&
	    /* check so that we will NEVER use the rx buffer reserved for empty
	     * packet and internal control enqueue
	     */
	    (ull_pdu_rx_alloc_peek(3) != 0)) {
		/* Increment next expected serial number */
		lll->nesn++;

		if (pdu_data_rx->len != 0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
			/* If required, wait for CCM to finish */
			if (lll->enc_rx) {
				uint32_t done;

				done = radio_ccm_is_done();
				LL_ASSERT(done);

				bool mic_failure = !radio_ccm_mic_is_valid();

				if (mic_failure &&
				    lll->ccm_rx.counter == 0 &&
				    (pdu_data_rx->ll_id ==
				     PDU_DATA_LLID_CTRL)) {
					/* Received an LL control packet in the
					 * middle of the LL encryption procedure
					 * with MIC failure.
					 * This could be an unencrypted packet
					 */
					struct pdu_data *scratch_pkt =
						radio_pkt_scratch_get();

					if (ctrl_pdu_len_check(
						    scratch_pkt->len)) {
						memcpy(pdu_data_rx,
						       scratch_pkt,
						       scratch_pkt->len +
						       offsetof(struct pdu_data,
								llctrl));
						mic_failure = false;
						lll->ccm_rx.counter--;
					}
				}

				if (mic_failure) {
					/* Record MIC invalid */
					mic_state = LLL_CONN_MIC_FAIL;

					return -EINVAL;
				}

				/* Increment counter */
				lll->ccm_rx.counter++;

				/* Record MIC valid */
				mic_state = LLL_CONN_MIC_PASS;
			}
#endif /* CONFIG_BT_CTLR_LE_ENC */

			/* Enqueue non-empty PDU */
			*is_rx_enqueue = 1U;
		}
	}

	return 0;
}

#if defined(CONFIG_BT_CTLR_TX_DEFER)
static void isr_tx_deferred_set(void *param)
{
	struct pdu_data *pdu_data_tx;
	struct lll_conn *lll;

	/* Prepare Tx PDU, maybe we have non-empty PDU when we check here */
	lll = param;
	lll_conn_pdu_tx_prep(lll, &pdu_data_tx);

	/* Fill sn and nesn */
	pdu_data_tx->sn = lll->sn;
	pdu_data_tx->nesn = lll->nesn;

	/* setup the radio tx packet buffer */
	lll_conn_tx_pkt_set(lll, pdu_data_tx);
}
#endif /* CONFIG_BT_CTLR_TX_DEFER */

static void empty_tx_init(void)
{
	struct pdu_data *p;

	p = (void *)radio_pkt_empty_get();
	p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;

	/* cp, rfu, and resv fields in the empty PDU buffer are
	 * statically zero-initialized at power up and these values in
	 * this buffer are not modified at runtime.
	 */
}

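/* Allocate and enqueue an IQ sample report towards ULL when CTE
 * sampling was enabled for this connection event; returns true when a
 * report node was generated.
 */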
#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_RX)
static inline bool create_iq_report(struct lll_conn *lll, uint8_t rssi_ready,
				    uint8_t packet_status)
{
	struct lll_df_conn_rx_params *rx_params;
	struct lll_df_conn_rx_cfg *rx_cfg;

#if defined(CONFIG_BT_CTLR_PHY)
	if (lll->phy_rx == PHY_CODED) {
		return false;
	}
#endif /* CONFIG_BT_CTLR_PHY */

	rx_cfg = &lll->df_rx_cfg;

	rx_params = dbuf_curr_get(&rx_cfg->hdr);

	if (rx_params->is_enabled) {
		struct node_rx_iq_report *iq_report;
		struct node_rx_ftr *ftr;
		uint8_t cte_info;
		uint8_t ant;

		cte_info = radio_df_cte_status_get();
		ant = radio_df_pdu_antenna_switch_pattern_get();
		iq_report = ull_df_iq_report_alloc();

		iq_report->rx.hdr.type = NODE_RX_TYPE_CONN_IQ_SAMPLE_REPORT;
		iq_report->sample_count = radio_df_iq_samples_amount_get();
		iq_report->packet_status = packet_status;
		iq_report->rssi_ant_id = ant;
		iq_report->cte_info = *(struct pdu_cte_info *)&cte_info;
		iq_report->local_slot_durations = rx_params->slot_durations;
		/* Event counter is updated to next value during event
		 * preparation, hence it has to be subtracted to store the
		 * actual event counter value.
		 */
		iq_report->event_counter = lll->event_counter - 1;

		ftr = &iq_report->rx.rx_ftr;
		ftr->param = lll;
		ftr->rssi = ((rssi_ready) ? radio_rssi_get() :
					    BT_HCI_LE_RSSI_NOT_AVAILABLE);

		ull_rx_put(iq_report->rx.hdr.link, iq_report);

		return true;
	}

	return false;
}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_RX */

#if defined(CONFIG_BT_CTLR_DF_CONN_CTE_TX)
/**
 * @brief Get latest transmitted pdu_data instance
 *
 * @param lll Pointer to lll_conn object
 *
 * @return Return pointer to latest pdu_data instance
 */
static struct pdu_data *get_last_tx_pdu(struct lll_conn *lll)
{
	struct node_tx *tx;
	struct pdu_data *p;
	memq_link_t *link;

	link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail, (void **)&tx);
	if (lll->empty || !link) {
		p = radio_pkt_empty_get();
	} else {
		p = (void *)(tx->pdu + lll->packet_tx_head_offset);
	}

	return p;
}
#endif /* CONFIG_BT_CTLR_DF_CONN_CTE_TX */