Lines Matching +full:timing +full:- +full:ext
2 * Copyright (c) 2017-2021 Nordic Semiconductor ASA
4 * SPDX-License-Identifier: Apache-2.0
92 #define BT_ADV_TICKER_NODES ((TICKER_ID_ADV_LAST) - (TICKER_ID_ADV_STOP) + 1)
94 #define BT_ADV_AUX_TICKER_NODES ((TICKER_ID_ADV_AUX_LAST) - \
97 #define BT_ADV_SYNC_TICKER_NODES ((TICKER_ID_ADV_SYNC_LAST) - \
100 #define BT_ADV_ISO_TICKER_NODES ((TICKER_ID_ADV_ISO_LAST) - \
122 #define BT_SCAN_TICKER_NODES ((TICKER_ID_SCAN_LAST) - (TICKER_ID_SCAN_STOP) + 1)
127 #define BT_SCAN_AUX_TICKER_NODES ((TICKER_ID_SCAN_AUX_LAST) - \
131 #define BT_SCAN_SYNC_TICKER_NODES ((TICKER_ID_SCAN_SYNC_LAST) - \
134 #define BT_SCAN_SYNC_ISO_TICKER_NODES ((TICKER_ID_SCAN_SYNC_ISO_LAST) - \
136 (TICKER_ID_SCAN_SYNC_ISO_RESUME_LAST) - \
158 #define BT_CONN_TICKER_NODES ((TICKER_ID_CONN_LAST) - (TICKER_ID_CONN_BASE) + 1)
164 #define BT_CIG_TICKER_NODES ((TICKER_ID_CONN_ISO_LAST) - \
166 (TICKER_ID_CONN_ISO_RESUME_LAST) - \
233 #define BT_CTLR_MAX_CONNECTABLE (1U + MIN(((CONFIG_BT_MAX_CONN) - 1U), \
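The ticker-node count macros above all size a pool from an inclusive range of ticker IDs, i.e. last ID minus first ID plus one; the per-role counts are then summed into the controller's total ticker node count. A minimal sketch of the same pattern with hypothetical IDs (not the real TICKER_ID_* values):

/* Hypothetical ticker IDs, for illustration only. */
#define DEMO_TICKER_ID_ADV_BASE   2
#define DEMO_TICKER_ID_ADV_LAST   5
#define DEMO_TICKER_ID_SCAN_BASE  6
#define DEMO_TICKER_ID_SCAN_LAST  7

/* Inclusive range => count is (last - base + 1). */
#define DEMO_ADV_TICKER_NODES \
	((DEMO_TICKER_ID_ADV_LAST) - (DEMO_TICKER_ID_ADV_BASE) + 1)
#define DEMO_SCAN_TICKER_NODES \
	((DEMO_TICKER_ID_SCAN_LAST) - (DEMO_TICKER_ID_SCAN_BASE) + 1)

_Static_assert(DEMO_ADV_TICKER_NODES == 4, "4 adv ticker nodes");
_Static_assert(DEMO_SCAN_TICKER_NODES == 2, "2 scan ticker nodes");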
334 /* Semaphore to wake up thread on Rx-ed objects */
337 /* Declare prepare-event FIFO: mfifo_prep.
342 /* Declare done-event RXFIFO. This is a composite pool-backed MFIFO for rx_nodes.
344 * - mfifo_done: FIFO with pointers to struct node_rx_event_done
345 * - mem_done: Backing data pool for struct node_rx_event_done elements
346 * - mem_link_done: Pool of memq_link_t elements
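The done-event RXFIFO is thus three pieces: a FIFO carrying pointers, the pool of done-event nodes behind it, and a pool of memq links. A self-contained sketch of that shape using a plain ring buffer and static pools instead of the controller's MFIFO/mem macros (all demo_* names are illustrative):

#include <stdbool.h>
#include <stddef.h>

#define DEMO_DONE_CNT 4U /* assumed pool depth, for illustration */

struct demo_link { struct demo_link *next; void *mem; };
struct demo_event_done { struct demo_link *link; void *param; };

/* mfifo_done equivalent: ring of pointers (one spare slot so full != empty). */
static struct demo_event_done *demo_fifo_done[DEMO_DONE_CNT + 1U];
static size_t demo_f_first, demo_f_last;

/* mem_done / mem_link_done equivalents: backing pools for nodes and links. */
static struct demo_event_done demo_mem_done[DEMO_DONE_CNT];
static struct demo_link demo_mem_link_done[DEMO_DONE_CNT];

static bool demo_done_enqueue(struct demo_event_done *d)
{
	size_t next = (demo_f_last + 1U) % (DEMO_DONE_CNT + 1U);

	if (next == demo_f_first) {
		return false; /* FIFO full */
	}
	demo_fifo_done[demo_f_last] = d;
	demo_f_last = next;
	return true;
}

static struct demo_event_done *demo_done_dequeue(void)
{
	struct demo_event_done *d;

	if (demo_f_first == demo_f_last) {
		return NULL; /* FIFO empty */
	}
	d = demo_fifo_done[demo_f_first];
	demo_f_first = (demo_f_first + 1U) % (DEMO_DONE_CNT + 1U);
	return d;
}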
483 * fragments must be available for HCI complete-counting.
501 * comparison to a connection, the connection established uses incoming Rx-ed
505 * incoming Rx-ed PDU).
514 uint16_t quota_pdu; /* Number of un-utilized buffers */
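The quota_pdu field above counts buffers that are still un-utilized: allocation has to stop once the quota is exhausted and can resume as buffers are handed back. A hedged sketch of that style of bookkeeping, with hypothetical helper names (not the controller's API):

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical quota tracker mirroring the quota_pdu idea above. */
static uint16_t demo_quota_pdu;

static void demo_quota_init(uint16_t total)
{
	demo_quota_pdu = total;
}

/* Take one buffer before committing it to reception; fail when exhausted. */
static bool demo_quota_take(void)
{
	if (demo_quota_pdu == 0U) {
		return false;
	}
	demo_quota_pdu--;
	return true;
}

/* Give a buffer back once its PDU has been consumed upstream. */
static void demo_quota_give(void)
{
	demo_quota_pdu++;
}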
605 * is re-initialized but job objects were not re-initialized there is a in ll_init()
607 * just after re-initialization. After enqueue operation with that link, in ll_init()
796 * - Reset ULL context, i.e. stop ULL scheduling, abort LLL events etc. in ll_reset()
797 * - Reset LLL context, i.e. post LLL event abort, let LLL cleanup its in ll_reset()
799 * - Reset ULL static variables (which otherwise are only mem-zeroed in case in ll_reset()
800 * of power-on reset, wherein architecture startup mem-zeroes .bss in ll_reset()
802 * - Initialize ULL context variable, similar to on-power-up. in ll_reset()
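The steps above fix an ordering: stop ULL scheduling first, let LLL abort and clean up its current event, clear ULL static state that only a power-on reset would have zeroed via .bss initialization, then re-initialize as on power-up. A placeholder sketch of that ordering (stub names, not the controller's API):

/* Stubs standing in for the real ticker/LLL/init calls. */
static void demo_stop_ull_scheduling(void) { /* stop tickers, mark roles off */ }
static void demo_reset_lll_and_wait(void)  { /* post LLL abort, wait for done */ }
static void demo_clear_ull_statics(void)   { /* state only .bss-zeroed on power-on */ }
static void demo_init_ull_context(void)    { /* same init path as power-up */ }

static void demo_ll_reset(void)
{
	demo_stop_ull_scheduling();
	demo_reset_lll_and_wait();
	demo_clear_ull_statics();
	demo_init_ull_context();
}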
868 /* Re-initialize ULL internals */ in ll_reset()
870 /* Re-initialize the prep mfifo */ in ll_reset()
873 /* Re-initialize the free rx mfifo */ in ll_reset()
877 /* Re-initialize the free ll rx mfifo */ in ll_reset()
908 /* LLL reset must complete before returning - wait for in ll_reset()
981 cmplt = tx_cmplt_get(handle, &mfifo_fifo_tx_ack.f, rx->hdr.ack_last); in ll_rx_get()
1002 } else if (rx->hdr.type == NODE_RX_TYPE_RELEASE) { in ll_rx_get()
1012 } else if (rx->hdr.type == NODE_RX_TYPE_IQ_SAMPLE_REPORT_LLL_RELEASE) { in ll_rx_get()
1025 } else if (rx->hdr.type == NODE_RX_TYPE_SYNC_CHM_COMPLETE) { in ll_rx_get()
1039 } else if (rx->hdr.type == NODE_RX_TYPE_BIG_CHM_COMPLETE) { in ll_rx_get()
1082 switch (rx->hdr.type) { in ll_rx_dequeue()
1096 adv = (struct pdu_adv *)rx->pdu; in ll_rx_dequeue()
1097 if (adv->type != PDU_ADV_TYPE_EXT_IND) { in ll_rx_dequeue()
1101 rx_curr = rx->rx_ftr.extra; in ll_rx_dequeue()
1106 loop--; in ll_rx_dequeue()
1108 link_free = rx_curr->hdr.link; in ll_rx_dequeue()
1109 rx_curr = rx_curr->rx_ftr.extra; in ll_rx_dequeue()
1118 ull_scan_term_dequeue(rx->hdr.handle); in ll_rx_dequeue()
1129 adv = ull_adv_set_get(rx->hdr.handle); in ll_rx_dequeue()
1132 lll_aux = adv->lll.aux; in ll_rx_dequeue()
1138 aux->is_started = 0U; in ll_rx_dequeue()
1142 struct lll_conn *lll_conn = adv->lll.conn; in ll_rx_dequeue()
1145 adv->is_enabled = 0U; in ll_rx_dequeue()
1150 LL_ASSERT(!lll_conn->link_tx_free); in ll_rx_dequeue()
1152 memq_link_t *memq_link = memq_deinit(&lll_conn->memq_tx.head, in ll_rx_dequeue()
1153 &lll_conn->memq_tx.tail); in ll_rx_dequeue()
1156 lll_conn->link_tx_free = memq_link; in ll_rx_dequeue()
1161 adv->lll.conn = NULL; in ll_rx_dequeue()
1163 ll_rx_release(adv->node_rx_cc_free); in ll_rx_dequeue()
1164 adv->node_rx_cc_free = NULL; in ll_rx_dequeue()
1166 ll_rx_link_release(adv->link_cc_free); in ll_rx_dequeue()
1167 adv->link_cc_free = NULL; in ll_rx_dequeue()
1170 adv->is_enabled = 0U; in ll_rx_dequeue()
1179 struct node_rx_cc *cc = (void *)rx->pdu; in ll_rx_dequeue()
1180 struct node_rx_ftr *ftr = &(rx->rx_ftr); in ll_rx_dequeue()
1185 } else if ((cc->status == BT_HCI_ERR_ADV_TIMEOUT) || cc->role) { in ll_rx_dequeue()
1190 lll = ftr->param; in ll_rx_dequeue()
1193 if (cc->status == BT_HCI_ERR_ADV_TIMEOUT) { in ll_rx_dequeue()
1198 conn_lll = lll->conn; in ll_rx_dequeue()
1200 lll->conn = NULL; in ll_rx_dequeue()
1202 LL_ASSERT(!conn_lll->link_tx_free); in ll_rx_dequeue()
1203 memq_link = memq_deinit(&conn_lll->memq_tx.head, in ll_rx_dequeue()
1204 &conn_lll->memq_tx.tail); in ll_rx_dequeue()
1206 conn_lll->link_tx_free = memq_link; in ll_rx_dequeue()
1211 /* Release un-utilized node rx */ in ll_rx_dequeue()
1212 if (adv->node_rx_cc_free) { in ll_rx_dequeue()
1215 rx_free = adv->node_rx_cc_free; in ll_rx_dequeue()
1216 adv->node_rx_cc_free = NULL; in ll_rx_dequeue()
1223 if (lll->aux) { in ll_rx_dequeue()
1226 aux = HDR_LLL2ULL(lll->aux); in ll_rx_dequeue()
1227 aux->is_started = 0U; in ll_rx_dequeue()
1235 if (!lll->node_rx_adv_term) { in ll_rx_dequeue()
1236 adv->is_enabled = 0U; in ll_rx_dequeue()
1239 adv->is_enabled = 0U; in ll_rx_dequeue()
1248 struct ll_scan_set *scan = HDR_LLL2ULL(ftr->param); in ll_rx_dequeue()
1260 scan_other->lll.conn = NULL; in ll_rx_dequeue()
1261 scan_other->is_enabled = 0U; in ll_rx_dequeue()
1266 scan->lll.conn = NULL; in ll_rx_dequeue()
1267 scan->is_enabled = 0U; in ll_rx_dequeue()
1359 case NODE_RX_TYPE_USER_START ... NODE_RX_TYPE_USER_END - 1: in ll_rx_dequeue()
1391 LL_ASSERT(rx->hdr.type != NODE_RX_TYPE_NONE); in ll_rx_dequeue()
1399 /* FIXME: clean up when porting Mesh Ext. */ in ll_rx_dequeue()
1402 } else if (rx->hdr.type == NODE_RX_TYPE_MESH_ADV_CPLT) { in ll_rx_dequeue()
1408 adv->is_enabled = 0U; in ll_rx_dequeue()
1413 scan->is_enabled = 0U; in ll_rx_dequeue()
1429 rx = rx->hdr.next; in ll_rx_mem_release()
1431 switch (rx_free->hdr.type) { in ll_rx_mem_release()
1445 struct ll_adv_iso_set *adv_iso = rx_free->rx_ftr.param; in ll_rx_mem_release()
1468 (void *)rx_free->pdu; in ll_rx_mem_release()
1473 } else if (cc->status == BT_HCI_ERR_ADV_TIMEOUT) { in ll_rx_mem_release()
1480 } else if (cc->status == BT_HCI_ERR_UNKNOWN_CONN_ID) { in ll_rx_mem_release()
1495 LL_ASSERT(!cc->status); in ll_rx_mem_release()
1558 case NODE_RX_TYPE_USER_START ... NODE_RX_TYPE_USER_END - 1: in ll_rx_mem_release()
1581 LL_ASSERT(rx_free->hdr.type != NODE_RX_TYPE_NONE); in ll_rx_mem_release()
1593 (void *)rx_free->pdu; in ll_rx_mem_release()
1594 uint8_t status = se->status; in ll_rx_mem_release()
1608 sync = (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1618 sync->node_rx_lost.rx.hdr.link; in ll_rx_mem_release()
1636 (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1646 (void *)rx_free->pdu; in ll_rx_mem_release()
1648 if (!se->status) { in ll_rx_mem_release()
1659 (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1685 if (IS_ACL_HANDLE(rx_free->hdr.handle)) { in ll_rx_mem_release()
1689 conn = ll_conn_get(rx_free->hdr.handle); in ll_rx_mem_release()
1691 LL_ASSERT(!conn->lll.link_tx_free); in ll_rx_mem_release()
1692 link = memq_deinit(&conn->lll.memq_tx.head, in ll_rx_mem_release()
1693 &conn->lll.memq_tx.tail); in ll_rx_mem_release()
1695 conn->lll.link_tx_free = link; in ll_rx_mem_release()
1698 } else if (IS_CIS_HANDLE(rx_free->hdr.handle)) { in ll_rx_mem_release()
1731 ll_rx_link_quota_update(-1); in ll_rx_link_quota_dec()
1762 rx_hdr->ack_last = mfifo_fifo_tx_ack.l; in ll_rx_put()
1812 tx->handle = handle; in ll_tx_ack_put()
1813 tx->node = node_tx; in ll_tx_ack_put()
1823 *ticker_id = (TICKER_NODES - FLASH_TICKER_NODES - COEX_TICKER_NODES); in ll_timeslice_ticker_id_get()
1830 *ticker_id = (TICKER_NODES - COEX_TICKER_NODES); in ll_coex_ticker_id_get()
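Both getters hand out IDs carved from the tail of the ticker node array: the coexistence node(s) occupy the very end and the flash timeslice node(s) sit just below them. A worked example with hypothetical sizes:

/* Hypothetical sizing, for illustration only. */
#define DEMO_TICKER_NODES        20
#define DEMO_FLASH_TICKER_NODES   2
#define DEMO_COEX_TICKER_NODES    1

/* Flash timeslice nodes get IDs 17..18; the coex node gets ID 19. */
#define DEMO_TICKER_ID_FLASH_BASE \
	(DEMO_TICKER_NODES - DEMO_FLASH_TICKER_NODES - DEMO_COEX_TICKER_NODES)
#define DEMO_TICKER_ID_COEX_BASE \
	(DEMO_TICKER_NODES - DEMO_COEX_TICKER_NODES)

_Static_assert(DEMO_TICKER_ID_FLASH_BASE == 17, "flash timeslice base id");
_Static_assert(DEMO_TICKER_ID_COEX_BASE == 19, "coex base id");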
1861 * unless the operation was executed inline due to same-priority caller/
1940 return -ENOLCK; in ull_ticker_stop_with_mark()
1951 return -ENOLCK; in ull_ticker_stop_with_mark()
1954 return -EALREADY; in ull_ticker_stop_with_mark()
1961 return -ENOLCK; in ull_ticker_stop_with_mark()
1964 if (err && (err != -EALREADY)) { in ull_ticker_stop_with_mark()
1998 return -EALREADY; in ull_disable()
2004 hdr->disabled_param = &sem; in ull_disable()
2005 hdr->disabled_cb = disabled_cb; in ull_disable()
2019 return -EALREADY; in ull_disable()
2057 rx_hdr->ack_last = ull_conn_ack_last_idx_get(); in ull_rx_put()
2093 memcpy(&e->prepare_param, prepare_param, sizeof(e->prepare_param)); in ull_prepare_enqueue()
2094 e->prepare_cb = prepare_cb; in ull_prepare_enqueue()
2095 e->is_abort_cb = is_abort_cb; in ull_prepare_enqueue()
2096 e->abort_cb = abort_cb; in ull_prepare_enqueue()
2097 e->is_resume = is_resume; in ull_prepare_enqueue()
2098 e->is_aborted = 0U; in ull_prepare_enqueue()
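ull_prepare_enqueue copies the prepare parameters into a free pipeline slot, attaches the prepare/abort callbacks that later decide whether and how the event may be aborted, and clears the resume/aborted bookkeeping. A simplified sketch of that fill step with assumed structure layouts (the controller's real lll_prepare_param and event types differ):

#include <stdint.h>
#include <string.h>

/* Simplified shapes; the controller's lll_prepare_param/event differ. */
struct demo_prepare_param {
	void *param;
	uint32_t ticks_at_expire;
};

struct demo_event {
	struct demo_prepare_param prepare_param;
	int (*prepare_cb)(struct demo_prepare_param *p);
	int (*is_abort_cb)(void *next, void *curr);
	void (*abort_cb)(struct demo_prepare_param *p, void *param);
	uint8_t is_resume;
	uint8_t is_aborted;
};

/* Fill a free pipeline slot the same way the lines above do. */
static void demo_prepare_fill(struct demo_event *e,
			      const struct demo_prepare_param *prepare_param,
			      int (*prepare_cb)(struct demo_prepare_param *),
			      int (*is_abort_cb)(void *, void *),
			      void (*abort_cb)(struct demo_prepare_param *, void *),
			      uint8_t is_resume)
{
	memcpy(&e->prepare_param, prepare_param, sizeof(e->prepare_param));
	e->prepare_cb = prepare_cb;
	e->is_abort_cb = is_abort_cb;
	e->abort_cb = abort_cb;
	e->is_resume = is_resume;
	e->is_aborted = 0U;
}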
2130 * - 2 continuous scan prepare in queue (1M and Coded PHY) in ull_prepare_dequeue()
2131 * - 2 continuous scan resume in queue (1M and Coded PHY) in ull_prepare_dequeue()
2132 * - 1 directed adv prepare in ull_prepare_dequeue()
2133 * - 1 directed adv resume in ull_prepare_dequeue()
2134 * - 1 any other role with time reservation in ull_prepare_dequeue()
2138 * - 1 scan prepare (1M) in ull_prepare_dequeue()
2139 * - 1 scan prepare (Coded PHY) in ull_prepare_dequeue()
2140 * - 1 directed adv prepare in ull_prepare_dequeue()
2143 * - 1 scan resume (1M) in ull_prepare_dequeue()
2144 * - 1 scan resume (Coded PHY) in ull_prepare_dequeue()
2145 * - 1 directed adv resume in ull_prepare_dequeue()
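Summing the worst case enumerated in the first list above gives the bound the dequeue loop guards against: 2 + 2 + 1 + 1 + 1 entries. Restated as a macro (illustrative; the controller may express its loop constant differently):

/* 2 scan prepares + 2 scan resumes + 1 directed adv prepare
 * + 1 directed adv resume + 1 other reserved role = 7 entries.
 */
#define DEMO_PREPARE_PIPELINE_MAX (2U + 2U + 1U + 1U + 1U)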
2153 void *param = next->prepare_param.param; in ull_prepare_dequeue()
2154 uint8_t is_aborted = next->is_aborted; in ull_prepare_dequeue()
2155 uint8_t is_resume = next->is_resume; in ull_prepare_dequeue()
2160 loop--; in ull_prepare_dequeue()
2215 if (!next->is_aborted && in ull_prepare_dequeue()
2216 ((!next->is_resume && in ull_prepare_dequeue()
2217 ((next->prepare_param.param == in ull_prepare_dequeue()
2219 (next->prepare_param.param == in ull_prepare_dequeue()
2221 (next->is_resume && in ull_prepare_dequeue()
2223 ((next->prepare_param.param == in ull_prepare_dequeue()
2225 (next->prepare_param.param == in ull_prepare_dequeue()
2242 return &evdone->extra; in ull_event_done_extra_get()
2254 extra->type = type; in ull_done_extra_type_set()
2264 /* Obtain new node that signals "Done of an RX-event". in ull_event_done()
2278 link = evdone->hdr.link; in ull_event_done()
2279 evdone->hdr.link = NULL; in ull_event_done()
2281 evdone->hdr.type = NODE_RX_TYPE_EVENT_DONE; in ull_event_done()
2282 evdone->param = param; in ull_event_done()
2291 * @brief Extract timing from completed event
2293 * @param[in] node_rx_event_done Done event containing fresh timing information
2307 done->extra.drift.start_to_address_actual_us; in ull_drift_ticks_get()
2309 done->extra.drift.window_widening_event_us; in ull_drift_ticks_get()
2311 done->extra.drift.preamble_to_addr_us; in ull_drift_ticks_get()
2322 HAL_TICKER_US_TO_TICKS((start_to_address_expected_us - in ull_drift_ticks_get()
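The function reads the measured start-to-access-address time, the event's window widening and the preamble-to-address time out of the done event, and converts the deviation from the expected value into ticker ticks. A rough, self-contained sketch of that idea, assuming a 32768 Hz ticker and folding scheduling margins into a single jitter_us parameter; the controller's real computation splits the adjustment into separate ticks_drift_plus/ticks_drift_minus terms:

#include <stdint.h>

/* Illustrative conversion to 32768 Hz ticker ticks. */
#define DEMO_US_TO_TICKS(us) ((uint32_t)(((uint64_t)(us) * 32768U) / 1000000U))

/* Sketch: expected start-to-address time is jitter margin + window
 * widening + preamble-to-address time; the signed deviation of the
 * measured value from it becomes the tick adjustment for the next
 * anchor point.
 */
static int32_t demo_drift_ticks(uint32_t start_to_address_actual_us,
				uint32_t jitter_us,
				uint32_t window_widening_event_us,
				uint32_t preamble_to_addr_us)
{
	uint32_t expected_us = jitter_us + window_widening_event_us +
			       preamble_to_addr_us;

	if (start_to_address_actual_us <= expected_us) {
		/* Peer transmitted earlier than expected: pull the anchor in. */
		return -(int32_t)DEMO_US_TO_TICKS(expected_us -
						  start_to_address_actual_us);
	}

	/* Peer transmitted later than expected: push the anchor out. */
	return (int32_t)DEMO_US_TO_TICKS(start_to_address_actual_us -
					 expected_us);
}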
2469 rx->link = link; in rx_replenish()
2475 max--; in rx_replenish()
2502 link->mem = NULL; in rx_replenish()
2503 rx->link = link; in rx_replenish()
2567 link_tx = ull_conn_ack_by_last_peek(rx->ack_last, in rx_demux()
2570 rx_demux_conn_tx_ack(rx->ack_last, handle, in rx_demux()
2651 *handle = tx->handle; in tx_cmplt_get()
2657 } else if (IS_CIS_HANDLE(tx->handle) || in tx_cmplt_get()
2658 IS_ADV_ISO_HANDLE(tx->handle)) { in tx_cmplt_get()
2679 tx_node = tx->node; in tx_cmplt_get()
2684 sdu_fragments = tx_node->sdu_fragments; in tx_cmplt_get()
2689 NODE_TX_FRAGMENTS_SET(tx->node, sdu_fragments); in tx_cmplt_get()
2694 ll_iso_link_tx_release(tx_node->link); in tx_cmplt_get()
2729 tx_node = tx->node; in tx_cmplt_get()
2730 p = (void *)tx_node->pdu; in tx_cmplt_get()
2733 (p->ll_id == PDU_DATA_LLID_DATA_START || in tx_cmplt_get()
2734 p->ll_id == PDU_DATA_LLID_DATA_CONTINUE)) || in tx_cmplt_get()
2738 NODE_TX_DATA_SET(tx->node); in tx_cmplt_get()
2744 NODE_TX_CTRL_SET(tx->node); in tx_cmplt_get()
2763 } while (tx && tx->handle == *handle); in tx_cmplt_get()
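tx_cmplt_get walks the Tx-ack FIFO and counts how many completed packets belong to one handle, stopping at the first ack for a different handle. A simplified sketch of that per-handle counting over a ring of ack entries; the real function additionally distinguishes ISO SDU fragments and data vs. control PDUs, which is omitted here:

#include <stddef.h>
#include <stdint.h>

struct demo_tx_ack {
	uint16_t handle;
	void *node; /* payload pointer, unused in this sketch */
};

/* Count consecutive acks for one handle in a ring of 'cnt' entries,
 * walking from *first towards 'last'; *first is updated so the caller
 * can resume with the next handle.
 */
static uint8_t demo_tx_cmplt_get(uint16_t *handle, size_t *first, size_t last,
				 const struct demo_tx_ack *acks, size_t cnt)
{
	uint8_t cmplt = 0U;
	size_t i = *first;

	if ((cnt == 0U) || (i == last)) {
		return 0U;
	}

	*handle = acks[i].handle;
	do {
		cmplt++;
		i = (i + 1U) % cnt;
	} while ((i != last) && (acks[i].handle == *handle));

	*first = i;

	return cmplt;
}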
2808 switch (rx->type) { in rx_demux_rx()
2829 adv = (void *)((struct node_rx_pdu *)rx)->pdu; in rx_demux_rx()
2830 if (adv->type != PDU_ADV_TYPE_EXT_IND) { in rx_demux_rx()
2871 conn = ll_conn_get(rx->handle); in rx_demux_rx()
2876 rx->type = NODE_RX_TYPE_RELEASE; in rx_demux_rx()
2910 if (rx && rx->type != NODE_RX_TYPE_RETAIN) { in rx_demux_rx()
2994 ull_hdr = done->param; in rx_demux_event_done()
3001 switch (done->extra.type) { in rx_demux_event_done()
3088 done->extra.type = 0U; in rx_demux_event_done()
3101 if (ull_hdr && !ull_ref_get(ull_hdr) && ull_hdr->disabled_cb) { in rx_demux_event_done()
3102 ull_hdr->disabled_cb(ull_hdr->disabled_param); in rx_demux_event_done()
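ull_disable() above registers a semaphore and callback in the object's header, and rx_demux_event_done fires that callback once the reference count shows no LLL event still using the object. A minimal sketch of this disable handshake, with an assumed header layout; in the controller the reference decrement happens in the LLL/ULL done handling, it is inlined here only to keep the sketch self-contained:

#include <stdint.h>

/* Assumed header layout: a reference count of outstanding LLL events
 * plus the disable hook the lines above set and invoke.
 */
struct demo_ull_hdr {
	uint8_t ref;
	void (*disabled_cb)(void *param);
	void *disabled_param;
};

/* Caller side (cf. ull_disable): register "tell me when fully idle". */
static void demo_disable_request(struct demo_ull_hdr *hdr,
				 void (*cb)(void *), void *param)
{
	hdr->disabled_param = param;
	hdr->disabled_cb = cb;
}

/* Done path (cf. rx_demux_event_done): drop the event reference and, if
 * none remain, fire the callback so the waiter (e.g. a semaphore give)
 * can proceed.
 */
static void demo_event_done(struct demo_ull_hdr *hdr)
{
	if (hdr->ref != 0U) {
		hdr->ref--;
	}

	if ((hdr->ref == 0U) && hdr->disabled_cb) {
		hdr->disabled_cb(hdr->disabled_param);
	}
}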
3121 while ((max--) && mfifo_enqueue_idx_get(n, f, *l, &idx)) { in ull_rxfifo_alloc()
3136 link->mem = NULL; in ull_rxfifo_alloc()
3137 rx->link = link; in ull_rxfifo_alloc()
3156 rx->link = link; in ull_rxfifo_release()