Lines Matching +full:extra +full:- +full:wait +full:- +full:time

2  * Copyright (c) 2017-2021 Nordic Semiconductor ASA
4 * SPDX-License-Identifier: Apache-2.0
92 #define BT_ADV_TICKER_NODES ((TICKER_ID_ADV_LAST) - (TICKER_ID_ADV_STOP) + 1)
94 #define BT_ADV_AUX_TICKER_NODES ((TICKER_ID_ADV_AUX_LAST) - \
97 #define BT_ADV_SYNC_TICKER_NODES ((TICKER_ID_ADV_SYNC_LAST) - \
100 #define BT_ADV_ISO_TICKER_NODES ((TICKER_ID_ADV_ISO_LAST) - \
122 #define BT_SCAN_TICKER_NODES ((TICKER_ID_SCAN_LAST) - (TICKER_ID_SCAN_STOP) + 1)
127 #define BT_SCAN_AUX_TICKER_NODES ((TICKER_ID_SCAN_AUX_LAST) - \
131 #define BT_SCAN_SYNC_TICKER_NODES ((TICKER_ID_SCAN_SYNC_LAST) - \
134 #define BT_SCAN_SYNC_ISO_TICKER_NODES ((TICKER_ID_SCAN_SYNC_ISO_LAST) - \
136 (TICKER_ID_SCAN_SYNC_ISO_RESUME_LAST) - \
158 #define BT_CONN_TICKER_NODES ((TICKER_ID_CONN_LAST) - (TICKER_ID_CONN_BASE) + 1)
164 #define BT_CIG_TICKER_NODES ((TICKER_ID_CONN_ISO_LAST) - \
166 (TICKER_ID_CONN_ISO_RESUME_LAST) - \
230 #define BT_CTLR_MAX_CONNECTABLE (1U + MIN(((CONFIG_BT_MAX_CONN) - 1U), \
288 /* NOTE: As ticker job is not disabled inside radio events, no need for extra
296 /* NOTE: Extended Advertising needs one extra ticker operation being enqueued
331 /* Semaphore to wake up the thread on Rx-ed objects */
334 /* Declare prepare-event FIFO: mfifo_prep.
339 /* Declare done-event RXFIFO. This is a composite pool-backed MFIFO for rx_nodes.
341 * - mfifo_done: FIFO with pointers to struct node_rx_event_done
342 * - mem_done: Backing data pool for struct node_rx_event_done elements
343 * - mem_link_done: Pool of memq_link_t elements
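As a reading aid for the composite described in the three bullet lines above, here is a minimal sketch, in plain C, of a pool-backed FIFO of done-event pointers. Every name and size below is a hypothetical stand-in; the controller itself builds this with its MFIFO/mem-pool macros rather than open-coded arrays.

#include <stdint.h>

/* Hypothetical sizes and names; only the split into a pointer FIFO, a data
 * pool and a link pool mirrors the description above.
 */
#define DONE_MAX_SKETCH 8U

struct event_done_sketch {
        void *link;  /* queue link used while the node travels the rx path */
        void *param; /* context (e.g. the event header) this done refers to */
};

static struct event_done_sketch *mfifo_done_sketch[DONE_MAX_SKETCH]; /* FIFO of pointers */
static struct event_done_sketch mem_done_sketch[DONE_MAX_SKETCH];    /* backing data pool */
static void *mem_link_done_sketch[DONE_MAX_SKETCH];                  /* link element pool */
static uint8_t mfifo_done_first_sketch, mfifo_done_last_sketch;      /* enqueue/dequeue indices */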
348 * When there are radio events with time reservations lower than the preemption
359 * their time reservations, these are not yet late and hence no more additional
382 /* Maximum time allowed for completing synchronous LLL disabling via
480 * fragments must be available for HCI complete-counting.
498 * comparison to a connection, the connection established uses incoming Rx-ed
502 * incoming Rx-ed PDU).
511 uint16_t quota_pdu; /* Number of un-utilized buffers */
602 * is re-initialized but job objects were not re-initialized there is a in ll_init()
604 * just after re-initialization. After enqueue operation with that link, in ll_init()
629 /* Initialize semaphore for ticker API blocking wait */ in ll_init()
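The line above initializes the semaphore used to block on ticker API completion. A minimal sketch of that pattern with Zephyr's k_sem API follows; the callback shape and all _sketch names are assumptions, only k_sem_init/k_sem_give/k_sem_take are real kernel calls.

#include <zephyr/kernel.h>

static struct k_sem sem_ticker_cb_sketch;

/* Assumed shape of a ticker operation status callback: it runs once the
 * queued ticker operation has been processed and releases the waiter.
 */
static void ticker_op_done_sketch(uint32_t status, void *param)
{
        (void)status;
        (void)param;
        k_sem_give(&sem_ticker_cb_sketch);
}

static void ticker_sem_init_sketch(void)
{
        /* Initial count 0, limit 1: the first give unblocks exactly one waiter */
        k_sem_init(&sem_ticker_cb_sketch, 0, 1);
}

static void ticker_sem_wait_sketch(void)
{
        /* Block the calling thread until the status callback signals completion */
        k_sem_take(&sem_ticker_cb_sketch, K_FOREVER);
}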
793 * - Reset ULL context, i.e. stop ULL scheduling, abort LLL events etc. in ll_reset()
794 * - Reset LLL context, i.e. post LLL event abort, let LLL cleanup its in ll_reset()
796 * - Reset ULL static variables (which otherwise were mem-zeroed in case in ll_reset()
797 * of power-on reset, wherein architecture startup mem-zeroes .bss in ll_reset()
799 * - Initialize ULL context variable, similar to on-power-up. in ll_reset()
865 /* Re-initialize ULL internals */ in ll_reset()
867 /* Re-initialize the prep mfifo */ in ll_reset()
870 /* Re-initialize the free rx mfifo */ in ll_reset()
874 /* Re-initialize the free ll rx mfifo */ in ll_reset()
889 * we use a semaphore to wait for perform_lll_reset to in ll_reset()
905 /* LLL reset must complete before returning - wait for in ll_reset()
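These wait-related lines, together with the reset sequence outlined in the earlier ll_reset() comments, describe ULL queuing a reset job into the LLL context and blocking until it completes. The sketch below shows that handoff under stated assumptions: enqueue_to_lll() is a hypothetical stand-in for the controller's actual scheduling primitive (a mayfly), and only the k_sem calls are real Zephyr API.

#include <zephyr/kernel.h>

static struct k_sem sem_lll_reset_done_sketch;

/* Hypothetical stand-in for queuing work into the LLL execution context */
extern void enqueue_to_lll(void (*fp)(void *), void *param);

static void perform_lll_reset_sketch(void *param)
{
        (void)param;
        /* ...reset LLL state, running in the LLL execution context... */
        k_sem_give(&sem_lll_reset_done_sketch); /* tell ULL the reset finished */
}

static void ll_reset_wait_sketch(void)
{
        k_sem_init(&sem_lll_reset_done_sketch, 0, 1);
        enqueue_to_lll(perform_lll_reset_sketch, NULL);

        /* LLL reset must complete before returning to the caller */
        k_sem_take(&sem_lll_reset_done_sketch, K_FOREVER);
}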
978 cmplt = tx_cmplt_get(handle, &mfifo_fifo_tx_ack.f, rx->hdr.ack_last); in ll_rx_get()
999 } else if (rx->hdr.type == NODE_RX_TYPE_RELEASE) { in ll_rx_get()
1009 } else if (rx->hdr.type == NODE_RX_TYPE_IQ_SAMPLE_REPORT_LLL_RELEASE) { in ll_rx_get()
1022 } else if (rx->hdr.type == NODE_RX_TYPE_SYNC_CHM_COMPLETE) { in ll_rx_get()
1036 } else if (rx->hdr.type == NODE_RX_TYPE_BIG_CHM_COMPLETE) { in ll_rx_get()
1079 switch (rx->hdr.type) { in ll_rx_dequeue()
1093 adv = (struct pdu_adv *)rx->pdu; in ll_rx_dequeue()
1094 if (adv->type != PDU_ADV_TYPE_EXT_IND) { in ll_rx_dequeue()
1098 rx_curr = rx->rx_ftr.extra; in ll_rx_dequeue()
1103 loop--; in ll_rx_dequeue()
1105 link_free = rx_curr->hdr.link; in ll_rx_dequeue()
1106 rx_curr = rx_curr->rx_ftr.extra; in ll_rx_dequeue()
1115 ull_scan_term_dequeue(rx->hdr.handle); in ll_rx_dequeue()
1126 adv = ull_adv_set_get(rx->hdr.handle); in ll_rx_dequeue()
1129 lll_aux = adv->lll.aux; in ll_rx_dequeue()
1135 aux->is_started = 0U; in ll_rx_dequeue()
1139 struct lll_conn *lll_conn = adv->lll.conn; in ll_rx_dequeue()
1142 adv->is_enabled = 0U; in ll_rx_dequeue()
1147 LL_ASSERT(!lll_conn->link_tx_free); in ll_rx_dequeue()
1149 memq_link_t *memq_link = memq_deinit(&lll_conn->memq_tx.head, in ll_rx_dequeue()
1150 &lll_conn->memq_tx.tail); in ll_rx_dequeue()
1153 lll_conn->link_tx_free = memq_link; in ll_rx_dequeue()
1158 adv->lll.conn = NULL; in ll_rx_dequeue()
1160 ll_rx_release(adv->node_rx_cc_free); in ll_rx_dequeue()
1161 adv->node_rx_cc_free = NULL; in ll_rx_dequeue()
1163 ll_rx_link_release(adv->link_cc_free); in ll_rx_dequeue()
1164 adv->link_cc_free = NULL; in ll_rx_dequeue()
1167 adv->is_enabled = 0U; in ll_rx_dequeue()
1176 struct node_rx_cc *cc = (void *)rx->pdu; in ll_rx_dequeue()
1177 struct node_rx_ftr *ftr = &(rx->rx_ftr); in ll_rx_dequeue()
1182 } else if ((cc->status == BT_HCI_ERR_ADV_TIMEOUT) || cc->role) { in ll_rx_dequeue()
1187 lll = ftr->param; in ll_rx_dequeue()
1190 if (cc->status == BT_HCI_ERR_ADV_TIMEOUT) { in ll_rx_dequeue()
1195 conn_lll = lll->conn; in ll_rx_dequeue()
1197 lll->conn = NULL; in ll_rx_dequeue()
1199 LL_ASSERT(!conn_lll->link_tx_free); in ll_rx_dequeue()
1200 memq_link = memq_deinit(&conn_lll->memq_tx.head, in ll_rx_dequeue()
1201 &conn_lll->memq_tx.tail); in ll_rx_dequeue()
1203 conn_lll->link_tx_free = memq_link; in ll_rx_dequeue()
1208 /* Release un-utilized node rx */ in ll_rx_dequeue()
1209 if (adv->node_rx_cc_free) { in ll_rx_dequeue()
1212 rx_free = adv->node_rx_cc_free; in ll_rx_dequeue()
1213 adv->node_rx_cc_free = NULL; in ll_rx_dequeue()
1220 if (lll->aux) { in ll_rx_dequeue()
1223 aux = HDR_LLL2ULL(lll->aux); in ll_rx_dequeue()
1224 aux->is_started = 0U; in ll_rx_dequeue()
1232 if (!lll->node_rx_adv_term) { in ll_rx_dequeue()
1233 adv->is_enabled = 0U; in ll_rx_dequeue()
1236 adv->is_enabled = 0U; in ll_rx_dequeue()
1245 struct ll_scan_set *scan = HDR_LLL2ULL(ftr->param); in ll_rx_dequeue()
1257 scan_other->lll.conn = NULL; in ll_rx_dequeue()
1258 scan_other->is_enabled = 0U; in ll_rx_dequeue()
1263 scan->lll.conn = NULL; in ll_rx_dequeue()
1264 scan->is_enabled = 0U; in ll_rx_dequeue()
1356 case NODE_RX_TYPE_USER_START ... NODE_RX_TYPE_USER_END - 1: in ll_rx_dequeue()
1388 LL_ASSERT(rx->hdr.type != NODE_RX_TYPE_NONE); in ll_rx_dequeue()
1399 } else if (rx->hdr.type == NODE_RX_TYPE_MESH_ADV_CPLT) { in ll_rx_dequeue()
1405 adv->is_enabled = 0U; in ll_rx_dequeue()
1410 scan->is_enabled = 0U; in ll_rx_dequeue()
1426 rx = rx->hdr.next; in ll_rx_mem_release()
1428 switch (rx_free->hdr.type) { in ll_rx_mem_release()
1442 struct ll_adv_iso_set *adv_iso = rx_free->rx_ftr.param; in ll_rx_mem_release()
1465 (void *)rx_free->pdu; in ll_rx_mem_release()
1470 } else if (cc->status == BT_HCI_ERR_ADV_TIMEOUT) { in ll_rx_mem_release()
1477 } else if (cc->status == BT_HCI_ERR_UNKNOWN_CONN_ID) { in ll_rx_mem_release()
1492 LL_ASSERT(!cc->status); in ll_rx_mem_release()
1555 case NODE_RX_TYPE_USER_START ... NODE_RX_TYPE_USER_END - 1: in ll_rx_mem_release()
1578 LL_ASSERT(rx_free->hdr.type != NODE_RX_TYPE_NONE); in ll_rx_mem_release()
1590 (void *)rx_free->pdu; in ll_rx_mem_release()
1591 uint8_t status = se->status; in ll_rx_mem_release()
1605 sync = (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1615 sync->node_rx_lost.rx.hdr.link; in ll_rx_mem_release()
1633 (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1643 (void *)rx_free->pdu; in ll_rx_mem_release()
1645 if (!se->status) { in ll_rx_mem_release()
1656 (void *)rx_free->rx_ftr.param; in ll_rx_mem_release()
1682 if (IS_ACL_HANDLE(rx_free->hdr.handle)) { in ll_rx_mem_release()
1686 conn = ll_conn_get(rx_free->hdr.handle); in ll_rx_mem_release()
1688 LL_ASSERT(!conn->lll.link_tx_free); in ll_rx_mem_release()
1689 link = memq_deinit(&conn->lll.memq_tx.head, in ll_rx_mem_release()
1690 &conn->lll.memq_tx.tail); in ll_rx_mem_release()
1692 conn->lll.link_tx_free = link; in ll_rx_mem_release()
1695 } else if (IS_CIS_HANDLE(rx_free->hdr.handle)) { in ll_rx_mem_release()
1728 ll_rx_link_quota_update(-1); in ll_rx_link_quota_dec()
1759 rx_hdr->ack_last = mfifo_fifo_tx_ack.l; in ll_rx_put()
1809 tx->handle = handle; in ll_tx_ack_put()
1810 tx->node = node_tx; in ll_tx_ack_put()
1820 *ticker_id = (TICKER_NODES - FLASH_TICKER_NODES - COEX_TICKER_NODES); in ll_timeslice_ticker_id_get()
1827 *ticker_id = (TICKER_NODES - COEX_TICKER_NODES); in ll_coex_ticker_id_get()
1854 * @brief Take the ticker API semaphore (if applicable) and wait for operation
1858 * unless the operation was executed inline due to same-priority caller/
1893 * prior to this function call. Take the semaphore and wait, in ull_ticker_status_take()
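A hedged sketch of the take-and-wait behaviour this doc comment describes: if the ticker API reports the operation as only queued, block on the semaphore until the operation's status callback has run, then return the status it stored. The busy code and helper names below are illustrative placeholders, not the ticker API's real identifiers.

#include <zephyr/kernel.h>

#define TICKER_BUSY_SKETCH 2U /* placeholder for the ticker API's "queued/busy" status */

static struct k_sem sem_ticker_api_cb_sketch;

static uint32_t ticker_status_take_sketch(uint32_t ret, uint32_t volatile *ret_cb)
{
        if (ret == TICKER_BUSY_SKETCH) {
                /* Operation was only enqueued; wait for the ticker status
                 * callback to give the semaphore before reading *ret_cb.
                 */
                k_sem_take(&sem_ticker_api_cb_sketch, K_FOREVER);
        }

        /* Status written by the callback, or already valid if the operation
         * executed inline in a same-priority caller context.
         */
        return *ret_cb;
}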
1937 return -ENOLCK; in ull_ticker_stop_with_mark()
1948 return -ENOLCK; in ull_ticker_stop_with_mark()
1951 return -EALREADY; in ull_ticker_stop_with_mark()
1958 return -ENOLCK; in ull_ticker_stop_with_mark()
1961 if (err && (err != -EALREADY)) { in ull_ticker_stop_with_mark()
1995 return -EALREADY; in ull_disable()
2001 hdr->disabled_param = &sem; in ull_disable()
2002 hdr->disabled_cb = disabled_cb; in ull_disable()
2016 return -EALREADY; in ull_disable()
2054 rx_hdr->ack_last = ull_conn_ack_last_idx_get(); in ull_rx_put()
2090 memcpy(&e->prepare_param, prepare_param, sizeof(e->prepare_param)); in ull_prepare_enqueue()
2091 e->prepare_cb = prepare_cb; in ull_prepare_enqueue()
2092 e->is_abort_cb = is_abort_cb; in ull_prepare_enqueue()
2093 e->abort_cb = abort_cb; in ull_prepare_enqueue()
2094 e->is_resume = is_resume; in ull_prepare_enqueue()
2095 e->is_aborted = 0U; in ull_prepare_enqueue()
2127 * - 2 continuous scan prepare in queue (1M and Coded PHY) in ull_prepare_dequeue()
2128 * - 2 continuous scan resume in queue (1M and Coded PHY) in ull_prepare_dequeue()
2129 * - 1 directed adv prepare in ull_prepare_dequeue()
2130 * - 1 directed adv resume in ull_prepare_dequeue()
2131 * - 1 any other role with time reservation in ull_prepare_dequeue()
2135 * - 1 scan prepare (1M) in ull_prepare_dequeue()
2136 * - 1 scan prepare (Coded PHY) in ull_prepare_dequeue()
2137 * - 1 directed adv prepare in ull_prepare_dequeue()
2140 * - 1 scan resume (1M) in ull_prepare_dequeue()
2141 * - 1 scan resume (Coded PHY) in ull_prepare_dequeue()
2142 * - 1 directed adv resume in ull_prepare_dequeue()
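Purely as a reading aid, the worst-case pipeline depth implied by the enumeration above can be written out as a sum. The macro names are invented for illustration; only the arithmetic is taken from the comment.

/* Worst case entries in the prepare pipeline, per the comment above:
 * 2 continuous scan prepares and 2 scan resumes (1M and Coded PHY),
 * 1 directed adv prepare and 1 directed adv resume,
 * 1 other role with a time reservation.
 */
#define PIPELINE_SCAN_PREPARE_SKETCH 2U
#define PIPELINE_SCAN_RESUME_SKETCH  2U
#define PIPELINE_DIRECTED_ADV_SKETCH 2U /* prepare + resume */
#define PIPELINE_OTHER_SKETCH        1U

#define PIPELINE_WORST_CASE_SKETCH \
        (PIPELINE_SCAN_PREPARE_SKETCH + PIPELINE_SCAN_RESUME_SKETCH + \
         PIPELINE_DIRECTED_ADV_SKETCH + PIPELINE_OTHER_SKETCH) /* = 7 entries */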
2150 void *param = next->prepare_param.param; in ull_prepare_dequeue()
2151 uint8_t is_aborted = next->is_aborted; in ull_prepare_dequeue()
2152 uint8_t is_resume = next->is_resume; in ull_prepare_dequeue()
2157 loop--; in ull_prepare_dequeue()
2212 if (!next->is_aborted && in ull_prepare_dequeue()
2213 ((!next->is_resume && in ull_prepare_dequeue()
2214 ((next->prepare_param.param == in ull_prepare_dequeue()
2216 (next->prepare_param.param == in ull_prepare_dequeue()
2218 (next->is_resume && in ull_prepare_dequeue()
2220 ((next->prepare_param.param == in ull_prepare_dequeue()
2222 (next->prepare_param.param == in ull_prepare_dequeue()
2239 return &evdone->extra; in ull_event_done_extra_get()
2244 struct event_done_extra *extra; in ull_done_extra_type_set() local
2246 extra = ull_event_done_extra_get(); in ull_done_extra_type_set()
2247 if (!extra) { in ull_done_extra_type_set()
2251 extra->type = type; in ull_done_extra_type_set()
2253 return extra; in ull_done_extra_type_set()
2261 /* Obtain a new node that signals "Done of an RX-event". in ull_event_done()
2275 link = evdone->hdr.link; in ull_event_done()
2276 evdone->hdr.link = NULL; in ull_event_done()
2278 evdone->hdr.type = NODE_RX_TYPE_EVENT_DONE; in ull_event_done()
2279 evdone->param = param; in ull_event_done()
2304 done->extra.drift.start_to_address_actual_us; in ull_drift_ticks_get()
2306 done->extra.drift.window_widening_event_us; in ull_drift_ticks_get()
2308 done->extra.drift.preamble_to_addr_us; in ull_drift_ticks_get()
2319 HAL_TICKER_US_TO_TICKS((start_to_address_expected_us - in ull_drift_ticks_get()
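The drift fields referenced in the lines above suggest the usual anchor-drift computation: compare when the access address actually arrived against when it was expected, and convert the signed difference into ticker ticks. The sketch below shows only that comparison under stated assumptions; the real code's jitter margins and the HAL_TICKER_US_TO_TICKS() conversion are replaced by hypothetical stand-ins.

#include <stdint.h>

/* Stand-in for HAL_TICKER_US_TO_TICKS(); 1 tick == 1 us purely for illustration */
static uint32_t us_to_ticks_sketch(uint32_t us)
{
        return us;
}

static void drift_ticks_sketch(uint32_t start_to_address_actual_us,
                               uint32_t start_to_address_expected_us,
                               uint32_t *ticks_drift_plus,
                               uint32_t *ticks_drift_minus)
{
        if (start_to_address_actual_us > start_to_address_expected_us) {
                /* Peer arrived later than expected: move the anchor forward */
                *ticks_drift_plus = us_to_ticks_sketch(
                        start_to_address_actual_us - start_to_address_expected_us);
        } else {
                /* Peer arrived earlier than (or exactly when) expected */
                *ticks_drift_minus = us_to_ticks_sketch(
                        start_to_address_expected_us - start_to_address_actual_us);
        }
}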
2466 rx->link = link; in rx_replenish()
2472 max--; in rx_replenish()
2499 link->mem = NULL; in rx_replenish()
2500 rx->link = link; in rx_replenish()
2564 link_tx = ull_conn_ack_by_last_peek(rx->ack_last, in rx_demux()
2567 rx_demux_conn_tx_ack(rx->ack_last, handle, in rx_demux()
2648 *handle = tx->handle; in tx_cmplt_get()
2654 } else if (IS_CIS_HANDLE(tx->handle) || in tx_cmplt_get()
2655 IS_ADV_ISO_HANDLE(tx->handle)) { in tx_cmplt_get()
2676 tx_node = tx->node; in tx_cmplt_get()
2681 sdu_fragments = tx_node->sdu_fragments; in tx_cmplt_get()
2686 NODE_TX_FRAGMENTS_SET(tx->node, sdu_fragments); in tx_cmplt_get()
2691 ll_iso_link_tx_release(tx_node->link); in tx_cmplt_get()
2726 tx_node = tx->node; in tx_cmplt_get()
2727 p = (void *)tx_node->pdu; in tx_cmplt_get()
2730 (p->ll_id == PDU_DATA_LLID_DATA_START || in tx_cmplt_get()
2731 p->ll_id == PDU_DATA_LLID_DATA_CONTINUE)) || in tx_cmplt_get()
2735 NODE_TX_DATA_SET(tx->node); in tx_cmplt_get()
2741 NODE_TX_CTRL_SET(tx->node); in tx_cmplt_get()
2760 } while (tx && tx->handle == *handle); in tx_cmplt_get()
2805 switch (rx->type) { in rx_demux_rx()
2826 adv = (void *)((struct node_rx_pdu *)rx)->pdu; in rx_demux_rx()
2827 if (adv->type != PDU_ADV_TYPE_EXT_IND) { in rx_demux_rx()
2868 conn = ll_conn_get(rx->handle); in rx_demux_rx()
2873 rx->type = NODE_RX_TYPE_RELEASE; in rx_demux_rx()
2907 if (rx && rx->type != NODE_RX_TYPE_RETAIN) { in rx_demux_rx()
2991 ull_hdr = done->param; in rx_demux_event_done()
2998 switch (done->extra.type) { in rx_demux_event_done()
3085 done->extra.type = 0U; in rx_demux_event_done()
3098 if (ull_hdr && !ull_ref_get(ull_hdr) && ull_hdr->disabled_cb) { in rx_demux_event_done()
3099 ull_hdr->disabled_cb(ull_hdr->disabled_param); in rx_demux_event_done()
3118 while ((max--) && mfifo_enqueue_idx_get(n, f, *l, &idx)) { in ull_rxfifo_alloc()
3133 link->mem = NULL; in ull_rxfifo_alloc()
3134 rx->link = link; in ull_rxfifo_alloc()
3153 rx->link = link; in ull_rxfifo_release()
3164 * @brief Wraps a given time within the range of 0 to ULL_TIME_WRAPPING_POINT_US
3165 * @param time_now Current time value
3166 * @param time_diff Time difference (signed)
3167 * @return Wrapped time after difference
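A minimal sketch of a wrap-around helper matching this doc comment: apply a signed difference to the current time and keep the result inside [0, ULL_TIME_WRAPPING_POINT_US]. The wrapping-point value and names below are illustrative assumptions.

#include <stdint.h>

#define WRAPPING_POINT_US_SKETCH 0xFFFFFFFFU /* illustrative wrapping point */
#define TIME_SPAN_FULL_US_SKETCH ((uint64_t)WRAPPING_POINT_US_SKETCH + 1U)

static uint32_t wrapped_time_get_sketch(uint32_t time_now, int32_t time_diff)
{
        /* Adding one full span keeps the intermediate sum non-negative for any
         * in-range negative difference, so the unsigned modulo yields the
         * intended wrapped value.
         */
        return (uint32_t)(((uint64_t)time_now + TIME_SPAN_FULL_US_SKETCH +
                           (int64_t)time_diff) % TIME_SPAN_FULL_US_SKETCH);
}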