Lines Matching +full:divide +full:- +full:20
(Search hits from the Zephyr Bluetooth LE controller's ULL connection layer: each line shows the source line number, the matched code, and its enclosing function.)

2  * Copyright (c) 2018-2021 Nordic Semiconductor ASA
4  * SPDX-License-Identifier: Apache-2.0
195 if (conn->lll.handle != handle) { in ll_connected_get()
230 return -EINVAL; in ll_tx_mem_enqueue()
235 return -ENOBUFS; in ll_tx_mem_enqueue()
238 lll_tx->handle = handle; in ll_tx_mem_enqueue()
239 lll_tx->node = tx; in ll_tx_mem_enqueue()
244 if (ull_ref_get(&conn->ull)) { in ll_tx_mem_enqueue()
249 force_md_cnt = force_md_cnt_calc(&conn->lll, tx_rate); in ll_tx_mem_enqueue()
266 if (IS_ENABLED(CONFIG_BT_PERIPHERAL) && conn->lll.role) { in ll_tx_mem_enqueue()
278 delta = k_cyc_to_ns_floor64(cycle_stamp - last_cycle_stamp); in ll_tx_mem_enqueue()
287 pdu = (void *)((struct node_tx *)tx)->pdu; in ll_tx_mem_enqueue()
288 tx_len += pdu->len; in ll_tx_mem_enqueue()
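
These enqueue-path hits sample the CPU cycle counter to estimate the host-to-controller Tx data rate: the bytes accumulated between two timestamps, divided by the elapsed nanoseconds, feed force_md_cnt_calc(). A minimal sketch of that rate estimate; only k_cyc_to_ns_floor64() above is the real Zephyr API, the division below is an assumed equivalent:

#include <stdint.h>
#include <stdio.h>

/* Sketch: Tx throughput in bits per second from the bytes enqueued
 * since the last cycle-counter snapshot (delta in nanoseconds, as
 * returned by k_cyc_to_ns_floor64() in the hit above).
 */
static uint32_t tx_rate_estimate(uint64_t delta_ns, uint32_t tx_len)
{
        if (delta_ns == 0U) {
                return 0U;
        }

        /* bytes over nanoseconds -> bits per second */
        return (uint32_t)(((uint64_t)tx_len << 3) * 1000000000U / delta_ns);
}

int main(void)
{
        /* Hypothetical: 1000 bytes enqueued over 8 ms -> 1 Mb/s. */
        printf("%u bps\n", tx_rate_estimate(8000000U, 1000U));
        return 0;
}
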
319 conn->lll.role) { in ll_conn_update()
351 * Core Spec 5.2 Vol4: 7.8.20: in ll_chm_get()
363 memcpy(chm, pending_chm, sizeof(conn->lll.data_chan_map)); in ll_chm_get()
365 memcpy(chm, conn->lll.data_chan_map, sizeof(conn->lll.data_chan_map)); in ll_chm_get()
427 if (IS_ENABLED(CONFIG_BT_PERIPHERAL) && conn->lll.role) { in ll_terminate_ind_send()
437 /* CIS is not connected - get the unconnected instance */ in ll_terminate_ind_send()
440 /* Sanity-check instance to make sure it's created but not connected */ in ll_terminate_ind_send()
441 if (cis->group && cis->lll.handle == handle && !cis->established) { in ll_terminate_ind_send()
442 if (cis->group->state == CIG_STATE_CONFIGURABLE) { in ll_terminate_ind_send()
446 } else if (cis->group->state == CIG_STATE_INITIATING) { in ll_terminate_ind_send()
447 conn = ll_connected_get(cis->lll.acl_handle); in ll_terminate_ind_send()
449 /* CIS is not yet established - try to cancel procedure */ in ll_terminate_ind_send()
451 /* Successfully canceled - complete disconnect */ in ll_terminate_ind_send()
457 node_terminate->hdr.handle = handle; in ll_terminate_ind_send()
458 node_terminate->hdr.type = NODE_RX_TYPE_TERMINATE; in ll_terminate_ind_send()
459 *((uint8_t *)node_terminate->pdu) = in ll_terminate_ind_send()
462 ll_rx_put_sched(node_terminate->hdr.link, in ll_terminate_ind_send()
466 cis->group->state = CIG_STATE_CONFIGURABLE; in ll_terminate_ind_send()
473 * state - let it run its course and enqueue a in ll_terminate_ind_send()
484 conn = ll_connected_get(cis->lll.acl_handle); in ll_terminate_ind_send()
516 conn->lll.role) { in ll_feature_req_send()
540 if (IS_ENABLED(CONFIG_BT_PERIPHERAL) && conn->lll.role) { in ll_version_ind_send()
591 if (IS_ENABLED(CONFIG_BT_PERIPHERAL) && conn->lll.role) { in ll_length_req_send()
644 *tx = conn->lll.phy_tx; in ll_phy_get()
645 *rx = conn->lll.phy_rx; in ll_phy_get()
680 if (IS_ENABLED(CONFIG_BT_PERIPHERAL) && conn->lll.role) { in ll_phy_req_send()
698 *rssi = conn->lll.rssi_latest; in ll_rssi_get()
714 if (conn->lll.interval >= BT_HCI_LE_INTERVAL_MIN) { in ll_apto_get()
715 *apto = conn->apto_reload * conn->lll.interval * in ll_apto_get()
718 *apto = conn->apto_reload * (conn->lll.interval + 1U) * in ll_apto_get()
734 if (conn->lll.interval >= BT_HCI_LE_INTERVAL_MIN) { in ll_apto_set()
735 conn->apto_reload = in ll_apto_set()
737 conn->lll.interval * in ll_apto_set()
740 conn->apto_reload = in ll_apto_set()
742 (conn->lll.interval + 1U) * in ll_apto_set()
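
ll_apto_get()/ll_apto_set() convert the authenticated payload timeout between HCI's 10 ms units and a count of connection events via the 1.25 ms interval unit (the truncated continuations above presumably complete that conversion, and the interval + 1U branches appear to be a low-latency configuration with an off-by-one interval encoding). A worked round trip under an assumed 50 ms interval, with RADIO_CONN_EVENTS() approximated as a round-up division:

#include <stdint.h>
#include <stdio.h>

#define CONN_INT_UNIT_US 1250U  /* HCI connection interval unit */
#define APTO_UNIT_US 10000U     /* HCI APTO unit: 10 ms */

int main(void)
{
        uint16_t interval = 40U;     /* 40 * 1.25 ms = 50 ms */
        uint16_t apto_reload = 600U; /* events until APTO expiry */
        uint32_t interval_us = interval * CONN_INT_UNIT_US;

        /* ll_apto_get(): events -> 10 ms units (here the 30 s default) */
        uint32_t apto = (uint32_t)apto_reload * interval_us / APTO_UNIT_US;

        /* ll_apto_set(): 10 ms units -> events, rounded up */
        uint32_t reload = (apto * APTO_UNIT_US + interval_us - 1U) /
                          interval_us;

        printf("apto = %u (x10 ms), reload = %u events\n", apto, reload);
        return 0;
}
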
776 /* Re-initialize the Tx mfifo */ in ull_conn_reset()
779 /* Re-initialize the Tx Ack mfifo */ in ull_conn_reset()
796 return &conn->lll; in ull_conn_lll_get()
853 conn->peer_id_addr_type == peer_id_addr_type && in ull_conn_peer_connected()
854 !memcmp(conn->peer_id_addr, peer_id_addr, BDADDR_SIZE) && in ull_conn_peer_connected()
855 conn->own_id_addr_type == own_id_addr_type && in ull_conn_peer_connected()
856 !memcmp(conn->own_id_addr, own_id_addr, BDADDR_SIZE)) { in ull_conn_peer_connected()
873 rx->hdr.link = rx_link; in ull_conn_setup()
878 ftr = &(rx->rx_ftr); in ull_conn_setup()
883 hdr = HDR_LLL2ULL(ftr->param); in ull_conn_setup()
888 LL_ASSERT(!hdr->disabled_cb); in ull_conn_setup()
889 hdr->disabled_param = rx; in ull_conn_setup()
890 hdr->disabled_cb = conn_setup_adv_scan_disabled_cb; in ull_conn_setup()
901 conn = ll_connected_get((*rx)->hdr.handle); in ull_conn_rx()
904 (*rx)->hdr.type = NODE_RX_TYPE_RELEASE; in ull_conn_rx()
911 pdu_rx = (void *)(*rx)->pdu; in ull_conn_rx()
913 switch (pdu_rx->ll_id) { in ull_conn_rx()
917 (*rx)->hdr.type = NODE_RX_TYPE_RELEASE; in ull_conn_rx()
927 if (conn->pause_rx_data) { in ull_conn_rx()
928 conn->llcp_terminate.reason_final = in ull_conn_rx()
932 (*rx)->hdr.type = NODE_RX_TYPE_RELEASE; in ull_conn_rx()
940 if (conn->pause_rx_data) { in ull_conn_rx()
941 conn->llcp_terminate.reason_final = in ull_conn_rx()
949 (*rx)->hdr.type = NODE_RX_TYPE_RELEASE; in ull_conn_rx()
958 LL_ASSERT(conn->lll.handle != LLL_HANDLE_INVALID); in ull_conn_llcp()
960 conn->llcp.prep.ticks_at_expire = ticks_at_expire; in ull_conn_llcp()
961 conn->llcp.prep.remainder = remainder; in ull_conn_llcp()
962 conn->llcp.prep.lazy = lazy; in ull_conn_llcp()
966 if (conn->cancel_prepare) { in ull_conn_llcp()
968 conn->cancel_prepare = 0U; in ull_conn_llcp()
971 return -ECANCELED; in ull_conn_llcp()
994 conn = CONTAINER_OF(done->param, struct ll_conn, ull); in ull_conn_done()
995 lll = &conn->lll; in ull_conn_done()
998 if (unlikely(lll->handle == LLL_HANDLE_INVALID)) { in ull_conn_done()
1010 switch (done->extra.mic_state) { in ull_conn_done()
1013 if (lll->enc_rx && lll->enc_tx) { in ull_conn_done()
1017 appto_reload_new = (conn->apto_reload > in ull_conn_done()
1018 (lll->latency + 6)) ? in ull_conn_done()
1019 (conn->apto_reload - in ull_conn_done()
1020 (lll->latency + 6)) : in ull_conn_done()
1021 conn->apto_reload; in ull_conn_done()
1022 if (conn->appto_reload != appto_reload_new) { in ull_conn_done()
1023 conn->appto_reload = appto_reload_new; in ull_conn_done()
1024 conn->apto_expire = 0U; in ull_conn_done()
1028 if (conn->apto_expire == 0U) { in ull_conn_done()
1029 conn->appto_expire = conn->appto_reload; in ull_conn_done()
1030 conn->apto_expire = conn->apto_reload; in ull_conn_done()
1038 conn->appto_expire = conn->apto_expire = 0U; in ull_conn_done()
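
The appto ("almost" authenticated payload timeout) is derived from the apto by pulling it (latency + 6) events earlier, presumably so an LE Ping round trip can complete before the timeout itself; when the apto is too small for that margin it is used unchanged. A condensed restatement with hypothetical numbers:

#include <stdint.h>
#include <stdio.h>

/* APPTO fires (latency + 6) events before APTO so an LE Ping
 * exchange can finish before the real timeout; if the APTO is
 * smaller than that margin it is used as-is.
 */
static uint16_t appto_from_apto(uint16_t apto_reload, uint16_t latency)
{
        uint16_t margin = latency + 6U;

        return (apto_reload > margin) ? (apto_reload - margin) : apto_reload;
}

int main(void)
{
        /* Hypothetical: APTO of 600 events, peripheral latency of 4. */
        printf("appto_reload = %u\n", appto_from_apto(600U, 4U)); /* 590 */
        return 0;
}
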
1043 conn->llcp_terminate.reason_final = in ull_conn_done()
1049 reason_final = conn->llcp_terminate.reason_final; in ull_conn_done()
1061 latency_event = conn->common.is_must_expire ? 0 : lll->latency_event; in ull_conn_done()
1063 latency_event = lll->latency_event; in ull_conn_done()
1074 if (done->extra.trx_cnt) { in ull_conn_done()
1077 } else if (lll->role) { in ull_conn_done()
1078 if (!conn->periph.drift_skip) { in ull_conn_done()
1083 conn->periph.drift_skip = in ull_conn_done()
1084 ull_ref_get(&conn->ull); in ull_conn_done()
1087 conn->periph.drift_skip--; in ull_conn_done()
1090 if (!ull_tx_q_peek(&conn->tx_q)) { in ull_conn_done()
1094 if (ull_tx_q_peek(&conn->tx_q) || in ull_conn_done()
1095 memq_peek(lll->memq_tx.head, in ull_conn_done()
1096 lll->memq_tx.tail, NULL)) { in ull_conn_done()
1097 lll->latency_event = 0U; in ull_conn_done()
1098 } else if (lll->periph.latency_enabled) { in ull_conn_done()
1099 lll->latency_event = lll->latency; in ull_conn_done()
1105 conn->connect_expire = 0U; in ull_conn_done()
1108 elapsed_event = latency_event + lll->lazy_prepare + 1U; in ull_conn_done()
1111 if (done->extra.crc_valid && !done->extra.is_aborted) { in ull_conn_done()
1112 conn->supervision_expire = 0U; in ull_conn_done()
1116 else if (conn->connect_expire) { in ull_conn_done()
1117 if (conn->connect_expire > elapsed_event) { in ull_conn_done()
1118 conn->connect_expire -= elapsed_event; in ull_conn_done()
1126 /* if anchor point not sync-ed, start supervision timeout, and break in ull_conn_done()
1131 if (!conn->supervision_expire) { in ull_conn_done()
1134 if (conn->lll.interval >= BT_HCI_LE_INTERVAL_MIN) { in ull_conn_done()
1135 conn_interval_us = conn->lll.interval * in ull_conn_done()
1138 conn_interval_us = (conn->lll.interval + 1U) * in ull_conn_done()
1142 conn->supervision_expire = RADIO_CONN_EVENTS( in ull_conn_done()
1143 (conn->supervision_timeout * 10U * USEC_PER_MSEC), in ull_conn_done()
1151 if (conn->supervision_expire) { in ull_conn_done()
1152 if (conn->supervision_expire > elapsed_event) { in ull_conn_done()
1153 conn->supervision_expire -= elapsed_event; in ull_conn_done()
1156 lll->latency_event = 0U; in ull_conn_done()
1161 if (conn->supervision_expire <= 6U) { in ull_conn_done()
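
The supervision hits convert the HCI supervision timeout (10 ms units) into a connection-event budget by dividing by the interval, and the guard above triggers a last-ditch path once six or fewer events remain. A worked conversion, with RADIO_CONN_EVENTS() again approximated as a round-up division (an assumption; the real macro lives in the controller headers):

#include <stdint.h>
#include <stdio.h>

#define CONN_INT_UNIT_US 1250U
#define USEC_PER_MSEC 1000U

/* Assumed stand-in for RADIO_CONN_EVENTS(): connection events needed
 * to span a duration, rounded up.
 */
#define CONN_EVENTS(t_us, interval_us) \
        (((t_us) + (interval_us) - 1U) / (interval_us))

int main(void)
{
        uint16_t interval = 40U;  /* 50 ms connection interval */
        uint16_t timeout = 400U;  /* 400 * 10 ms = 4 s supervision */
        uint32_t interval_us = interval * CONN_INT_UNIT_US;

        uint32_t expire = CONN_EVENTS(timeout * 10U * USEC_PER_MSEC,
                                      interval_us);

        printf("supervision_expire = %u events\n", expire); /* 80 */
        return 0;
}
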
1170 else if (lll->role) { in ull_conn_done()
1174 force = conn->periph.force & 0x01; in ull_conn_done()
1177 conn->periph.force >>= 1U; in ull_conn_done()
1179 conn->periph.force |= BIT(31); in ull_conn_done()
1191 lll->forced = force_lll; in ull_conn_done()
1196 if (-ETIMEDOUT == ull_cp_prt_elapse(conn, elapsed_event, &error_code)) { in ull_conn_done()
1204 if (conn->apto_expire != 0U) { in ull_conn_done()
1205 if (conn->apto_expire > elapsed_event) { in ull_conn_done()
1206 conn->apto_expire -= elapsed_event; in ull_conn_done()
1212 conn->apto_expire = 0U; in ull_conn_done()
1214 rx->handle = lll->handle; in ull_conn_done()
1215 rx->type = NODE_RX_TYPE_APTO; in ull_conn_done()
1218 ll_rx_put_sched(rx->link, rx); in ull_conn_done()
1220 conn->apto_expire = 1U; in ull_conn_done()
1226 if (conn->appto_expire != 0U) { in ull_conn_done()
1227 if (conn->appto_expire > elapsed_event) { in ull_conn_done()
1228 conn->appto_expire -= elapsed_event; in ull_conn_done()
1230 conn->appto_expire = 0U; in ull_conn_done()
1242 if (conn->llcp.cte_req.req_interval != 0U && conn->llcp.cte_req.req_expire != 0U) { in ull_conn_done()
1243 if (conn->llcp.cte_req.req_expire > elapsed_event) { in ull_conn_done()
1244 conn->llcp.cte_req.req_expire -= elapsed_event; in ull_conn_done()
1249 * The counter is re-started after completion of this run. in ull_conn_done()
1251 conn->llcp.cte_req.req_expire = 0U; in ull_conn_done()
1253 err = ull_cp_cte_req(conn, conn->llcp.cte_req.min_cte_len, in ull_conn_done()
1254 conn->llcp.cte_req.cte_type); in ull_conn_done()
1268 if (lll->rssi_sample_count == 0U) { in ull_conn_done()
1274 lll->rssi_reported = lll->rssi_latest; in ull_conn_done()
1275 lll->rssi_sample_count = LLL_CONN_RSSI_SAMPLE_COUNT; in ull_conn_done()
1278 rx->hdr.handle = lll->handle; in ull_conn_done()
1279 rx->hdr.type = NODE_RX_TYPE_RSSI; in ull_conn_done()
1282 pdu_data_rx = (void *)rx->pdu; in ull_conn_done()
1283 pdu_data_rx->rssi = lll->rssi_reported; in ull_conn_done()
1286 ll_rx_put_sched(rx->hdr.link, rx); in ull_conn_done()
1293 if ((force) || (latency_event != lll->latency_event)) { in ull_conn_done()
1294 lazy = lll->latency_event + 1U; in ull_conn_done()
1299 if (lll->evt_len_upd) { in ull_conn_done()
1302 lll->evt_len_upd = 0; in ull_conn_done()
1305 ready_delay = (lll->role) ? in ull_conn_done()
1306 lll_radio_rx_ready_delay_get(lll->phy_rx, PHY_FLAGS_S8) : in ull_conn_done()
1307 lll_radio_tx_ready_delay_get(lll->phy_tx, lll->phy_flags); in ull_conn_done()
1311 tx_time = lll->dle.eff.max_tx_time; in ull_conn_done()
1312 rx_time = lll->dle.eff.max_rx_time; in ull_conn_done()
1316 PDU_DC_MAX_US(PDU_DC_PAYLOAD_SIZE_MIN, lll->phy_tx)); in ull_conn_done()
1318 PDU_DC_MAX_US(PDU_DC_PAYLOAD_SIZE_MIN, lll->phy_rx)); in ull_conn_done()
1322 tx_time = PDU_MAX_US(0U, 0U, lll->phy_tx); in ull_conn_done()
1323 rx_time = PDU_MAX_US(0U, 0U, lll->phy_rx); in ull_conn_done()
1327 ready_delay = (lll->role) ? in ull_conn_done()
1331 tx_time = PDU_DC_MAX_US(lll->dle.eff.max_tx_octets, 0); in ull_conn_done()
1332 rx_time = PDU_DC_MAX_US(lll->dle.eff.max_rx_octets, 0); in ull_conn_done()
1342 slot_us += lll->tifs_rx_us + (EVENT_CLOCK_JITTER_US << 1); in ull_conn_done()
1346 !conn->lll.role) { in ull_conn_done()
1351 if (ticks_slot > conn->ull.ticks_slot) { in ull_conn_done()
1352 ticks_slot_plus = ticks_slot - conn->ull.ticks_slot; in ull_conn_done()
1354 ticks_slot_minus = conn->ull.ticks_slot - ticks_slot; in ull_conn_done()
1356 conn->ull.ticks_slot = ticks_slot; in ull_conn_done()
1368 uint8_t ticker_id = TICKER_ID_CONN_BASE + lll->handle; in ull_conn_done()
1369 struct ll_conn *conn_ll = lll->hdr.parent; in ull_conn_done()
1415 conn = ll_connected_get(lll_tx->handle); in ull_conn_tx_demux()
1417 struct node_tx *tx = lll_tx->node; in ull_conn_tx_demux()
1425 ull_tx_q_enqueue_data(&conn->tx_q, tx); in ull_conn_tx_demux()
1427 struct node_tx *tx = lll_tx->node; in ull_conn_tx_demux()
1428 struct pdu_data *p = (void *)tx->pdu; in ull_conn_tx_demux()
1430 p->ll_id = PDU_DATA_LLID_RESV; in ull_conn_tx_demux()
1439 } while (--count); in ull_conn_tx_demux()
1444 while (count--) { in ull_conn_tx_lll_enqueue()
1458 memq_enqueue(link, tx, &conn->lll.memq_tx.tail); in ull_conn_tx_lll_enqueue()
1484 *handle = lll_tx->handle; in ull_conn_ack_peek()
1485 *tx = lll_tx->node; in ull_conn_ack_peek()
1487 return (*tx)->link; in ull_conn_ack_peek()
1501 *handle = lll_tx->handle; in ull_conn_ack_by_last_peek()
1502 *tx = lll_tx->node; in ull_conn_ack_by_last_peek()
1504 return (*tx)->link; in ull_conn_ack_by_last_peek()
1520 lll_tx->handle = handle; in ull_conn_lll_ack_enqueue()
1521 lll_tx->node = tx; in ull_conn_lll_ack_enqueue()
1530 pdu_tx = (void *)tx->pdu; in ull_conn_tx_ack()
1531 LL_ASSERT(pdu_tx->len); in ull_conn_tx_ack()
1533 if (pdu_tx->ll_id == PDU_DATA_LLID_CTRL) { in ull_conn_tx_ack()
1541 if (link->next == (void *)tx) { in ull_conn_tx_ack()
1542 LL_ASSERT(link->next); in ull_conn_tx_ack()
1549 /* Tx Node re-used to enqueue new ctrl PDU */ in ull_conn_tx_ack()
1552 LL_ASSERT(!link->next); in ull_conn_tx_ack()
1554 pdu_tx->ll_id = PDU_DATA_LLID_RESV; in ull_conn_tx_ack()
1568 switch (lll->phy_tx_time) { in ull_conn_lll_max_tx_octets_get()
1571 /* 1M PHY, 1us = 1 bit, hence divide by 8. in ull_conn_lll_max_tx_octets_get()
1575 max_tx_octets = (lll->dle.eff.max_tx_time >> 3) - 10; in ull_conn_lll_max_tx_octets_get()
1579 /* 2M PHY, 1us = 2 bits, hence divide by 4. in ull_conn_lll_max_tx_octets_get()
1583 max_tx_octets = (lll->dle.eff.max_tx_time >> 2) - 11; in ull_conn_lll_max_tx_octets_get()
1588 if (lll->phy_flags & 0x01) { in ull_conn_lll_max_tx_octets_get()
1589 /* S8 Coded PHY, 8us = 1 bit, hence divide by in ull_conn_lll_max_tx_octets_get()
1596 max_tx_octets = ((lll->dle.eff.max_tx_time - 592) >> in ull_conn_lll_max_tx_octets_get()
1597 6) - 2; in ull_conn_lll_max_tx_octets_get()
1599 /* S2 Coded PHY, 2us = 1 bit, hence divide by in ull_conn_lll_max_tx_octets_get()
1606 max_tx_octets = ((lll->dle.eff.max_tx_time - 430) >> in ull_conn_lll_max_tx_octets_get()
1607 4) - 2; in ull_conn_lll_max_tx_octets_get()
1614 if (lll->enc_tx) { in ull_conn_lll_max_tx_octets_get()
1616 max_tx_octets -= 4U; in ull_conn_lll_max_tx_octets_get()
1620 if (max_tx_octets > lll->dle.eff.max_tx_octets) { in ull_conn_lll_max_tx_octets_get()
1621 max_tx_octets = lll->dle.eff.max_tx_octets; in ull_conn_lll_max_tx_octets_get()
1625 max_tx_octets = lll->dle.eff.max_tx_octets; in ull_conn_lll_max_tx_octets_get()
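
These are the comment lines the +full:divide query matched: converting the effective maximum Tx time back into payload octets per PHY. Each PHY has a fixed per-octet airtime (8 us at 1M, 4 us at 2M, 64 us for Coded S8, 16 us for S2) plus a fixed overhead subtracted first. A standalone restatement of the same arithmetic, with constants taken from the shifts and offsets above:

#include <stdint.h>
#include <stdio.h>

/* Per-PHY conversion of max Tx time (us) to octets: per-octet airtime
 * of 8/4/64/16 us, minus the preamble/AA/header/CRC overhead (and the
 * FEC framing on Coded PHY).
 */
static uint16_t octets_1m(uint16_t t_us)       { return (t_us >> 3) - 10U; }
static uint16_t octets_2m(uint16_t t_us)       { return (t_us >> 2) - 11U; }
static uint16_t octets_coded_s8(uint16_t t_us) { return ((t_us - 592U) >> 6) - 2U; }
static uint16_t octets_coded_s2(uint16_t t_us) { return ((t_us - 430U) >> 4) - 2U; }

int main(void)
{
        /* 2120 us / 1064 us / 17040 us are the spec maxima; all give
         * 255 octets (251-octet payload + 4-octet MIC when encrypted,
         * matching the enc_tx subtraction in the hits above).
         */
        printf("1M: %u, 2M: %u, S8: %u\n",
               octets_1m(2120U), octets_2m(1064U), octets_coded_s8(17040U));
        return 0;
}
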
1641 pdu->cp = 0U; in ull_pdu_data_init()
1642 pdu->octet3.resv[0] = 0U; in ull_pdu_data_init()
1722 tx = ull_tx_q_dequeue(&conn->tx_q); in tx_ull_dequeue()
1726 pdu_tx = (void *)tx->pdu; in tx_ull_dequeue()
1727 if (pdu_tx->ll_id == PDU_DATA_LLID_CTRL) { in tx_ull_dequeue()
1729 tx->next = tx; in tx_ull_dequeue()
1732 tx->next = NULL; in tx_ull_dequeue()
1779 ftr = &(rx->rx_ftr); in conn_setup_adv_scan_disabled_cb()
1780 lll = *((struct lll_conn **)((uint8_t *)ftr->param + in conn_setup_adv_scan_disabled_cb()
1786 /* Prevent fast ADV re-scheduling from re-triggering */ in conn_setup_adv_scan_disabled_cb()
1787 hdr = HDR_LLL2ULL(ftr->param); in conn_setup_adv_scan_disabled_cb()
1788 hdr->disabled_cb = NULL; in conn_setup_adv_scan_disabled_cb()
1791 switch (lll->role) { in conn_setup_adv_scan_disabled_cb()
1818 conn, &conn->lll); in disable()
1819 LL_ASSERT_INFO2(err == 0 || err == -EALREADY, handle, err); in disable()
1821 conn->lll.handle = LLL_HANDLE_INVALID; in disable()
1822 conn->lll.link_tx_free = NULL; in disable()
1835 /* More associated CISes - stop next */ in conn_cleanup_iso_cis_released_cb()
1836 rx = (void *)&conn->llcp_terminate.node_rx; in conn_cleanup_iso_cis_released_cb()
1837 reason = *(uint8_t *)rx->pdu; in conn_cleanup_iso_cis_released_cb()
1842 /* No more CISes associated with conn - finalize */ in conn_cleanup_iso_cis_released_cb()
1850 struct lll_conn *lll = &conn->lll; in conn_cleanup_finalize()
1861 /* flush demux-ed Tx buffer still in ULL context */ in conn_cleanup_finalize()
1867 TICKER_ID_CONN_BASE + lll->handle, in conn_cleanup_finalize()
1873 lll->handle = LLL_HANDLE_INVALID; in conn_cleanup_finalize()
1898 rx = (void *)&conn->llcp_terminate.node_rx.rx; in conn_cleanup()
1899 rx->hdr.handle = conn->lll.handle; in conn_cleanup()
1900 rx->hdr.type = NODE_RX_TYPE_TERMINATE; in conn_cleanup()
1901 *((uint8_t *)rx->pdu) = reason; in conn_cleanup()
1920 ull_tx_q_resume_data(&conn->tx_q); in tx_ull_flush()
1930 memq_enqueue(link, tx, &conn->lll.memq_tx.tail); in tx_ull_flush()
1958 hdr = &conn->ull; in conn_disable()
1964 mfy.param = &conn->lll; in conn_disable()
1969 LL_ASSERT(!hdr->disabled_cb); in conn_disable()
1970 hdr->disabled_param = mfy.param; in conn_disable()
1971 hdr->disabled_cb = disabled_cb; in conn_disable()
1979 disabled_cb(&conn->lll); in conn_disable()
2011 link = memq_dequeue(lll->memq_tx.tail, &lll->memq_tx.head, in tx_lll_flush()
2020 tx_buf->handle = LLL_HANDLE_INVALID; in tx_lll_flush()
2021 tx_buf->node = tx; in tx_lll_flush()
2024 link->next = tx->next; /* Indicates ctrl pool or data pool */ in tx_lll_flush()
2025 tx->next = link; in tx_lll_flush()
2029 link = memq_dequeue(lll->memq_tx.tail, &lll->memq_tx.head, in tx_lll_flush()
2037 rx = (void *)&conn->llcp_terminate.node_rx; in tx_lll_flush()
2038 LL_ASSERT(rx->hdr.link); in tx_lll_flush()
2039 link = rx->hdr.link; in tx_lll_flush()
2040 rx->hdr.link = NULL; in tx_lll_flush()
2049 struct pdu_data *p = (void *)tx->pdu; in empty_data_start_release()
2051 if ((p->ll_id == PDU_DATA_LLID_DATA_START) && !p->len) { in empty_data_start_release()
2052 conn->start_empty = 1U; in empty_data_start_release()
2054 ll_tx_ack_put(conn->lll.handle, tx); in empty_data_start_release()
2056 return -EINVAL; in empty_data_start_release()
2057 } else if (p->len && conn->start_empty) { in empty_data_start_release()
2058 conn->start_empty = 0U; in empty_data_start_release()
2060 if (p->ll_id == PDU_DATA_LLID_DATA_CONTINUE) { in empty_data_start_release()
2061 p->ll_id = PDU_DATA_LLID_DATA_START; in empty_data_start_release()
2079 phy = lll_connection->phy_tx; in force_md_cnt_calc()
2080 phy_flags = lll_connection->phy_flags; in force_md_cnt_calc()
2087 mic_size = PDU_MIC_SIZE * lll_connection->enc_tx; in force_md_cnt_calc()
2104 delta = (time_incoming << 1) - time_outgoing; in force_md_cnt_calc()
2107 force_md_cnt = (delta + (time_keep_alive - 1)) / in force_md_cnt_calc()
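
The force_md_cnt hit is the classic integer ceiling division: adding (divisor - 1) before dividing rounds up, so enough MD=1 (more data) events are forced to cover the deficit between incoming and outgoing throughput. An illustration with hypothetical timings:

#include <stdint.h>
#include <stdio.h>

/* ceil(n / d) in integer arithmetic, the idiom in the hit above. */
static uint32_t div_round_up(uint32_t n, uint32_t d)
{
        return (n + (d - 1U)) / d;
}

int main(void)
{
        /* Hypothetical: incoming data needs (2 * 900) - 1000 = 800 us
         * more airtime; each forced MD=1 event buys 300 us, so three
         * extra events are required.
         */
        uint32_t delta = (900U << 1) - 1000U;

        printf("force_md_cnt = %u\n", div_round_up(delta, 300U)); /* 3 */
        return 0;
}
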
2125 conn->pause_rx_data = 1U; in ull_conn_pause_rx_data()
2133 conn->pause_rx_data = 0U; in ull_conn_resume_rx_data()
2142 lll = &conn->lll; in ull_conn_event_counter()
2144 /* Calculate current event counter. If refcount is non-zero, we have called in ull_conn_event_counter()
2152 if (ull_ref_get(&conn->ull)) { in ull_conn_event_counter()
2153 /* We are in post-prepare (RX path). Event counter is already in ull_conn_event_counter()
2156 event_counter = lll->event_counter - 1; in ull_conn_event_counter()
2158 event_counter = lll->event_counter + lll->latency_prepare + in ull_conn_event_counter()
2159 conn->llcp.prep.lazy; in ull_conn_event_counter()
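
ull_conn_event_counter() reconstructs the current event counter for two call contexts: with a prepare reference held (the post-prepare Rx path) lll->event_counter has already been advanced, so the current event is counter - 1; otherwise the not-yet-applied latency_prepare and lazy counts are added on. A condensed sketch of those two branches:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Sketch of the two branches in ull_conn_event_counter(). */
static uint16_t event_counter(bool in_event, uint16_t counter,
                              uint16_t latency_prepare, uint16_t lazy)
{
        if (in_event) {
                /* Post-prepare Rx path: counter already advanced. */
                return counter - 1U;
        }

        /* Pre-prepare path: add events not yet accounted for. */
        return counter + latency_prepare + lazy;
}

int main(void)
{
        printf("%u %u\n", event_counter(true, 10U, 0U, 0U),  /* 9 */
               event_counter(false, 9U, 0U, 1U));            /* 10 */
        return 0;
}
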
2195 (ticks_slot_overhead + conn->ull.ticks_slot), in ull_conn_update_ticker()
2197 conn->lll.role == BT_HCI_ROLE_PERIPHERAL ? in ull_conn_update_ticker()
2237 lll = &conn->lll; in ull_conn_update_parameters()
2242 instant_latency = (event_counter - instant) & 0xFFFF; in ull_conn_update_parameters()
2245 ticks_at_expire = conn->llcp.prep.ticks_at_expire; in ull_conn_update_parameters()
2249 if (conn->ull.ticks_prepare_to_start & XON_BITMASK) { in ull_conn_update_parameters()
2251 MAX(conn->ull.ticks_active_to_start, conn->ull.ticks_preempt_to_start); in ull_conn_update_parameters()
2253 conn->ull.ticks_prepare_to_start &= ~XON_BITMASK; in ull_conn_update_parameters()
2255 ticks_at_expire -= (conn->ull.ticks_prepare_to_start - ticks_prepare_to_start); in ull_conn_update_parameters()
2260 ready_delay_us = lll_radio_tx_ready_delay_get(lll->phy_tx, in ull_conn_update_parameters()
2261 lll->phy_flags); in ull_conn_update_parameters()
2267 if (lll->interval >= BT_HCI_LE_INTERVAL_MIN) { in ull_conn_update_parameters()
2268 conn_interval_old = instant_latency * lll->interval; in ull_conn_update_parameters()
2271 conn_interval_old = instant_latency * (lll->interval + 1U); in ull_conn_update_parameters()
2282 lll->tifs_tx_us = EVENT_IFS_DEFAULT_US; in ull_conn_update_parameters()
2283 lll->tifs_rx_us = EVENT_IFS_DEFAULT_US; in ull_conn_update_parameters()
2284 lll->tifs_hcto_us = EVENT_IFS_DEFAULT_US; in ull_conn_update_parameters()
2288 max_tx_time = lll->dle.eff.max_tx_time; in ull_conn_update_parameters()
2289 max_rx_time = lll->dle.eff.max_rx_time; in ull_conn_update_parameters()
2297 max_tx_time = MAX(max_tx_time, PDU_DC_MAX_US(PDU_DC_PAYLOAD_SIZE_MIN, lll->phy_tx)); in ull_conn_update_parameters()
2298 max_rx_time = MAX(max_rx_time, PDU_DC_MAX_US(PDU_DC_PAYLOAD_SIZE_MIN, lll->phy_rx)); in ull_conn_update_parameters()
2306 slot_us += lll->tifs_rx_us + (EVENT_CLOCK_JITTER_US << 1); in ull_conn_update_parameters()
2310 (lll->role == BT_HCI_ROLE_CENTRAL)) { in ull_conn_update_parameters()
2314 conn->ull.ticks_slot = HAL_TICKER_US_TO_TICKS_CEIL(slot_us); in ull_conn_update_parameters()
2319 lll->tifs_tx_us = CONFIG_BT_CTLR_EVENT_IFS_LOW_LAT_US; in ull_conn_update_parameters()
2320 lll->tifs_rx_us = CONFIG_BT_CTLR_EVENT_IFS_LOW_LAT_US; in ull_conn_update_parameters()
2321 lll->tifs_hcto_us = CONFIG_BT_CTLR_EVENT_IFS_LOW_LAT_US; in ull_conn_update_parameters()
2326 conn->ull.ticks_slot = in ull_conn_update_parameters()
2338 conn_interval_new_us - conn_interval_old_us); in ull_conn_update_parameters()
2340 ticks_at_expire -= HAL_TICKER_US_TO_TICKS( in ull_conn_update_parameters()
2341 conn_interval_old_us - conn_interval_new_us); in ull_conn_update_parameters()
2344 lll->latency_prepare += conn->llcp.prep.lazy; in ull_conn_update_parameters()
2345 lll->latency_prepare -= (instant_latency - latency_upd); in ull_conn_update_parameters()
2350 MAX(conn->ull.ticks_active_to_start, in ull_conn_update_parameters()
2351 conn->ull.ticks_prepare_to_start); in ull_conn_update_parameters()
2358 switch (lll->role) { in ull_conn_update_parameters()
2361 lll->periph.window_widening_prepare_us -= in ull_conn_update_parameters()
2362 lll->periph.window_widening_periodic_us * instant_latency; in ull_conn_update_parameters()
2364 lll->periph.window_widening_periodic_us = in ull_conn_update_parameters()
2366 lll_clock_ppm_get(conn->periph.sca)) * in ull_conn_update_parameters()
2368 lll->periph.window_widening_max_us = (conn_interval_us >> 1U) - EVENT_IFS_US; in ull_conn_update_parameters()
2369 lll->periph.window_size_prepare_us = win_size * CONN_INT_UNIT_US; in ull_conn_update_parameters()
2372 conn->periph.ticks_to_offset = 0U; in ull_conn_update_parameters()
2375 lll->periph.window_widening_prepare_us += in ull_conn_update_parameters()
2376 lll->periph.window_widening_periodic_us * latency_upd; in ull_conn_update_parameters()
2377 if (lll->periph.window_widening_prepare_us > lll->periph.window_widening_max_us) { in ull_conn_update_parameters()
2378 lll->periph.window_widening_prepare_us = lll->periph.window_widening_max_us; in ull_conn_update_parameters()
2381 ticks_at_expire -= HAL_TICKER_US_TO_TICKS(lll->periph.window_widening_periodic_us * in ull_conn_update_parameters()
2385 periodic_us -= lll->periph.window_widening_periodic_us; in ull_conn_update_parameters()
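
The window-widening hits are the standard sleep-clock-accuracy math: both devices' clock error in ppm accumulates over one interval, so the peripheral widens its Rx window by interval_us * total_ppm / 10^6 every event, capped at half the interval minus the inter-frame space. Worked numbers under assumed 50 ppm clocks on both sides (the round-up is an assumption):

#include <stdint.h>
#include <stdio.h>

#define EVENT_IFS_US 150U  /* BLE inter-frame space */

int main(void)
{
        uint32_t local_ppm = 50U, peer_ppm = 50U;  /* assumed SCAs */
        uint32_t interval_us = 50U * 1000U;        /* 50 ms interval */

        /* Drift of both clocks over one interval, rounded up to us. */
        uint32_t widening_us = ((local_ppm + peer_ppm) * interval_us +
                                999999U) / 1000000U;

        /* Cap: never widen past half the interval minus the IFS. */
        uint32_t widening_max_us = (interval_us >> 1) - EVENT_IFS_US;

        printf("widen %u us/event, max %u us\n",
               widening_us, widening_max_us); /* 5 us, 24850 us */
        return 0;
}
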
2404 lll->interval = interval; in ull_conn_update_parameters()
2405 lll->latency = latency; in ull_conn_update_parameters()
2407 conn->supervision_timeout = timeout; in ull_conn_update_parameters()
2412 conn->apto_reload = RADIO_CONN_EVENTS((30U * 1000U * 1000U), conn_interval_us); in ull_conn_update_parameters()
2417 conn->appto_reload = (conn->apto_reload > (lll->latency + 6U)) ? in ull_conn_update_parameters()
2418 (conn->apto_reload - (lll->latency + 6U)) : in ull_conn_update_parameters()
2419 conn->apto_reload; in ull_conn_update_parameters()
2423 conn->supervision_expire = 0U; in ull_conn_update_parameters()
2430 conn->cancel_prepare = 1U; in ull_conn_update_parameters()
2441 lll = &conn->lll; in ull_conn_update_peer_sca()
2444 if (lll->interval >= BT_HCI_LE_INTERVAL_MIN) { in ull_conn_update_peer_sca()
2445 conn_interval_us = lll->interval * in ull_conn_update_peer_sca()
2448 conn_interval_us = (lll->interval + 1U) * in ull_conn_update_peer_sca()
2453 lll->periph.window_widening_periodic_us = in ull_conn_update_peer_sca()
2455 lll_clock_ppm_get(conn->periph.sca)) * in ull_conn_update_peer_sca()
2458 periodic_us -= lll->periph.window_widening_periodic_us; in ull_conn_update_peer_sca()
2462 conn->llcp.prep.ticks_at_expire); in ull_conn_update_peer_sca()
2469 struct lll_conn *lll = &conn->lll; in ull_conn_chan_map_set()
2471 memcpy(lll->data_chan_map, chm, sizeof(lll->data_chan_map)); in ull_conn_chan_map_set()
2472 lll->data_chan_count = util_ones_count_get(lll->data_chan_map, sizeof(lll->data_chan_map)); in ull_conn_chan_map_set()
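
ull_conn_chan_map_set() stores the 37-bit channel map (5 octets) and recounts the usable data channels with util_ones_count_get(). A self-contained popcount equivalent:

#include <stdint.h>
#include <stdio.h>

/* Popcount over the 5-octet (37-bit) data channel map, equivalent to
 * the util_ones_count_get() call in the hit above.
 */
static uint8_t chan_count(const uint8_t chm[5])
{
        uint8_t count = 0U;

        for (int i = 0; i < 5; i++) {
                for (uint8_t octet = chm[i]; octet; octet >>= 1) {
                        count += octet & 0x01U;
                }
        }

        return count;
}

int main(void)
{
        /* All 37 data channels usable: bits 0..36 set. */
        const uint8_t chm[5] = { 0xff, 0xff, 0xff, 0xff, 0x1f };

        printf("data_chan_count = %u\n", chan_count(chm)); /* 37 */
        return 0;
}
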
2484 if (conn->llcp.fex.valid && feature_phy_coded(conn)) { in dle_max_time_get()
2496 tx_time = MIN(conn->lll.dle.default_tx_time, in dle_max_time_get()
2499 tx_time = PDU_DC_MAX_US(conn->lll.dle.default_tx_octets, phy_select); in dle_max_time_get()
2532 conn->lll.evt_len_upd = 1U; in ull_dle_update_eff()
2545 MAX(MIN(conn->lll.dle.local.max_rx_octets, conn->lll.dle.remote.max_tx_octets), in ull_dle_update_eff_rx()
2549 unsigned int min_eff_rx_time = (conn->lll.phy_rx == PHY_CODED) ? in ull_dle_update_eff_rx()
2553 MAX(MIN(conn->lll.dle.local.max_rx_time, conn->lll.dle.remote.max_tx_time), in ull_dle_update_eff_rx()
2556 if (eff_rx_time != conn->lll.dle.eff.max_rx_time) { in ull_dle_update_eff_rx()
2557 conn->lll.dle.eff.max_rx_time = eff_rx_time; in ull_dle_update_eff_rx()
2561 conn->lll.dle.eff.max_rx_time = PDU_DC_MAX_US(eff_rx_octets, PHY_1M); in ull_dle_update_eff_rx()
2564 if (eff_rx_octets != conn->lll.dle.eff.max_rx_octets) { in ull_dle_update_eff_rx()
2565 conn->lll.dle.eff.max_rx_octets = eff_rx_octets; in ull_dle_update_eff_rx()
2571 conn->lll.evt_len_upd_delayed = 1; in ull_dle_update_eff_rx()
2584 MAX(MIN(conn->lll.dle.local.max_tx_octets, conn->lll.dle.remote.max_rx_octets), in ull_dle_update_eff_tx()
2588 unsigned int min_eff_tx_time = (conn->lll.phy_tx == PHY_CODED) ? in ull_dle_update_eff_tx()
2592 MAX(MIN(conn->lll.dle.local.max_tx_time, conn->lll.dle.remote.max_rx_time), in ull_dle_update_eff_tx()
2595 if (eff_tx_time != conn->lll.dle.eff.max_tx_time) { in ull_dle_update_eff_tx()
2596 conn->lll.dle.eff.max_tx_time = eff_tx_time; in ull_dle_update_eff_tx()
2600 conn->lll.dle.eff.max_tx_time = PDU_DC_MAX_US(eff_tx_octets, PHY_1M); in ull_dle_update_eff_tx()
2603 if (eff_tx_octets != conn->lll.dle.eff.max_tx_octets) { in ull_dle_update_eff_tx()
2604 conn->lll.dle.eff.max_tx_octets = eff_tx_octets; in ull_dle_update_eff_tx()
2610 conn->lll.evt_len_upd = 1U; in ull_dle_update_eff_tx()
2612 conn->lll.evt_len_upd |= conn->lll.evt_len_upd_delayed; in ull_dle_update_eff_tx()
2613 conn->lll.evt_len_upd_delayed = 0; in ull_dle_update_eff_tx()
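
The effective data-length fields follow the spec rule "minimum of the local and remote maxima, but never below the 27-octet / 328 us floor", as the MAX(MIN(...)) hits above show. A compact restatement with hypothetical values:

#include <stdint.h>
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))
#define MAX(a, b) ((a) > (b) ? (a) : (b))

#define PDU_DC_PAYLOAD_SIZE_MIN 27U  /* spec floor for effective octets */

int main(void)
{
        /* Hypothetical: we can send 251 octets, the peer can only
         * receive 100, so the effective Tx length settles at 100.
         */
        uint16_t local_max_tx = 251U, remote_max_rx = 100U;
        uint16_t eff_tx = MAX(MIN(local_max_tx, remote_max_rx),
                              PDU_DC_PAYLOAD_SIZE_MIN);

        printf("eff_tx_octets = %u\n", eff_tx); /* 100 */
        return 0;
}
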
2644 conn->lll.dle.default_tx_octets = tx_octets; in ull_dle_local_tx_update()
2647 conn->lll.dle.default_tx_time = tx_time; in ull_dle_local_tx_update()
2650 dle_max_time_get(conn, &conn->lll.dle.local.max_rx_time, &conn->lll.dle.local.max_tx_time); in ull_dle_local_tx_update()
2651 conn->lll.dle.local.max_tx_octets = conn->lll.dle.default_tx_octets; in ull_dle_local_tx_update()
2662 memset(&conn->lll.dle, 0, sizeof(conn->lll.dle)); in ull_dle_init()
2663 /* See BT. 5.2 Spec - Vol 6, Part B, Sect 4.5.10 in ull_dle_init()
2668 conn->lll.dle.local.max_rx_octets = LL_LENGTH_OCTETS_RX_MAX; in ull_dle_init()
2670 conn->lll.dle.local.max_rx_time = max_time_max; in ull_dle_init()
2674 conn->lll.dle.remote.max_tx_octets = PDU_DC_PAYLOAD_SIZE_MIN; in ull_dle_init()
2675 conn->lll.dle.remote.max_rx_octets = PDU_DC_PAYLOAD_SIZE_MIN; in ull_dle_init()
2678 conn->lll.dle.remote.max_tx_time = max_time_min; in ull_dle_init()
2679 conn->lll.dle.remote.max_rx_time = max_time_min; in ull_dle_init()
2693 if ((conn->lll.dle.local.max_rx_time != max_time_min || in ull_dle_init()
2694 conn->lll.dle.local.max_tx_time != max_time_min)) { in ull_dle_init()
2695 conn->lll.dle.update = 1; in ull_dle_init()
2699 if (conn->lll.dle.local.max_tx_octets != PDU_DC_PAYLOAD_SIZE_MIN || in ull_dle_init()
2700 conn->lll.dle.local.max_rx_octets != PDU_DC_PAYLOAD_SIZE_MIN) { in ull_dle_init()
2701 conn->lll.dle.update = 1; in ull_dle_init()
2777 (sync_remainder_us - start_us)); in get_ticker_offset()
2809 pa_event_counter = adv_sync->lll.event_counter; in mfy_past_sender_offset_get()
2810 last_pa_event_counter = pa_event_counter - 1; in mfy_past_sender_offset_get()
2813 uint32_t interval_us = sync->interval * PERIODIC_INT_UNIT_US; in mfy_past_sender_offset_get()
2824 lazy = lazy - (ticker_offset_us / interval_us); in mfy_past_sender_offset_get()
2831 window_widening_event_us = sync->lll.window_widening_event_us + in mfy_past_sender_offset_get()
2832 sync->lll.window_widening_periodic_us * (lazy + 1U); in mfy_past_sender_offset_get()
2837 pa_event_counter = sync->lll.event_counter + lazy; in mfy_past_sender_offset_get()
2839 last_pa_event_counter = pa_event_counter - 1 - lazy; in mfy_past_sender_offset_get()
2842 if (sync->timeout_expire) { in mfy_past_sender_offset_get()
2843 last_pa_event_counter -= sync->timeout_reload - sync->timeout_expire; in mfy_past_sender_offset_get()
2867 if (!(phys & (conn->lll.phy_tx | conn->lll.phy_rx))) {
2878 return (memq_peek(conn->lll.memq_tx.head, conn->lll.memq_tx.tail, NULL) == NULL);