
Searched refs:txq_map (Results 1 – 18 of 18) sorted by relevance

/Linux-v4.19/drivers/net/wireless/ath/ath9k/
link.c 32 txq = sc->tx.txq_map[i]; in ath_tx_complete_check()
182 txctl.txq = sc->tx.txq_map[IEEE80211_AC_BE]; in ath_paprd_send_frame()
debug.h 195 #define PR_QNUM(_n) sc->tx.txq_map[_n]->axq_qnum
tx99.c 128 txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO]; in ath9k_tx99_init()
init.c 417 sc->tx.txq_map[i] = ath_txq_setup(sc, ATH9K_TX_QUEUE_DATA, i); in ath9k_init_queues()
418 sc->tx.txq_map[i]->mac80211_qnum = i; in ath9k_init_queues()
gpio.c 429 txq = sc->tx.txq_map[IEEE80211_AC_BE]; in ath9k_init_btcoex()
xmit.c 207 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
249 if (tid->txq == sc->tx.txq_map[q]) { in ath_tid_pull()
2054 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
2402 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2905 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
beacon.c 50 txq = sc->tx.txq_map[IEEE80211_AC_BE]; in ath9k_beaconq_config()
channel.c 1016 txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO]; in ath_scan_send_probe()
1137 txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO]; in ath_chanctx_send_vif_ps_frame()
ath9k.h 298 struct ath_txq *txq_map[IEEE80211_NUM_ACS]; member
main.c 805 txctl.txq = sc->tx.txq_map[skb_get_queue_mapping(skb)]; in ath9k_tx()
1641 txq = sc->tx.txq_map[queue]; in ath9k_conf_tx()
debug.c 647 txq = sc->tx.txq_map[i]; in read_file_queues()
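
The ath9k hits above share one pattern: txq_map[] is a small per-access-category lookup table, filled once in ath9k_init_queues() and then indexed by the mac80211 queue number (or a fixed AC such as IEEE80211_AC_BE) on every transmit path. A minimal user-space sketch of that shape; NUM_ACS, struct txq and the queue numbering here are illustrative stand-ins, not the driver's definitions:

#include <stdio.h>

#define NUM_ACS 4  /* mirrors IEEE80211_NUM_ACS: VO, VI, BE, BK */

struct txq { int axq_qnum; int mac80211_qnum; };

static struct txq hw_queues[NUM_ACS];
static struct txq *txq_map[NUM_ACS];   /* AC index -> hardware queue */

/* Fill the map once, as ath9k_init_queues() does per AC. */
static void init_queues(void)
{
    for (int i = 0; i < NUM_ACS; i++) {
        hw_queues[i].axq_qnum = 10 - i;   /* arbitrary hw numbering */
        hw_queues[i].mac80211_qnum = i;
        txq_map[i] = &hw_queues[i];
    }
}

int main(void)
{
    init_queues();
    /* Transmit paths then do the equivalent of txq_map[IEEE80211_AC_BE]. */
    for (int ac = 0; ac < NUM_ACS; ac++)
        printf("AC %d -> hw queue %d\n", ac, txq_map[ac]->axq_qnum);
    return 0;
}
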
/Linux-v4.19/drivers/net/ethernet/intel/ice/
ice.h 206 u16 txq_map[ICE_MAX_TXQS]; /* index in pf->avail_txqs */ member
ice_main.c 1487 wr32(hw, QINT_TQCTL(vsi->txq_map[txq]), 0); in ice_vsi_release_msix()
1546 ring->reg_idx = vsi->txq_map[i]; in ice_vsi_alloc_rings()
1675 wr32(hw, QINT_TQCTL(vsi->txq_map[txq]), val); in ice_vsi_cfg_msix()
2114 vsi->txq_map[i] = i + offset; in ice_vsi_get_qs_contig()
2157 vsi->txq_map[i] = index; in ice_vsi_get_qs_scatter()
2190 clear_bit(vsi->txq_map[index], pf->avail_txqs); in ice_vsi_get_qs_scatter()
2191 vsi->txq_map[index] = 0; in ice_vsi_get_qs_scatter()
2241 clear_bit(vsi->txq_map[i], pf->avail_txqs); in ice_vsi_put_qs()
2242 vsi->txq_map[i] = ICE_INVAL_Q_INDEX; in ice_vsi_put_qs()
3949 pf_q = vsi->txq_map[i]; in ice_vsi_cfg_txqs()
[all …]
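
In ice, txq_map[] translates a VSI-relative ring index into a PF-wide queue number, handed out either contiguously (txq_map[i] = i + offset) or scattered from whatever indices are free, and poisoned with ICE_INVAL_Q_INDEX on release. A toy version of that bookkeeping; the q_in_use[] array is only a stand-in for the pf->avail_txqs bitmap, whose bit polarity in the real driver may differ:

#include <stdio.h>
#include <stdbool.h>

#define PF_MAX_TXQS   16      /* stand-in for ICE_MAX_TXQS */
#define INVAL_Q_INDEX 0xFFFF  /* stand-in for ICE_INVAL_Q_INDEX */

static bool q_in_use[PF_MAX_TXQS];  /* toy stand-in for pf->avail_txqs */

/* Contiguous grab, like ice_vsi_get_qs_contig(): txq_map[i] = i + offset. */
static void get_qs_contig(unsigned short *txq_map, int n, int offset)
{
    for (int i = 0; i < n; i++) {
        q_in_use[i + offset] = true;
        txq_map[i] = (unsigned short)(i + offset);
    }
}

/* Scattered grab, like ice_vsi_get_qs_scatter(): take any free index. */
static int get_qs_scatter(unsigned short *txq_map, int n)
{
    int found = 0;

    for (int idx = 0; idx < PF_MAX_TXQS && found < n; idx++) {
        if (q_in_use[idx])
            continue;
        q_in_use[idx] = true;
        txq_map[found++] = (unsigned short)idx;
    }
    return found == n ? 0 : -1;
}

/* Release, like ice_vsi_put_qs(): free the index, poison the slot. */
static void put_qs(unsigned short *txq_map, int n)
{
    for (int i = 0; i < n; i++) {
        q_in_use[txq_map[i]] = false;
        txq_map[i] = INVAL_Q_INDEX;
    }
}

int main(void)
{
    unsigned short vsi_txq_map[4];

    get_qs_contig(vsi_txq_map, 2, 5);
    get_qs_scatter(vsi_txq_map + 2, 2);  /* lands on 0 and 1 here */
    for (int i = 0; i < 4; i++)
        printf("vsi txq %d -> pf queue %u\n", i, vsi_txq_map[i]);
    put_qs(vsi_txq_map, 4);
    return 0;
}
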
/Linux-v4.19/drivers/net/ethernet/cisco/enic/
enic_main.c 860 unsigned int txq_map; in enic_hard_start_xmit() local
868 txq_map = skb_get_queue_mapping(skb) % enic->wq_count; in enic_hard_start_xmit()
869 wq = &enic->wq[txq_map]; in enic_hard_start_xmit()
870 txq = netdev_get_tx_queue(netdev, txq_map); in enic_hard_start_xmit()
884 spin_lock(&enic->wq_lock[txq_map]); in enic_hard_start_xmit()
891 spin_unlock(&enic->wq_lock[txq_map]); in enic_hard_start_xmit()
903 spin_unlock(&enic->wq_lock[txq_map]); in enic_hard_start_xmit()
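
enic picks its work queue by folding the stack's queue mapping onto the number of hardware WQs, then reuses that one index for the WQ, the netdev TX queue and the per-queue lock alike. The fold itself is a one-liner; a compilable sketch with WQ_COUNT standing in for enic->wq_count:

#include <stdio.h>

#define WQ_COUNT 4  /* stand-in for enic->wq_count */

/* Mirror of the enic_hard_start_xmit() selection:
 * txq_map = skb_get_queue_mapping(skb) % enic->wq_count */
static unsigned int pick_txq(unsigned int queue_mapping)
{
    return queue_mapping % WQ_COUNT;
}

int main(void)
{
    for (unsigned int q = 0; q < 6; q++)
        printf("skb queue %u -> wq %u\n", q, pick_txq(q));
    return 0;
}
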
/Linux-v4.19/include/linux/avf/
virtchnl.h 365 u16 txq_map; member
/Linux-v4.19/drivers/net/ethernet/marvell/
mvneta.c 1383 int rxq_map = 0, txq_map = 0; in mvneta_defaults_set() local
1392 txq_map |= MVNETA_CPU_TXQ_ACCESS(txq); in mvneta_defaults_set()
1399 txq_map = (cpu == pp->rxq_def) ? in mvneta_defaults_set()
1403 txq_map = MVNETA_CPU_TXQ_ACCESS_ALL_MASK; in mvneta_defaults_set()
1407 mvreg_write(pp, MVNETA_CPU_MAP(cpu), rxq_map | txq_map); in mvneta_defaults_set()
3612 int rxq_map = 0, txq_map = 0; in mvneta_percpu_elect() local
3630 txq_map = (cpu == elected_cpu) ? in mvneta_percpu_elect()
3633 txq_map = mvreg_read(pp, MVNETA_CPU_MAP(cpu)) & in mvneta_percpu_elect()
3636 mvreg_write(pp, MVNETA_CPU_MAP(cpu), rxq_map | txq_map); in mvneta_percpu_elect()
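
mvneta's txq_map is not a lookup table but a per-CPU access bitmask: one bit per TX queue, OR-combined with the RX mask and written to the MVNETA_CPU_MAP(cpu) register. A sketch of that mask-building loop; the bit positions in CPU_TXQ_ACCESS()/CPU_RXQ_ACCESS() are invented here, only the pattern matches the driver:

#include <stdio.h>

/* Illustrative stand-ins for the MVNETA_CPU_TXQ_ACCESS() /
 * MVNETA_CPU_RXQ_ACCESS() bit helpers in mvneta.c. */
#define CPU_TXQ_ACCESS(q) (1u << ((q) + 8))
#define CPU_RXQ_ACCESS(q) (1u << (q))

int main(void)
{
    unsigned int rxq_map = 0, txq_map = 0;

    /* Grant this CPU access to every TX queue, one bit per queue,
     * as mvneta_defaults_set() does in its per-queue loop. */
    for (int txq = 0; txq < 8; txq++)
        txq_map |= CPU_TXQ_ACCESS(txq);

    rxq_map |= CPU_RXQ_ACCESS(0);

    /* The combined mask is what gets written to MVNETA_CPU_MAP(cpu). */
    printf("MVNETA_CPU_MAP value: 0x%08x\n", rxq_map | txq_map);
    return 0;
}
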
/Linux-v4.19/drivers/net/ethernet/intel/i40evf/
i40evf_virtchnl.c 353 vecmap->txq_map = q_vector->ring_mask; in i40evf_map_queues()
362 vecmap->txq_map = 0; in i40evf_map_queues()
/Linux-v4.19/drivers/net/ethernet/intel/i40e/
i40e_virtchnl_pf.c 298 if (vecmap->rxq_map == 0 && vecmap->txq_map == 0) { in i40e_config_irq_link_list()
309 tempmap = vecmap->txq_map; in i40e_config_irq_link_list()
2143 if (i40e_validate_queue_map(vf, vsi_id, map->txq_map)) { in i40e_vc_config_irq_map_msg()
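
In virtchnl the meaning shifts again: txq_map is a u16 bitmask naming which TX queues hang off one interrupt vector. The i40evf side fills it from q_vector->ring_mask, and the i40e PF walks its set bits (tempmap = vecmap->txq_map) to build the IRQ link list. A minimal walk over such a mask, with the example value chosen arbitrarily:

#include <stdio.h>

int main(void)
{
    unsigned short txq_map = 0x0015;  /* example: queues 0, 2 and 4 */

    /* Visit each TX queue bound to this vector, one set bit at a time. */
    for (int q = 0; q < 16; q++)
        if (txq_map & (1u << q))
            printf("vector serves tx queue %d\n", q);
    return 0;
}
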