Lines Matching refs:tid
310 sta->addr, ba_data->tid); in iwl_mvm_rx_agg_session_expired()
365 int queue, u8 tid, u8 flags) in iwl_mvm_disable_txq() argument
382 mvm->queue_info[queue].tid_bitmap &= ~BIT(tid); in iwl_mvm_disable_txq()
399 cmd.tid = mvm->queue_info[queue].txq_tid; in iwl_mvm_disable_txq()
411 iwl_mvm_txq_from_tid(sta, tid); in iwl_mvm_disable_txq()
436 int tid; in iwl_mvm_get_queue_agg_tids() local
455 for_each_set_bit(tid, &tid_bitmap, IWL_MAX_TID_COUNT + 1) { in iwl_mvm_get_queue_agg_tids()
456 if (mvmsta->tid_data[tid].state == IWL_AGG_ON) in iwl_mvm_get_queue_agg_tids()
457 agg_tids |= BIT(tid); in iwl_mvm_get_queue_agg_tids()
476 int tid; in iwl_mvm_remove_sta_queue_marking() local
499 for_each_set_bit(tid, &tid_bitmap, IWL_MAX_TID_COUNT + 1) { in iwl_mvm_remove_sta_queue_marking()
501 iwl_mvm_txq_from_tid(sta, tid); in iwl_mvm_remove_sta_queue_marking()
503 if (mvmsta->tid_data[tid].state == IWL_AGG_ON) in iwl_mvm_remove_sta_queue_marking()
504 disable_agg_tids |= BIT(tid); in iwl_mvm_remove_sta_queue_marking()
505 mvmsta->tid_data[tid].txq_id = IWL_MVM_INVALID_QUEUE; in iwl_mvm_remove_sta_queue_marking()
533 u8 sta_id, tid; in iwl_mvm_free_inactive_queue() local
544 tid = mvm->queue_info[queue].txq_tid; in iwl_mvm_free_inactive_queue()
558 ret = iwl_mvm_disable_txq(mvm, old_sta, queue, tid, 0); in iwl_mvm_free_inactive_queue()
648 static int iwl_mvm_redirect_queue(struct iwl_mvm *mvm, int queue, int tid, in iwl_mvm_redirect_queue() argument
679 cmd.tid = mvm->queue_info[queue].txq_tid; in iwl_mvm_redirect_queue()
707 mvm->queue_info[queue].txq_tid = tid; in iwl_mvm_redirect_queue()
713 cmd.sta_id, tid, IWL_FRAME_LIMIT, ssn); in iwl_mvm_redirect_queue()
760 u8 sta_id, u8 tid, unsigned int timeout) in iwl_mvm_tvqm_enable_txq() argument
765 if (tid == IWL_MAX_TID_COUNT) { in iwl_mvm_tvqm_enable_txq()
766 tid = IWL_MGMT_TID; in iwl_mvm_tvqm_enable_txq()
775 sta_id, tid, SCD_QUEUE_CFG, in iwl_mvm_tvqm_enable_txq()
781 size, sta_id, tid, queue); in iwl_mvm_tvqm_enable_txq()
789 queue, sta_id, tid); in iwl_mvm_tvqm_enable_txq()
796 int tid) in iwl_mvm_sta_alloc_queue_tvqm() argument
800 iwl_mvm_txq_from_tid(sta, tid); in iwl_mvm_sta_alloc_queue_tvqm()
809 mvmsta->sta_id, tid); in iwl_mvm_sta_alloc_queue_tvqm()
810 queue = iwl_mvm_tvqm_enable_txq(mvm, mvmsta->sta_id, tid, wdg_timeout); in iwl_mvm_sta_alloc_queue_tvqm()
815 mvm->tvqm_info[queue].txq_tid = tid; in iwl_mvm_sta_alloc_queue_tvqm()
821 mvmsta->tid_data[tid].txq_id = queue; in iwl_mvm_sta_alloc_queue_tvqm()
829 int queue, u8 sta_id, u8 tid) in iwl_mvm_update_txq_mapping() argument
834 if (mvm->queue_info[queue].tid_bitmap & BIT(tid)) { in iwl_mvm_update_txq_mapping()
836 queue, tid); in iwl_mvm_update_txq_mapping()
844 mvm->queue_info[queue].tid_bitmap |= BIT(tid); in iwl_mvm_update_txq_mapping()
848 if (tid != IWL_MAX_TID_COUNT) in iwl_mvm_update_txq_mapping()
850 tid_to_mac80211_ac[tid]; in iwl_mvm_update_txq_mapping()
854 mvm->queue_info[queue].txq_tid = tid; in iwl_mvm_update_txq_mapping()
859 iwl_mvm_txq_from_tid(sta, tid); in iwl_mvm_update_txq_mapping()
884 .tid = cfg->tid, in iwl_mvm_enable_txq()
892 if (!iwl_mvm_update_txq_mapping(mvm, sta, queue, cfg->sta_id, cfg->tid)) in iwl_mvm_enable_txq()
912 int tid; in iwl_mvm_change_queue_tid() local
927 tid = find_first_bit(&tid_bitmap, IWL_MAX_TID_COUNT + 1); in iwl_mvm_change_queue_tid()
928 cmd.tid = tid; in iwl_mvm_change_queue_tid()
929 cmd.tx_fifo = iwl_mvm_ac_to_tx_fifo[tid_to_mac80211_ac[tid]]; in iwl_mvm_change_queue_tid()
938 mvm->queue_info[queue].txq_tid = tid; in iwl_mvm_change_queue_tid()
940 queue, tid); in iwl_mvm_change_queue_tid()
948 int tid = -1; in iwl_mvm_unshare_queue() local
964 tid = find_first_bit(&tid_bitmap, IWL_MAX_TID_COUNT + 1); in iwl_mvm_unshare_queue()
965 if (tid_bitmap != BIT(tid)) { in iwl_mvm_unshare_queue()
972 tid); in iwl_mvm_unshare_queue()
983 ssn = IEEE80211_SEQ_TO_SN(mvmsta->tid_data[tid].seq_number); in iwl_mvm_unshare_queue()
985 ret = iwl_mvm_redirect_queue(mvm, queue, tid, in iwl_mvm_unshare_queue()
986 tid_to_mac80211_ac[tid], ssn, in iwl_mvm_unshare_queue()
988 iwl_mvm_txq_from_tid(sta, tid)); in iwl_mvm_unshare_queue()
995 if (mvmsta->tid_data[tid].state == IWL_AGG_ON) { in iwl_mvm_unshare_queue()
998 mvmsta->tid_disable_agg &= ~BIT(tid); in iwl_mvm_unshare_queue()
1035 int tid; in iwl_mvm_remove_inactive_tids() local
1044 for_each_set_bit(tid, &tid_bitmap, IWL_MAX_TID_COUNT + 1) { in iwl_mvm_remove_inactive_tids()
1046 if (iwl_mvm_tid_queued(mvm, &mvmsta->tid_data[tid])) in iwl_mvm_remove_inactive_tids()
1047 tid_bitmap &= ~BIT(tid); in iwl_mvm_remove_inactive_tids()
1050 if (mvmsta->tid_data[tid].state != IWL_AGG_OFF) in iwl_mvm_remove_inactive_tids()
1051 tid_bitmap &= ~BIT(tid); in iwl_mvm_remove_inactive_tids()
1064 for_each_set_bit(tid, &tid_bitmap, IWL_MAX_TID_COUNT + 1) { in iwl_mvm_remove_inactive_tids()
1067 mvmsta->tid_data[tid].txq_id = IWL_MVM_INVALID_QUEUE; in iwl_mvm_remove_inactive_tids()
1068 mvm->queue_info[queue].tid_bitmap &= ~BIT(tid); in iwl_mvm_remove_inactive_tids()
1088 tid, queue); in iwl_mvm_remove_inactive_tids()
1143 int tid; in iwl_mvm_inactivity_check() local
1157 for_each_set_bit(tid, &queue_tid_bitmap, in iwl_mvm_inactivity_check()
1159 if (time_after(mvm->queue_info[i].last_frame_time[tid] + in iwl_mvm_inactivity_check()
1163 inactive_tid_bitmap |= BIT(tid); in iwl_mvm_inactivity_check()
1221 struct ieee80211_sta *sta, u8 ac, int tid) in iwl_mvm_sta_alloc_queue() argument
1227 .tid = tid, in iwl_mvm_sta_alloc_queue()
1243 return iwl_mvm_sta_alloc_queue_tvqm(mvm, sta, ac, tid); in iwl_mvm_sta_alloc_queue()
1247 ssn = IEEE80211_SEQ_TO_SN(mvmsta->tid_data[tid].seq_number); in iwl_mvm_sta_alloc_queue()
1250 if (tid == IWL_MAX_TID_COUNT) { in iwl_mvm_sta_alloc_queue()
1299 tid, cfg.sta_id); in iwl_mvm_sta_alloc_queue()
1315 mvmsta->sta_id, tid); in iwl_mvm_sta_alloc_queue()
1347 mvmsta->tid_data[tid].seq_number += 0x10; in iwl_mvm_sta_alloc_queue()
1350 mvmsta->tid_data[tid].txq_id = queue; in iwl_mvm_sta_alloc_queue()
1352 queue_state = mvmsta->tid_data[tid].state; in iwl_mvm_sta_alloc_queue()
1365 ret = iwl_mvm_sta_tx_agg(mvm, sta, tid, queue, true); in iwl_mvm_sta_alloc_queue()
1371 ret = iwl_mvm_redirect_queue(mvm, queue, tid, ac, ssn, in iwl_mvm_sta_alloc_queue()
1373 iwl_mvm_txq_from_tid(sta, tid)); in iwl_mvm_sta_alloc_queue()
1381 iwl_mvm_disable_txq(mvm, sta, queue, tid, 0); in iwl_mvm_sta_alloc_queue()
1398 u8 tid; in iwl_mvm_add_new_dqa_stream_wk() local
1405 tid = txq->tid; in iwl_mvm_add_new_dqa_stream_wk()
1406 if (tid == IEEE80211_NUM_TIDS) in iwl_mvm_add_new_dqa_stream_wk()
1407 tid = IWL_MAX_TID_COUNT; in iwl_mvm_add_new_dqa_stream_wk()
1415 if (iwl_mvm_sta_alloc_queue(mvm, txq->sta, txq->ac, tid)) { in iwl_mvm_add_new_dqa_stream_wk()
1530 cfg.tid = i; in iwl_mvm_realloc_queues_after_restart()
2001 .tid = IWL_MAX_TID_COUNT, in iwl_mvm_enable_aux_snif_queue()
2155 .tid = IWL_MAX_TID_COUNT, in iwl_mvm_send_add_bcast_sta()
2341 .tid = 0, in iwl_mvm_add_mcast_sta()
2564 int tid, u16 ssn, bool start, u16 buf_size, u16 timeout) in iwl_mvm_sta_rx_agg() argument
2624 cmd.add_immediate_ba_tid = (u8) tid; in iwl_mvm_sta_rx_agg()
2628 cmd.remove_immediate_ba_tid = (u8) tid; in iwl_mvm_sta_rx_agg()
2680 baid_data->tid = tid; in iwl_mvm_sta_rx_agg()
2683 mvm_sta->tid_to_baid[tid] = baid; in iwl_mvm_sta_rx_agg()
2696 mvm_sta->sta_id, tid, baid); in iwl_mvm_sta_rx_agg()
2700 u8 baid = mvm_sta->tid_to_baid[tid]; in iwl_mvm_sta_rx_agg()
2730 int tid, u8 queue, bool start) in iwl_mvm_sta_tx_agg() argument
2741 mvm_sta->tid_disable_agg &= ~BIT(tid); in iwl_mvm_sta_tx_agg()
2744 mvm_sta->tid_disable_agg |= BIT(tid); in iwl_mvm_sta_tx_agg()
2800 struct ieee80211_sta *sta, u16 tid, u16 *ssn) in iwl_mvm_sta_tx_agg_start() argument
2808 if (WARN_ON_ONCE(tid >= IWL_MAX_TID_COUNT)) in iwl_mvm_sta_tx_agg_start()
2811 if (mvmsta->tid_data[tid].state != IWL_AGG_QUEUED && in iwl_mvm_sta_tx_agg_start()
2812 mvmsta->tid_data[tid].state != IWL_AGG_OFF) { in iwl_mvm_sta_tx_agg_start()
2815 mvmsta->tid_data[tid].state); in iwl_mvm_sta_tx_agg_start()
2821 if (mvmsta->tid_data[tid].txq_id == IWL_MVM_INVALID_QUEUE && in iwl_mvm_sta_tx_agg_start()
2823 u8 ac = tid_to_mac80211_ac[tid]; in iwl_mvm_sta_tx_agg_start()
2825 ret = iwl_mvm_sta_alloc_queue_tvqm(mvm, sta, ac, tid); in iwl_mvm_sta_tx_agg_start()
2838 txq_id = mvmsta->tid_data[tid].txq_id; in iwl_mvm_sta_tx_agg_start()
2855 tid, IWL_MAX_HW_QUEUES - 1); in iwl_mvm_sta_tx_agg_start()
2863 tid); in iwl_mvm_sta_tx_agg_start()
2869 tid, txq_id); in iwl_mvm_sta_tx_agg_start()
2871 tid_data = &mvmsta->tid_data[tid]; in iwl_mvm_sta_tx_agg_start()
2878 mvmsta->sta_id, tid, txq_id, tid_data->ssn, in iwl_mvm_sta_tx_agg_start()
2904 struct ieee80211_sta *sta, u16 tid, u16 buf_size, in iwl_mvm_sta_tx_agg_oper() argument
2908 struct iwl_mvm_tid_data *tid_data = &mvmsta->tid_data[tid]; in iwl_mvm_sta_tx_agg_oper()
2918 .tid = tid, in iwl_mvm_sta_tx_agg_oper()
2937 mvmsta->agg_tids |= BIT(tid); in iwl_mvm_sta_tx_agg_oper()
2957 ret = iwl_mvm_sta_tx_agg(mvm, sta, tid, queue, true); in iwl_mvm_sta_tx_agg_oper()
2963 cfg.fifo = iwl_mvm_ac_to_tx_fifo[tid_to_mac80211_ac[tid]]; in iwl_mvm_sta_tx_agg_oper()
2989 mvmsta->sta_id, tid, in iwl_mvm_sta_tx_agg_oper()
3004 ret = iwl_mvm_sta_tx_agg(mvm, sta, tid, queue, true); in iwl_mvm_sta_tx_agg_oper()
3025 sta->addr, tid); in iwl_mvm_sta_tx_agg_oper()
3055 struct ieee80211_sta *sta, u16 tid) in iwl_mvm_sta_tx_agg_stop() argument
3058 struct iwl_mvm_tid_data *tid_data = &mvmsta->tid_data[tid]; in iwl_mvm_sta_tx_agg_stop()
3067 ieee80211_stop_tx_ba_cb_irqsafe(vif, sta->addr, tid); in iwl_mvm_sta_tx_agg_stop()
3076 mvmsta->sta_id, tid, txq_id, tid_data->state); in iwl_mvm_sta_tx_agg_stop()
3078 mvmsta->agg_tids &= ~BIT(tid); in iwl_mvm_sta_tx_agg_stop()
3094 ieee80211_stop_tx_ba_cb_irqsafe(vif, sta->addr, tid); in iwl_mvm_sta_tx_agg_stop()
3096 iwl_mvm_sta_tx_agg(mvm, sta, tid, txq_id, false); in iwl_mvm_sta_tx_agg_stop()
3108 ieee80211_stop_tx_ba_cb_irqsafe(vif, sta->addr, tid); in iwl_mvm_sta_tx_agg_stop()
3115 mvmsta->sta_id, tid, tid_data->state); in iwl_mvm_sta_tx_agg_stop()
3127 struct ieee80211_sta *sta, u16 tid) in iwl_mvm_sta_tx_agg_flush() argument
3130 struct iwl_mvm_tid_data *tid_data = &mvmsta->tid_data[tid]; in iwl_mvm_sta_tx_agg_flush()
3141 mvmsta->sta_id, tid, txq_id, tid_data->state); in iwl_mvm_sta_tx_agg_flush()
3144 mvmsta->agg_tids &= ~BIT(tid); in iwl_mvm_sta_tx_agg_flush()
3154 BIT(tid), 0)) in iwl_mvm_sta_tx_agg_flush()
3165 iwl_mvm_sta_tx_agg(mvm, sta, tid, txq_id, false); in iwl_mvm_sta_tx_agg_flush()
3715 int tid, ret; in iwl_mvm_sta_modify_sleep_tx_count() local
3722 for_each_set_bit(tid, &_tids, IWL_MAX_TID_COUNT) in iwl_mvm_sta_modify_sleep_tx_count()
3723 cmd.awake_acs |= BIT(tid_to_ucode_ac[tid]); in iwl_mvm_sta_modify_sleep_tx_count()
3737 for_each_set_bit(tid, &_tids, IWL_MAX_TID_COUNT) { in iwl_mvm_sta_modify_sleep_tx_count()
3741 tid_data = &mvmsta->tid_data[tid]; in iwl_mvm_sta_modify_sleep_tx_count()
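
Note: most of the matches above follow one idiom: a per-queue or per-station TID bitmap is updated with BIT(tid) and walked with for_each_set_bit() (e.g. iwl_mvm_remove_sta_queue_marking(), iwl_mvm_remove_inactive_tids()). The following is a minimal userspace C sketch of that idiom only, not the driver's code; BIT() and the IWL_MAX_TID_COUNT value are defined here purely for illustration.

/*
 * Sketch of the per-TID bitmap pattern seen in the matches above:
 * set/clear BIT(tid) in a bitmap and iterate the set bits, similar to
 * what the driver does with for_each_set_bit(tid, &tid_bitmap, ...).
 * Constants are assumptions for this example, not taken from the driver.
 */
#include <stdio.h>

#define BIT(n)            (1UL << (n))
#define MAX_TID_COUNT     8   /* illustrative; stands in for IWL_MAX_TID_COUNT */

int main(void)
{
	unsigned long tid_bitmap = 0;
	int tid;

	/* mark TIDs 0 and 5 as mapped to this queue */
	tid_bitmap |= BIT(0);
	tid_bitmap |= BIT(5);

	/* walk the set bits, like for_each_set_bit() over the bitmap */
	for (tid = 0; tid < MAX_TID_COUNT + 1; tid++) {
		if (!(tid_bitmap & BIT(tid)))
			continue;
		printf("TID %d is mapped to this queue\n", tid);
	}

	/* remove TID 5 from the queue's mapping */
	tid_bitmap &= ~BIT(5);

	return 0;
}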