Lines Matching full:tcm

755 low_latency = mvm->tcm.result.low_latency[mvmvif->id]; in iwl_mvm_tcm_iter()
757 if (!mvm->tcm.result.change[mvmvif->id] && in iwl_mvm_tcm_iter()
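The two hits above (lines 755 and 757) are the per-interface iterator: it reads the low-latency result computed for this interface and, together with the change flag, decides whether anything needs to be re-applied. Below is a minimal standalone sketch of that "skip when nothing changed" shape; the continuation of the condition on line 757 is not shown in the hits, so the comparison against the previous state, and all struct and function names, are assumptions for illustration.

/*
 * Minimal standalone sketch of the "skip when nothing changed" shape of
 * iwl_mvm_tcm_iter().  The struct and function names are made up, and the
 * comparison against the previous per-interface state is assumed from context.
 */
#include <stdbool.h>
#include <stdio.h>

struct tcm_result_sketch {
    bool change;        /* did the load classification change? */
    bool low_latency;   /* newly computed low-latency state */
};

static void apply_iface(const struct tcm_result_sketch *res, bool prev_low_latency)
{
    /* nothing changed since the last evaluation: take the cheap path */
    if (!res->change && prev_low_latency == res->low_latency) {
        printf("no change, skipping update\n");
        return;
    }
    printf("updating interface: low_latency=%d\n", res->low_latency);
}

int main(void)
{
    struct tcm_result_sketch res = { .change = false, .low_latency = true };

    apply_iface(&res, true);    /* skipped */
    res.change = true;
    apply_iface(&res, true);    /* applied */
    return 0;
}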
797 if (mvm->tcm.data[mvmvif->id].opened_rx_ba_sessions) in iwl_mvm_tcm_uapsd_nonagg_detected_wk()
828 if (mvm->tcm.data[mvmvif->id].uapsd_nonagg_detect.detected) in iwl_mvm_uapsd_agg_disconnect()
831 mvm->tcm.data[mvmvif->id].uapsd_nonagg_detect.detected = true; in iwl_mvm_uapsd_agg_disconnect()
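These two hits show a one-shot latch: iwl_mvm_uapsd_agg_disconnect() returns early if the condition was already flagged (line 828) and otherwise sets the flag before acting (line 831), so the action runs at most once per interface. A standalone sketch of the pattern, with made-up names, assuming the latch guards a single disconnect action:

/*
 * Standalone sketch of the one-shot "detected" latch used in
 * iwl_mvm_uapsd_agg_disconnect(): once the condition has been flagged,
 * later calls return immediately so the action runs only once per
 * interface.  Names are illustrative, not the driver's.
 */
#include <stdbool.h>
#include <stdio.h>

struct uapsd_detect_sketch {
    bool detected;
};

static void maybe_disconnect(struct uapsd_detect_sketch *d)
{
    if (d->detected)
        return;              /* already handled, do nothing */
    d->detected = true;      /* latch before acting */
    printf("disconnecting once\n");
}

int main(void)
{
    struct uapsd_detect_sketch d = { .detected = false };

    maybe_disconnect(&d);    /* prints */
    maybe_disconnect(&d);    /* silent */
    return 0;
}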
841 u64 bytes = mvm->tcm.data[mac].uapsd_nonagg_detect.rx_bytes; in iwl_mvm_check_uapsd_agg_expected_tpt()
846 rate = ewma_rate_read(&mvm->tcm.data[mac].uapsd_nonagg_detect.rate); in iwl_mvm_check_uapsd_agg_expected_tpt()
848 if (!rate || mvm->tcm.data[mac].opened_rx_ba_sessions || in iwl_mvm_check_uapsd_agg_expected_tpt()
849 mvm->tcm.data[mac].uapsd_nonagg_detect.detected) in iwl_mvm_check_uapsd_agg_expected_tpt()
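The hits from iwl_mvm_check_uapsd_agg_expected_tpt() show the inputs to the detection: the bytes received in the window (line 841), an EWMA of the receive rate (line 846), and early exits when the rate is unknown, a BA session is open, or the condition was already detected (lines 848-849). The idea is to compare achieved throughput against what the averaged rate would predict. A userspace sketch of that comparison follows; the 20% threshold, the plain average standing in for the EWMA, and all names are assumptions, not the driver's values.

/*
 * Userspace sketch of the throughput-vs-expected-rate check that
 * iwl_mvm_check_uapsd_agg_expected_tpt() performs.  The 20% threshold,
 * the plain averaged rate standing in for the kernel EWMA, and every
 * name below are assumptions for illustration only.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct uapsd_tpt_sketch {
    uint64_t rx_bytes;       /* bytes received in the window */
    unsigned long rate_avg;  /* averaged rate, kbps (stand-in for the EWMA) */
    bool open_ba_sessions;
    bool detected;
};

static void check_expected_tpt(struct uapsd_tpt_sketch *s, unsigned int elapsed_ms)
{
    uint64_t tpt_kbps;

    if (!elapsed_ms || !s->rate_avg || s->open_ba_sessions || s->detected)
        return;                     /* nothing to evaluate */

    /* achieved throughput in kbps over the elapsed window */
    tpt_kbps = s->rx_bytes * 8 / elapsed_ms;

    /* flag it when throughput is far below what the rate suggests */
    if (tpt_kbps < s->rate_avg / 5) {
        s->detected = true;
        printf("throughput %llu kbps well below avg rate %lu kbps\n",
               (unsigned long long)tpt_kbps, s->rate_avg);
    }
}

int main(void)
{
    struct uapsd_tpt_sketch s = {
        .rx_bytes = 50000, .rate_avg = 54000,
        .open_ba_sessions = false, .detected = false,
    };

    check_expected_tpt(&s, 2000);
    return 0;
}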
896 unsigned int elapsed = jiffies_to_msecs(ts - mvm->tcm.ts); in iwl_mvm_calc_tcm_stats()
898 jiffies_to_msecs(ts - mvm->tcm.uapsd_nonagg_ts); in iwl_mvm_calc_tcm_stats()
905 bool handle_ll = time_after(ts, mvm->tcm.ll_ts + MVM_LL_PERIOD); in iwl_mvm_calc_tcm_stats()
908 mvm->tcm.ll_ts = ts; in iwl_mvm_calc_tcm_stats()
910 mvm->tcm.uapsd_nonagg_ts = ts; in iwl_mvm_calc_tcm_stats()
912 mvm->tcm.result.elapsed = elapsed; in iwl_mvm_calc_tcm_stats()
920 struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[mac]; in iwl_mvm_calc_tcm_stats()
928 mvm->tcm.result.change[mac] = load != mvm->tcm.result.load[mac]; in iwl_mvm_calc_tcm_stats()
929 mvm->tcm.result.load[mac] = load; in iwl_mvm_calc_tcm_stats()
930 mvm->tcm.result.airtime[mac] = airtime; in iwl_mvm_calc_tcm_stats()
938 mvm->tcm.result.low_latency[mac] = true; in iwl_mvm_calc_tcm_stats()
940 mvm->tcm.result.low_latency[mac] = false; in iwl_mvm_calc_tcm_stats()
947 low_latency |= mvm->tcm.result.low_latency[mac]; in iwl_mvm_calc_tcm_stats()
949 if (!mvm->tcm.result.low_latency[mac] && handle_uapsd) in iwl_mvm_calc_tcm_stats()
960 mvm->tcm.result.global_load = load; in iwl_mvm_calc_tcm_stats()
964 mvm->tcm.result.band_load[i] = band_load; in iwl_mvm_calc_tcm_stats()
969 * in the TCM period, so that we can return to low load if there in iwl_mvm_calc_tcm_stats()
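The iwl_mvm_calc_tcm_stats() hits outline the periodic accounting: elapsed time since the last evaluation (lines 896-898), per-MAC airtime classified into a load level with a change flag (lines 928-930), a per-MAC low-latency verdict (lines 938-947), and global and per-band load results (lines 960-964). A compressed standalone sketch of the per-MAC part follows; the thresholds, the enum values, and the low-latency packet cutoff are illustrative stand-ins, not the driver's constants.

/*
 * Compressed standalone sketch of the per-MAC accounting done in
 * iwl_mvm_calc_tcm_stats(): sum the airtime, classify it into a load
 * level, remember whether the level changed, and flag low latency once
 * enough voice/video traffic is seen.  Thresholds, enum values and
 * names are illustrative stand-ins, not the driver's constants.
 */
#include <stdbool.h>
#include <stdio.h>

enum load_sketch { LOAD_LOW, LOAD_MEDIUM, LOAD_HIGH };

struct mac_stats_sketch {
    unsigned int rx_airtime, tx_airtime;   /* usec in the window */
    unsigned int vo_vi_pkts;               /* voice/video packets */
};

struct mac_result_sketch {
    enum load_sketch load;
    bool change;
    bool low_latency;
};

static enum load_sketch classify(unsigned int airtime, unsigned int elapsed_ms)
{
    /* airtime in usec as a percentage of the elapsed window */
    unsigned int pct = airtime / (elapsed_ms * 10);

    if (pct > 50)
        return LOAD_HIGH;
    if (pct > 10)
        return LOAD_MEDIUM;
    return LOAD_LOW;
}

static void update_mac(struct mac_result_sketch *res,
                       const struct mac_stats_sketch *st,
                       unsigned int elapsed_ms)
{
    unsigned int airtime = st->rx_airtime + st->tx_airtime;
    enum load_sketch load = classify(airtime, elapsed_ms);

    res->change = (load != res->load);   /* remember transitions */
    res->load = load;
    /* enable low latency once enough VO/VI traffic is seen */
    if (st->vo_vi_pkts > 100)
        res->low_latency = true;
}

int main(void)
{
    struct mac_stats_sketch st = { 600000, 100000, 250 };
    struct mac_result_sketch res = { LOAD_LOW, false, false };

    update_mac(&res, &st, 1000);
    printf("load=%d change=%d low_latency=%d\n",
           res.load, res.change, res.low_latency);
    return 0;
}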
1000 time_after(ts, mvm->tcm.uapsd_nonagg_ts + in iwl_mvm_recalc_tcm()
1003 spin_lock(&mvm->tcm.lock); in iwl_mvm_recalc_tcm()
1004 if (mvm->tcm.paused || !time_after(ts, mvm->tcm.ts + MVM_TCM_PERIOD)) { in iwl_mvm_recalc_tcm()
1005 spin_unlock(&mvm->tcm.lock); in iwl_mvm_recalc_tcm()
1008 spin_unlock(&mvm->tcm.lock); in iwl_mvm_recalc_tcm()
1017 spin_lock(&mvm->tcm.lock); in iwl_mvm_recalc_tcm()
1019 if (!mvm->tcm.paused && time_after(ts, mvm->tcm.ts + MVM_TCM_PERIOD)) { in iwl_mvm_recalc_tcm()
1026 mvm->tcm.ts = ts; in iwl_mvm_recalc_tcm()
1028 schedule_delayed_work(&mvm->tcm.work, work_delay); in iwl_mvm_recalc_tcm()
1030 spin_unlock(&mvm->tcm.lock); in iwl_mvm_recalc_tcm()
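The iwl_mvm_recalc_tcm() hits show the locking discipline: the lock is taken only to test the paused flag and the period (lines 1003-1004), dropped before the heavy calculation, and then retaken to re-check the same conditions before publishing the new timestamp and scheduling the delayed work (lines 1017-1028). A standalone sketch of that check, drop, recompute, recheck pattern follows, with a pthread mutex standing in for the driver's spinlock and all names invented for illustration.

/*
 * Standalone sketch of the lock/check/drop/recompute/recheck pattern in
 * iwl_mvm_recalc_tcm(): the lock is held only while testing "paused"
 * and the period, is released during the heavy calculation, and is
 * taken again to re-verify the conditions before publishing the new
 * timestamp.  A pthread mutex stands in for the driver's spinlock.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define PERIOD 10

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static bool paused;
static unsigned long last_ts;

static void heavy_calc(void)
{
    printf("recalculating stats\n");
}

static void recalc(unsigned long now)
{
    pthread_mutex_lock(&lock);
    if (paused || now <= last_ts + PERIOD) {
        pthread_mutex_unlock(&lock);
        return;                      /* nothing to do yet */
    }
    pthread_mutex_unlock(&lock);

    /* potentially expensive work done without holding the lock */
    heavy_calc();

    pthread_mutex_lock(&lock);
    /* re-check: someone may have paused or recalculated meanwhile */
    if (!paused && now > last_ts + PERIOD)
        last_ts = now;               /* publish the new timestamp */
    pthread_mutex_unlock(&lock);
}

int main(void)
{
    recalc(5);    /* too early, skipped */
    recalc(20);   /* runs the calculation */
    return 0;
}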
1039 tcm.work); in iwl_mvm_tcm_work()
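The single hit at line 1039 is the tail of a container_of() call: the work callback only receives a pointer to the embedded tcm.work item and recovers the enclosing mvm structure from it. A minimal sketch of that recovery, with simplified stand-ins for the kernel macro and structures:

/*
 * Minimal sketch of the container_of() step behind iwl_mvm_tcm_work():
 * the callback recovers the enclosing device structure from a pointer
 * to its embedded work member.  The macro and structs below are
 * simplified stand-ins for the kernel versions.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct work_sketch { int pending; };

struct mvm_sketch {
    int id;
    struct {
        struct work_sketch work;
    } tcm;
};

static void tcm_work_cb(struct work_sketch *work)
{
    /* recover the enclosing struct from the embedded member */
    struct mvm_sketch *mvm = container_of(work, struct mvm_sketch, tcm.work);

    printf("work ran for device %d\n", mvm->id);
}

int main(void)
{
    struct mvm_sketch mvm = { .id = 7 };

    tcm_work_cb(&mvm.tcm.work);
    return 0;
}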
1046 spin_lock_bh(&mvm->tcm.lock); in iwl_mvm_pause_tcm()
1047 mvm->tcm.paused = true; in iwl_mvm_pause_tcm()
1048 spin_unlock_bh(&mvm->tcm.lock); in iwl_mvm_pause_tcm()
1050 cancel_delayed_work_sync(&mvm->tcm.work); in iwl_mvm_pause_tcm()
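The iwl_mvm_pause_tcm() hits set the paused flag under the lock (lines 1046-1048) and cancel the pending work only afterwards (line 1050), outside the lock. A standalone sketch of that ordering; the pthread mutex and the stub cancel function are stand-ins for the kernel primitives.

/*
 * Standalone sketch of the pause path in iwl_mvm_pause_tcm(): the
 * "paused" flag is set under the lock, and only after the lock is
 * released is the pending work cancelled.  The mutex and the stub
 * cancel function are illustrative stand-ins.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static bool paused;

static void cancel_pending_work(void)
{
    /* stand-in for the synchronous cancellation of the delayed work */
    printf("pending work cancelled\n");
}

static void pause_tcm(bool with_cancel)
{
    pthread_mutex_lock(&lock);
    paused = true;
    pthread_mutex_unlock(&lock);

    /* cancel outside the lock: the work may itself take the lock */
    if (with_cancel)
        cancel_pending_work();
}

int main(void)
{
    pause_tcm(true);
    return 0;
}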
1058 spin_lock_bh(&mvm->tcm.lock); in iwl_mvm_resume_tcm()
1059 mvm->tcm.ts = jiffies; in iwl_mvm_resume_tcm()
1060 mvm->tcm.ll_ts = jiffies; in iwl_mvm_resume_tcm()
1062 struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[mac]; in iwl_mvm_resume_tcm()
1069 if (mvm->tcm.result.low_latency[mac]) in iwl_mvm_resume_tcm()
1072 /* The TCM data needs to be reset before "paused" flag changes */ in iwl_mvm_resume_tcm()
1074 mvm->tcm.paused = false; in iwl_mvm_resume_tcm()
1080 if (mvm->tcm.result.global_load > IWL_MVM_TRAFFIC_LOW) in iwl_mvm_resume_tcm()
1081 schedule_delayed_work(&mvm->tcm.work, MVM_TCM_PERIOD); in iwl_mvm_resume_tcm()
1083 schedule_delayed_work(&mvm->tcm.work, MVM_LL_PERIOD); in iwl_mvm_resume_tcm()
1085 spin_unlock_bh(&mvm->tcm.lock); in iwl_mvm_resume_tcm()
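The iwl_mvm_resume_tcm() hits reset the timestamps and per-MAC data (lines 1059-1062), note that the data must be reset before the paused flag changes (comment at line 1072), clear the flag (line 1074), and reschedule the work sooner or later depending on the last known load and low-latency state (lines 1080-1083). A standalone sketch of that resume ordering follows; the delays, the load encoding, and the names are illustrative, not the driver's.

/*
 * Standalone sketch of the resume path in iwl_mvm_resume_tcm(): reset
 * the per-MAC accounting first, only then clear "paused" (the driver's
 * comment at line 1072 notes this ordering requirement), and reschedule
 * the work based on the last known load or low-latency state.  Delays,
 * load encoding and names are illustrative stand-ins.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define NUM_MACS 4

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static bool paused = true;
static unsigned int airtime[NUM_MACS];
static bool low_latency_result[NUM_MACS];
static int global_load;            /* 0 = low, 1 = medium, 2 = high */

static void schedule_recalc(unsigned int delay_ms)
{
    printf("recalc scheduled in %u ms\n", delay_ms);
}

static void resume_tcm(void)
{
    bool low_latency = false;
    int mac;

    pthread_mutex_lock(&lock);

    /* reset accounting before the paused flag changes */
    memset(airtime, 0, sizeof(airtime));
    for (mac = 0; mac < NUM_MACS; mac++)
        if (low_latency_result[mac])
            low_latency = true;

    paused = false;

    /* re-evaluate sooner when load is high or low latency is active */
    if (global_load > 0)
        schedule_recalc(1000);
    else if (low_latency)
        schedule_recalc(500);

    pthread_mutex_unlock(&lock);
}

int main(void)
{
    low_latency_result[1] = true;   /* one interface was in low latency */
    resume_tcm();                   /* schedules the shorter re-check */
    return 0;
}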