Lines matching "out-of-band"

1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright 2002-2005, Instant802 Networks, Inc.
4 * Copyright 2005-2006, Devicescape Software, Inc.
5 * Copyright 2006-2007 Jiri Benc <jbenc@suse.cz>
7 * Copyright 2013-2014 Intel Mobile Communications GmbH
8 * Copyright (C) 2018-2020 Intel Corporation
31 #include "driver-ops.h"
43 struct pcpu_sw_netstats *tstats = this_cpu_ptr(dev->tstats); in ieee80211_tx_stats()
45 u64_stats_update_begin(&tstats->syncp); in ieee80211_tx_stats()
46 tstats->tx_packets++; in ieee80211_tx_stats()
47 tstats->tx_bytes += len; in ieee80211_tx_stats()
48 u64_stats_update_end(&tstats->syncp); in ieee80211_tx_stats()
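ieee80211_tx_stats() above bumps the per-CPU counters inside a u64_stats_update_begin()/end() pair so that readers (notably on 32-bit machines) never observe a torn 64-bit value. Below is a minimal standalone sketch of that sequence-counter idea in plain C; the struct and function names are illustrative only, and this is not the kernel's u64_stats API.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for pcpu_sw_netstats + syncp (not the kernel types). */
struct tx_stats {
	volatile unsigned int seq;   /* odd while an update is in flight */
	uint64_t tx_packets;
	uint64_t tx_bytes;
};

static void tx_stats_update(struct tx_stats *s, unsigned int len)
{
	s->seq++;                    /* begin: sequence becomes odd */
	__sync_synchronize();
	s->tx_packets++;
	s->tx_bytes += len;
	__sync_synchronize();
	s->seq++;                    /* end: sequence becomes even again */
}

/* Readers retry until they observe an even, unchanged sequence number. */
static void tx_stats_read(const struct tx_stats *s,
			  uint64_t *packets, uint64_t *bytes)
{
	unsigned int start;

	do {
		start = s->seq;
		__sync_synchronize();
		*packets = s->tx_packets;
		*bytes = s->tx_bytes;
		__sync_synchronize();
	} while ((start & 1) || start != s->seq);
}

int main(void)
{
	struct tx_stats st = { 0 };
	uint64_t p, b;

	tx_stats_update(&st, 1500);
	tx_stats_read(&st, &p, &b);
	printf("%llu packets, %llu bytes\n",
	       (unsigned long long)p, (unsigned long long)b);
	return 0;
}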
57 struct ieee80211_local *local = tx->local; in ieee80211_duration()
65 if (tx->rate.flags & (IEEE80211_TX_RC_MCS | IEEE80211_TX_RC_VHT_MCS)) in ieee80211_duration()
69 chanctx_conf = rcu_dereference(tx->sdata->vif.chanctx_conf); in ieee80211_duration()
71 shift = ieee80211_chandef_get_shift(&chanctx_conf->def); in ieee80211_duration()
72 rate_flags = ieee80211_chandef_rate_flags(&chanctx_conf->def); in ieee80211_duration()
77 if (WARN_ON_ONCE(tx->rate.idx < 0)) in ieee80211_duration()
80 sband = local->hw.wiphy->bands[info->band]; in ieee80211_duration()
81 txrate = &sband->bitrates[tx->rate.idx]; in ieee80211_duration()
83 erp = txrate->flags & IEEE80211_RATE_ERP_G; in ieee80211_duration()
86 if (sband->band == NL80211_BAND_S1GHZ) in ieee80211_duration()
91 * - during CFP: 32768 in ieee80211_duration()
92 * - during contention period: in ieee80211_duration()
100 * - control response frame (CTS or ACK) shall be transmitted using the in ieee80211_duration()
106 hdr = (struct ieee80211_hdr *)skb->data; in ieee80211_duration()
107 if (ieee80211_is_ctl(hdr->frame_control)) { in ieee80211_duration()
114 * CTS: duration of immediately previous RTS minus time in ieee80211_duration()
129 if (group_addr) /* Group address as the destination - no ACK */ in ieee80211_duration()
135 * basic rate set that is less than or equal to the rate of the in ieee80211_duration()
138 * the highest mandatory rate of the PHY that is less than or equal to in ieee80211_duration()
139 * the rate of the previous frame is used. in ieee80211_duration()
142 rate = -1; in ieee80211_duration()
144 mrate = sband->bitrates[0].bitrate; in ieee80211_duration()
145 for (i = 0; i < sband->n_bitrates; i++) { in ieee80211_duration()
146 struct ieee80211_rate *r = &sband->bitrates[i]; in ieee80211_duration()
148 if (r->bitrate > txrate->bitrate) in ieee80211_duration()
151 if ((rate_flags & r->flags) != rate_flags) in ieee80211_duration()
154 if (tx->sdata->vif.bss_conf.basic_rates & BIT(i)) in ieee80211_duration()
155 rate = DIV_ROUND_UP(r->bitrate, 1 << shift); in ieee80211_duration()
157 switch (sband->band) { in ieee80211_duration()
160 if (tx->sdata->flags & IEEE80211_SDATA_OPERATING_GMODE) in ieee80211_duration()
164 if (r->flags & flag) in ieee80211_duration()
165 mrate = r->bitrate; in ieee80211_duration()
170 if (r->flags & IEEE80211_RATE_MANDATORY_A) in ieee80211_duration()
171 mrate = r->bitrate; in ieee80211_duration()
181 if (rate == -1) { in ieee80211_duration()
188 if (ieee80211_is_data_qos(hdr->frame_control) && in ieee80211_duration()
193 * (10 bytes + 4-byte FCS = 112 bits) plus SIFS; rounded up in ieee80211_duration()
195 dur = ieee80211_frame_duration(sband->band, 10, rate, erp, in ieee80211_duration()
196 tx->sdata->vif.bss_conf.use_short_preamble, in ieee80211_duration()
204 dur += ieee80211_frame_duration(sband->band, next_frag_len, in ieee80211_duration()
205 txrate->bitrate, erp, in ieee80211_duration()
206 tx->sdata->vif.bss_conf.use_short_preamble, in ieee80211_duration()
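The Duration/ID computed by ieee80211_duration() works out to SIFS plus the airtime of the expected response frame (the 10-byte ACK header plus 4-byte FCS mentioned in the comment), evaluated at a basic rate. The standalone sketch below shows the OFDM TXTIME arithmetic that calculation rests on (N_SYM = ceil((16 + 8*LENGTH + 6) / N_DBPS), 4 µs symbols); mac80211 itself does this through ieee80211_frame_duration(), which also covers CCK, short preamble and 5/10 MHz channel shifts.

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/*
 * TXTIME of one OFDM (802.11a/g) frame in microseconds.  'rate_mbps' is the
 * PHY data rate, 'len' the MPDU length including FCS.  Purely illustrative.
 */
static unsigned int ofdm_txtime(unsigned int len, unsigned int rate_mbps,
				int signal_ext)
{
	unsigned int n_dbps = 4 * rate_mbps;          /* bits per 4 us symbol */
	unsigned int n_sym = DIV_ROUND_UP(16 + 8 * len + 6, n_dbps);

	return 16 /* preamble */ + 4 /* SIGNAL */ + 4 * n_sym +
	       (signal_ext ? 6 : 0);    /* ERP signal extension on 2.4 GHz */
}

int main(void)
{
	/* Duration/ID for a unicast, non-fragmented data frame:
	 * SIFS + airtime of the expected ACK (10-byte header + 4-byte FCS).
	 */
	unsigned int ack_len = 14;
	unsigned int basic_rate = 24;    /* Mb/s, e.g. a 5 GHz mandatory rate */
	unsigned int sifs = 16;          /* us on 5 GHz OFDM */
	unsigned int dur = sifs + ofdm_txtime(ack_len, basic_rate, 0);

	printf("Duration/ID = %u us\n", dur);   /* 44 us in this case */
	return 0;
}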
217 struct ieee80211_local *local = tx->local; in ieee80211_tx_h_dynamic_ps()
219 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_dynamic_ps()
222 if (!ieee80211_hw_check(&local->hw, SUPPORTS_PS)) in ieee80211_tx_h_dynamic_ps()
226 if (ieee80211_hw_check(&local->hw, SUPPORTS_DYNAMIC_PS)) in ieee80211_tx_h_dynamic_ps()
230 if (local->hw.conf.dynamic_ps_timeout <= 0) in ieee80211_tx_h_dynamic_ps()
234 if (local->scanning) in ieee80211_tx_h_dynamic_ps()
237 if (!local->ps_sdata) in ieee80211_tx_h_dynamic_ps()
241 if (local->quiescing) in ieee80211_tx_h_dynamic_ps()
245 if (tx->sdata->vif.type != NL80211_IFTYPE_STATION) in ieee80211_tx_h_dynamic_ps()
248 if (unlikely(info->flags & IEEE80211_TX_INTFL_OFFCHAN_TX_OK)) in ieee80211_tx_h_dynamic_ps()
251 ifmgd = &tx->sdata->u.mgd; in ieee80211_tx_h_dynamic_ps()
254 * Don't wakeup from power save if u-apsd is enabled, voip ac has in ieee80211_tx_h_dynamic_ps()
255 * u-apsd enabled and the frame is in voip class. This effectively in ieee80211_tx_h_dynamic_ps()
256 * means that even if all access categories have u-apsd enabled, in in ieee80211_tx_h_dynamic_ps()
257 * practise u-apsd is only used with the voip ac. This is a in ieee80211_tx_h_dynamic_ps()
262 * Note: ifmgd->uapsd_queues access is racy here. If the value is in ieee80211_tx_h_dynamic_ps()
266 if ((ifmgd->flags & IEEE80211_STA_UAPSD_ENABLED) && in ieee80211_tx_h_dynamic_ps()
267 (ifmgd->uapsd_queues & IEEE80211_WMM_IE_STA_QOSINFO_AC_VO) && in ieee80211_tx_h_dynamic_ps()
268 skb_get_queue_mapping(tx->skb) == IEEE80211_AC_VO) in ieee80211_tx_h_dynamic_ps()
271 if (local->hw.conf.flags & IEEE80211_CONF_PS) { in ieee80211_tx_h_dynamic_ps()
272 ieee80211_stop_queues_by_reason(&local->hw, in ieee80211_tx_h_dynamic_ps()
276 ifmgd->flags &= ~IEEE80211_STA_NULLFUNC_ACKED; in ieee80211_tx_h_dynamic_ps()
277 ieee80211_queue_work(&local->hw, in ieee80211_tx_h_dynamic_ps()
278 &local->dynamic_ps_disable_work); in ieee80211_tx_h_dynamic_ps()
282 if (!ifmgd->associated) in ieee80211_tx_h_dynamic_ps()
285 mod_timer(&local->dynamic_ps_timer, jiffies + in ieee80211_tx_h_dynamic_ps()
286 msecs_to_jiffies(local->hw.conf.dynamic_ps_timeout)); in ieee80211_tx_h_dynamic_ps()
295 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx->skb->data; in ieee80211_tx_h_check_assoc()
296 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_check_assoc()
299 if (unlikely(info->flags & IEEE80211_TX_CTL_INJECTED)) in ieee80211_tx_h_check_assoc()
302 if (unlikely(test_bit(SCAN_SW_SCANNING, &tx->local->scanning)) && in ieee80211_tx_h_check_assoc()
303 test_bit(SDATA_STATE_OFFCHANNEL, &tx->sdata->state) && in ieee80211_tx_h_check_assoc()
304 !ieee80211_is_probe_req(hdr->frame_control) && in ieee80211_tx_h_check_assoc()
305 !ieee80211_is_any_nullfunc(hdr->frame_control)) in ieee80211_tx_h_check_assoc()
312 * off-channel. See the link below and in ieee80211_tx_h_check_assoc()
319 if (tx->sdata->vif.type == NL80211_IFTYPE_OCB) in ieee80211_tx_h_check_assoc()
322 if (tx->sdata->vif.type == NL80211_IFTYPE_WDS) in ieee80211_tx_h_check_assoc()
325 if (tx->flags & IEEE80211_TX_PS_BUFFERED) in ieee80211_tx_h_check_assoc()
328 if (tx->sta) in ieee80211_tx_h_check_assoc()
329 assoc = test_sta_flag(tx->sta, WLAN_STA_ASSOC); in ieee80211_tx_h_check_assoc()
331 if (likely(tx->flags & IEEE80211_TX_UNICAST)) { in ieee80211_tx_h_check_assoc()
333 ieee80211_is_data(hdr->frame_control))) { in ieee80211_tx_h_check_assoc()
335 sdata_info(tx->sdata, in ieee80211_tx_h_check_assoc()
337 hdr->addr1); in ieee80211_tx_h_check_assoc()
339 I802_DEBUG_INC(tx->local->tx_handlers_drop_not_assoc); in ieee80211_tx_h_check_assoc()
342 } else if (unlikely(ieee80211_is_data(hdr->frame_control) && in ieee80211_tx_h_check_assoc()
343 ieee80211_vif_get_num_mcast_if(tx->sdata) == 0)) { in ieee80211_tx_h_check_assoc()
345 * No associated STAs - no need to send multicast in ieee80211_tx_h_check_assoc()
355 * of buffered frames for power saving STAs. This situation should not really
365 list_for_each_entry_rcu(sdata, &local->interfaces, list) { in purge_old_ps_buffers()
368 if (sdata->vif.type == NL80211_IFTYPE_AP) in purge_old_ps_buffers()
369 ps = &sdata->u.ap.ps; in purge_old_ps_buffers()
370 else if (ieee80211_vif_is_mesh(&sdata->vif)) in purge_old_ps_buffers()
371 ps = &sdata->u.mesh.ps; in purge_old_ps_buffers()
375 skb = skb_dequeue(&ps->bc_buf); in purge_old_ps_buffers()
378 ieee80211_free_txskb(&local->hw, skb); in purge_old_ps_buffers()
380 total += skb_queue_len(&ps->bc_buf); in purge_old_ps_buffers()
384 * Drop one frame from each station from the lowest-priority in purge_old_ps_buffers()
387 list_for_each_entry_rcu(sta, &local->sta_list, list) { in purge_old_ps_buffers()
390 for (ac = IEEE80211_AC_BK; ac >= IEEE80211_AC_VO; ac--) { in purge_old_ps_buffers()
391 skb = skb_dequeue(&sta->ps_tx_buf[ac]); in purge_old_ps_buffers()
392 total += skb_queue_len(&sta->ps_tx_buf[ac]); in purge_old_ps_buffers()
395 ieee80211_free_txskb(&local->hw, skb); in purge_old_ps_buffers()
401 local->total_ps_buffered = total; in purge_old_ps_buffers()
402 ps_dbg_hw(&local->hw, "PS buffers full - purged %d frames\n", purged); in purge_old_ps_buffers()
408 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_multicast_ps_buf()
409 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx->skb->data; in ieee80211_tx_h_multicast_ps_buf()
415 * If any of the associated/peer stations is in power save mode, in ieee80211_tx_h_multicast_ps_buf()
421 if (tx->sdata->vif.type == NL80211_IFTYPE_AP || in ieee80211_tx_h_multicast_ps_buf()
422 tx->sdata->vif.type == NL80211_IFTYPE_AP_VLAN) { in ieee80211_tx_h_multicast_ps_buf()
423 if (!tx->sdata->bss) in ieee80211_tx_h_multicast_ps_buf()
426 ps = &tx->sdata->bss->ps; in ieee80211_tx_h_multicast_ps_buf()
427 } else if (ieee80211_vif_is_mesh(&tx->sdata->vif)) { in ieee80211_tx_h_multicast_ps_buf()
428 ps = &tx->sdata->u.mesh.ps; in ieee80211_tx_h_multicast_ps_buf()
435 if (ieee80211_has_order(hdr->frame_control)) in ieee80211_tx_h_multicast_ps_buf()
438 if (ieee80211_is_probe_req(hdr->frame_control)) in ieee80211_tx_h_multicast_ps_buf()
441 if (ieee80211_hw_check(&tx->local->hw, QUEUE_CONTROL)) in ieee80211_tx_h_multicast_ps_buf()
442 info->hw_queue = tx->sdata->vif.cab_queue; in ieee80211_tx_h_multicast_ps_buf()
445 if (!atomic_read(&ps->num_sta_ps) && skb_queue_empty(&ps->bc_buf)) in ieee80211_tx_h_multicast_ps_buf()
448 info->flags |= IEEE80211_TX_CTL_SEND_AFTER_DTIM; in ieee80211_tx_h_multicast_ps_buf()
451 if (!ieee80211_hw_check(&tx->local->hw, HOST_BROADCAST_PS_BUFFERING)) in ieee80211_tx_h_multicast_ps_buf()
455 if (tx->local->total_ps_buffered >= TOTAL_MAX_TX_BUFFER) in ieee80211_tx_h_multicast_ps_buf()
456 purge_old_ps_buffers(tx->local); in ieee80211_tx_h_multicast_ps_buf()
458 if (skb_queue_len(&ps->bc_buf) >= AP_MAX_BC_BUFFER) { in ieee80211_tx_h_multicast_ps_buf()
459 ps_dbg(tx->sdata, in ieee80211_tx_h_multicast_ps_buf()
460 "BC TX buffer full - dropping the oldest frame\n"); in ieee80211_tx_h_multicast_ps_buf()
461 ieee80211_free_txskb(&tx->local->hw, skb_dequeue(&ps->bc_buf)); in ieee80211_tx_h_multicast_ps_buf()
463 tx->local->total_ps_buffered++; in ieee80211_tx_h_multicast_ps_buf()
465 skb_queue_tail(&ps->bc_buf, tx->skb); in ieee80211_tx_h_multicast_ps_buf()
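When the broadcast/multicast power-save buffer reaches AP_MAX_BC_BUFFER, the handler above frees the oldest queued frame before appending the new one. A minimal userspace sketch of that drop-oldest policy (a fixed-size FIFO standing in for ps->bc_buf; the names and the tiny capacity are illustrative only):

#include <stdio.h>
#include <stdlib.h>

#define BC_BUF_MAX 4	/* AP_MAX_BC_BUFFER is much larger; 4 shows the eviction */

struct bc_buf {
	void *frames[BC_BUF_MAX];
	unsigned int head, len;
};

/* Enqueue a frame; if the buffer is full, free and drop the oldest one. */
static void bc_buf_enqueue(struct bc_buf *b, void *frame)
{
	if (b->len == BC_BUF_MAX) {
		free(b->frames[b->head]);              /* drop oldest */
		b->head = (b->head + 1) % BC_BUF_MAX;
		b->len--;
		printf("BC TX buffer full - dropping the oldest frame\n");
	}
	b->frames[(b->head + b->len) % BC_BUF_MAX] = frame;
	b->len++;
}

int main(void)
{
	struct bc_buf buf = { .head = 0, .len = 0 };

	for (int i = 0; i < 6; i++)
		bc_buf_enqueue(&buf, malloc(64));      /* stand-in for an skb */
	printf("%u frames buffered\n", buf.len);
	return 0;
}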
488 struct sta_info *sta = tx->sta; in ieee80211_tx_h_unicast_ps_buf()
489 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_unicast_ps_buf()
490 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx->skb->data; in ieee80211_tx_h_unicast_ps_buf()
491 struct ieee80211_local *local = tx->local; in ieee80211_tx_h_unicast_ps_buf()
499 !(info->flags & IEEE80211_TX_CTL_NO_PS_BUFFER))) { in ieee80211_tx_h_unicast_ps_buf()
500 int ac = skb_get_queue_mapping(tx->skb); in ieee80211_tx_h_unicast_ps_buf()
502 if (ieee80211_is_mgmt(hdr->frame_control) && in ieee80211_tx_h_unicast_ps_buf()
503 !ieee80211_is_bufferable_mmpdu(hdr->frame_control)) { in ieee80211_tx_h_unicast_ps_buf()
504 info->flags |= IEEE80211_TX_CTL_NO_PS_BUFFER; in ieee80211_tx_h_unicast_ps_buf()
508 ps_dbg(sta->sdata, "STA %pM aid %d: PS buffer for AC %d\n", in ieee80211_tx_h_unicast_ps_buf()
509 sta->sta.addr, sta->sta.aid, ac); in ieee80211_tx_h_unicast_ps_buf()
510 if (tx->local->total_ps_buffered >= TOTAL_MAX_TX_BUFFER) in ieee80211_tx_h_unicast_ps_buf()
511 purge_old_ps_buffers(tx->local); in ieee80211_tx_h_unicast_ps_buf()
514 spin_lock(&sta->ps_lock); in ieee80211_tx_h_unicast_ps_buf()
523 spin_unlock(&sta->ps_lock); in ieee80211_tx_h_unicast_ps_buf()
527 if (skb_queue_len(&sta->ps_tx_buf[ac]) >= STA_MAX_TX_BUFFER) { in ieee80211_tx_h_unicast_ps_buf()
528 struct sk_buff *old = skb_dequeue(&sta->ps_tx_buf[ac]); in ieee80211_tx_h_unicast_ps_buf()
529 ps_dbg(tx->sdata, in ieee80211_tx_h_unicast_ps_buf()
530 "STA %pM TX buffer for AC %d full - dropping oldest frame\n", in ieee80211_tx_h_unicast_ps_buf()
531 sta->sta.addr, ac); in ieee80211_tx_h_unicast_ps_buf()
532 ieee80211_free_txskb(&local->hw, old); in ieee80211_tx_h_unicast_ps_buf()
534 tx->local->total_ps_buffered++; in ieee80211_tx_h_unicast_ps_buf()
536 info->control.jiffies = jiffies; in ieee80211_tx_h_unicast_ps_buf()
537 info->control.vif = &tx->sdata->vif; in ieee80211_tx_h_unicast_ps_buf()
538 info->control.flags |= IEEE80211_TX_INTCFL_NEED_TXPROCESSING; in ieee80211_tx_h_unicast_ps_buf()
539 info->flags &= ~IEEE80211_TX_TEMPORARY_FLAGS; in ieee80211_tx_h_unicast_ps_buf()
540 skb_queue_tail(&sta->ps_tx_buf[ac], tx->skb); in ieee80211_tx_h_unicast_ps_buf()
541 spin_unlock(&sta->ps_lock); in ieee80211_tx_h_unicast_ps_buf()
543 if (!timer_pending(&local->sta_cleanup)) in ieee80211_tx_h_unicast_ps_buf()
544 mod_timer(&local->sta_cleanup, in ieee80211_tx_h_unicast_ps_buf()
556 ps_dbg(tx->sdata, in ieee80211_tx_h_unicast_ps_buf()
557 "STA %pM in PS mode, but polling/in SP -> send frame\n", in ieee80211_tx_h_unicast_ps_buf()
558 sta->sta.addr); in ieee80211_tx_h_unicast_ps_buf()
567 if (unlikely(tx->flags & IEEE80211_TX_PS_BUFFERED)) in ieee80211_tx_h_ps_buf()
570 if (tx->flags & IEEE80211_TX_UNICAST) in ieee80211_tx_h_ps_buf()
579 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_check_control_port_protocol()
581 if (unlikely(tx->sdata->control_port_protocol == tx->skb->protocol)) { in ieee80211_tx_h_check_control_port_protocol()
582 if (tx->sdata->control_port_no_encrypt) in ieee80211_tx_h_check_control_port_protocol()
583 info->flags |= IEEE80211_TX_INTFL_DONT_ENCRYPT; in ieee80211_tx_h_check_control_port_protocol()
584 info->control.flags |= IEEE80211_TX_CTRL_PORT_CTRL_PROTO; in ieee80211_tx_h_check_control_port_protocol()
585 info->flags |= IEEE80211_TX_CTL_USE_MINRATE; in ieee80211_tx_h_check_control_port_protocol()
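The control-port handler matches skb->protocol against sdata->control_port_protocol (normally EAPOL, ethertype 0x888E) and, if it matches, marks the frame so it can bypass encryption requirements and is sent at the minimum rate. A standalone sketch of the ethertype check, assuming a plain Ethernet header:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define ETH_P_PAE 0x888E	/* EAPOL, the usual control-port protocol */

/* Return nonzero if the Ethernet frame carries the control-port protocol. */
static int is_control_port_frame(const uint8_t *eth_frame, size_t len,
				 uint16_t control_port_proto)
{
	uint16_t ethertype;

	if (len < 14)
		return 0;
	ethertype = (eth_frame[12] << 8) | eth_frame[13];
	return ethertype == control_port_proto;
}

int main(void)
{
	uint8_t eapol[64] = { [12] = 0x88, [13] = 0x8E };

	printf("EAPOL? %d\n", is_control_port_frame(eapol, sizeof(eapol),
						    ETH_P_PAE));
	return 0;
}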
595 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_select_key()
596 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx->skb->data; in ieee80211_tx_h_select_key()
598 if (unlikely(info->flags & IEEE80211_TX_INTFL_DONT_ENCRYPT)) { in ieee80211_tx_h_select_key()
599 tx->key = NULL; in ieee80211_tx_h_select_key()
603 if (tx->sta && in ieee80211_tx_h_select_key()
604 (key = rcu_dereference(tx->sta->ptk[tx->sta->ptk_idx]))) in ieee80211_tx_h_select_key()
605 tx->key = key; in ieee80211_tx_h_select_key()
606 else if (ieee80211_is_group_privacy_action(tx->skb) && in ieee80211_tx_h_select_key()
607 (key = rcu_dereference(tx->sdata->default_multicast_key))) in ieee80211_tx_h_select_key()
608 tx->key = key; in ieee80211_tx_h_select_key()
609 else if (ieee80211_is_mgmt(hdr->frame_control) && in ieee80211_tx_h_select_key()
610 is_multicast_ether_addr(hdr->addr1) && in ieee80211_tx_h_select_key()
611 ieee80211_is_robust_mgmt_frame(tx->skb) && in ieee80211_tx_h_select_key()
612 (key = rcu_dereference(tx->sdata->default_mgmt_key))) in ieee80211_tx_h_select_key()
613 tx->key = key; in ieee80211_tx_h_select_key()
614 else if (is_multicast_ether_addr(hdr->addr1) && in ieee80211_tx_h_select_key()
615 (key = rcu_dereference(tx->sdata->default_multicast_key))) in ieee80211_tx_h_select_key()
616 tx->key = key; in ieee80211_tx_h_select_key()
617 else if (!is_multicast_ether_addr(hdr->addr1) && in ieee80211_tx_h_select_key()
618 (key = rcu_dereference(tx->sdata->default_unicast_key))) in ieee80211_tx_h_select_key()
619 tx->key = key; in ieee80211_tx_h_select_key()
621 tx->key = NULL; in ieee80211_tx_h_select_key()
623 if (tx->key) { in ieee80211_tx_h_select_key()
628 switch (tx->key->conf.cipher) { in ieee80211_tx_h_select_key()
632 if (!ieee80211_is_data_present(hdr->frame_control)) in ieee80211_tx_h_select_key()
633 tx->key = NULL; in ieee80211_tx_h_select_key()
639 if (!ieee80211_is_data_present(hdr->frame_control) && in ieee80211_tx_h_select_key()
640 !ieee80211_use_mfp(hdr->frame_control, tx->sta, in ieee80211_tx_h_select_key()
641 tx->skb) && in ieee80211_tx_h_select_key()
642 !ieee80211_is_group_privacy_action(tx->skb)) in ieee80211_tx_h_select_key()
643 tx->key = NULL; in ieee80211_tx_h_select_key()
645 skip_hw = (tx->key->conf.flags & in ieee80211_tx_h_select_key()
647 ieee80211_is_mgmt(hdr->frame_control); in ieee80211_tx_h_select_key()
653 if (!ieee80211_is_mgmt(hdr->frame_control)) in ieee80211_tx_h_select_key()
654 tx->key = NULL; in ieee80211_tx_h_select_key()
658 if (unlikely(tx->key && tx->key->flags & KEY_FLAG_TAINTED && in ieee80211_tx_h_select_key()
659 !ieee80211_is_deauth(hdr->frame_control))) in ieee80211_tx_h_select_key()
662 if (!skip_hw && tx->key && in ieee80211_tx_h_select_key()
663 tx->key->flags & KEY_FLAG_UPLOADED_TO_HARDWARE) in ieee80211_tx_h_select_key()
664 info->control.hw_key = &tx->key->conf; in ieee80211_tx_h_select_key()
665 } else if (!ieee80211_is_mgmt(hdr->frame_control) && tx->sta && in ieee80211_tx_h_select_key()
666 test_sta_flag(tx->sta, WLAN_STA_USES_ENCRYPTION)) { in ieee80211_tx_h_select_key()
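ieee80211_tx_h_select_key() walks a fixed precedence: the station's pairwise key, then (for group-addressed robust management frames) the default management key, then the default multicast key, then the default unicast key, otherwise no protection. A simplified standalone sketch of that precedence; it omits the group-privacy-action case and the per-cipher fixups that follow in the real handler:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for the keys an interface/station may have. */
struct key { const char *name; };

struct key_state {
	struct key *ptk;                 /* pairwise key for this station */
	struct key *default_mgmt_key;
	struct key *default_multicast_key;
	struct key *default_unicast_key;
};

static struct key *select_tx_key(const struct key_state *ks,
				 bool multicast, bool robust_mgmt)
{
	if (!multicast && ks->ptk)
		return ks->ptk;
	if (multicast && robust_mgmt && ks->default_mgmt_key)
		return ks->default_mgmt_key;
	if (multicast && ks->default_multicast_key)
		return ks->default_multicast_key;
	if (!multicast && ks->default_unicast_key)
		return ks->default_unicast_key;
	return NULL;                    /* send unprotected */
}

int main(void)
{
	struct key ptk = { "PTK" }, gtk = { "GTK" }, igtk = { "IGTK" };
	struct key_state ks = { &ptk, &igtk, &gtk, NULL };

	printf("unicast data -> %s\n", select_tx_key(&ks, false, false)->name);
	printf("group mgmt   -> %s\n", select_tx_key(&ks, true, true)->name);
	printf("group data   -> %s\n", select_tx_key(&ks, true, false)->name);
	return 0;
}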
676 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_rate_ctrl()
677 struct ieee80211_hdr *hdr = (void *)tx->skb->data; in ieee80211_tx_h_rate_ctrl()
686 sband = tx->local->hw.wiphy->bands[info->band]; in ieee80211_tx_h_rate_ctrl()
688 len = min_t(u32, tx->skb->len + FCS_LEN, in ieee80211_tx_h_rate_ctrl()
689 tx->local->hw.wiphy->frag_threshold); in ieee80211_tx_h_rate_ctrl()
692 txrc.hw = &tx->local->hw; in ieee80211_tx_h_rate_ctrl()
694 txrc.bss_conf = &tx->sdata->vif.bss_conf; in ieee80211_tx_h_rate_ctrl()
695 txrc.skb = tx->skb; in ieee80211_tx_h_rate_ctrl()
696 txrc.reported_rate.idx = -1; in ieee80211_tx_h_rate_ctrl()
697 txrc.rate_idx_mask = tx->sdata->rc_rateidx_mask[info->band]; in ieee80211_tx_h_rate_ctrl()
699 if (tx->sdata->rc_has_mcs_mask[info->band]) in ieee80211_tx_h_rate_ctrl()
701 tx->sdata->rc_rateidx_mcs_mask[info->band]; in ieee80211_tx_h_rate_ctrl()
703 txrc.bss = (tx->sdata->vif.type == NL80211_IFTYPE_AP || in ieee80211_tx_h_rate_ctrl()
704 tx->sdata->vif.type == NL80211_IFTYPE_MESH_POINT || in ieee80211_tx_h_rate_ctrl()
705 tx->sdata->vif.type == NL80211_IFTYPE_ADHOC || in ieee80211_tx_h_rate_ctrl()
706 tx->sdata->vif.type == NL80211_IFTYPE_OCB); in ieee80211_tx_h_rate_ctrl()
709 if (len > tx->local->hw.wiphy->rts_threshold) { in ieee80211_tx_h_rate_ctrl()
713 info->control.use_rts = txrc.rts; in ieee80211_tx_h_rate_ctrl()
714 info->control.use_cts_prot = tx->sdata->vif.bss_conf.use_cts_prot; in ieee80211_tx_h_rate_ctrl()
719 * that -- the management frame might be to a station that in ieee80211_tx_h_rate_ctrl()
722 if (tx->sdata->vif.bss_conf.use_short_preamble && in ieee80211_tx_h_rate_ctrl()
723 (ieee80211_is_data(hdr->frame_control) || in ieee80211_tx_h_rate_ctrl()
724 (tx->sta && test_sta_flag(tx->sta, WLAN_STA_SHORT_PREAMBLE)))) in ieee80211_tx_h_rate_ctrl()
727 info->control.short_preamble = txrc.short_preamble; in ieee80211_tx_h_rate_ctrl()
730 if (info->control.flags & IEEE80211_TX_CTRL_RATE_INJECT) in ieee80211_tx_h_rate_ctrl()
733 if (tx->sta) in ieee80211_tx_h_rate_ctrl()
734 assoc = test_sta_flag(tx->sta, WLAN_STA_ASSOC); in ieee80211_tx_h_rate_ctrl()
740 if (WARN(test_bit(SCAN_SW_SCANNING, &tx->local->scanning) && assoc && in ieee80211_tx_h_rate_ctrl()
741 !rate_usable_index_exists(sband, &tx->sta->sta), in ieee80211_tx_h_rate_ctrl()
744 "%pM on %d GHz band\n", in ieee80211_tx_h_rate_ctrl()
745 tx->sdata->name, hdr->addr1, in ieee80211_tx_h_rate_ctrl()
746 info->band ? 5 : 2)) in ieee80211_tx_h_rate_ctrl()
753 rate_control_get_rate(tx->sdata, tx->sta, &txrc); in ieee80211_tx_h_rate_ctrl()
755 if (tx->sta && !info->control.skip_table) in ieee80211_tx_h_rate_ctrl()
756 ratetbl = rcu_dereference(tx->sta->sta.rates); in ieee80211_tx_h_rate_ctrl()
758 if (unlikely(info->control.rates[0].idx < 0)) { in ieee80211_tx_h_rate_ctrl()
761 .idx = ratetbl->rate[0].idx, in ieee80211_tx_h_rate_ctrl()
762 .flags = ratetbl->rate[0].flags, in ieee80211_tx_h_rate_ctrl()
763 .count = ratetbl->rate[0].count in ieee80211_tx_h_rate_ctrl()
766 if (ratetbl->rate[0].idx < 0) in ieee80211_tx_h_rate_ctrl()
769 tx->rate = rate; in ieee80211_tx_h_rate_ctrl()
774 tx->rate = info->control.rates[0]; in ieee80211_tx_h_rate_ctrl()
778 txrc.reported_rate = tx->rate; in ieee80211_tx_h_rate_ctrl()
779 if (tx->sta && ieee80211_is_data(hdr->frame_control)) in ieee80211_tx_h_rate_ctrl()
780 tx->sta->tx_stats.last_rate = txrc.reported_rate; in ieee80211_tx_h_rate_ctrl()
781 } else if (tx->sta) in ieee80211_tx_h_rate_ctrl()
782 tx->sta->tx_stats.last_rate = txrc.reported_rate; in ieee80211_tx_h_rate_ctrl()
787 if (unlikely(!info->control.rates[0].count)) in ieee80211_tx_h_rate_ctrl()
788 info->control.rates[0].count = 1; in ieee80211_tx_h_rate_ctrl()
790 if (WARN_ON_ONCE((info->control.rates[0].count > 1) && in ieee80211_tx_h_rate_ctrl()
791 (info->flags & IEEE80211_TX_CTL_NO_ACK))) in ieee80211_tx_h_rate_ctrl()
792 info->control.rates[0].count = 1; in ieee80211_tx_h_rate_ctrl()
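Earlier in ieee80211_tx_h_rate_ctrl(), the RTS decision comes from len = min(skb->len + FCS_LEN, frag_threshold) compared against the wiphy RTS threshold. A one-function sketch of that decision:

#include <stdio.h>

#define FCS_LEN 4

static unsigned int min_u32(unsigned int a, unsigned int b)
{
	return a < b ? a : b;
}

/* Mirror of "len = min(skb->len + FCS_LEN, frag_threshold);
 * rts = len > rts_threshold" from the handler above. */
static int want_rts(unsigned int skb_len, unsigned int frag_threshold,
		    unsigned int rts_threshold)
{
	unsigned int len = min_u32(skb_len + FCS_LEN, frag_threshold);

	return len > rts_threshold;
}

int main(void)
{
	printf("1500-byte MSDU, rts_threshold 2347: %d\n",
	       want_rts(1500, 2346, 2347));
	printf("1500-byte MSDU, rts_threshold 500:  %d\n",
	       want_rts(1500, 2346, 500));
	return 0;
}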
799 u16 *seq = &sta->tid_seq[tid]; in ieee80211_tx_next_seq()
811 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in ieee80211_tx_h_sequence()
812 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)tx->skb->data; in ieee80211_tx_h_sequence()
820 if (unlikely(info->control.vif->type == NL80211_IFTYPE_MONITOR)) in ieee80211_tx_h_sequence()
823 if (unlikely(ieee80211_is_ctl(hdr->frame_control))) in ieee80211_tx_h_sequence()
826 if (ieee80211_hdrlen(hdr->frame_control) < 24) in ieee80211_tx_h_sequence()
829 if (ieee80211_is_qos_nullfunc(hdr->frame_control)) in ieee80211_tx_h_sequence()
832 if (info->control.flags & IEEE80211_TX_CTRL_NO_SEQNO) in ieee80211_tx_h_sequence()
839 * also use the global counter (802.11-2012 9.3.2.10). in ieee80211_tx_h_sequence()
841 if (!ieee80211_is_data_qos(hdr->frame_control) || in ieee80211_tx_h_sequence()
842 is_multicast_ether_addr(hdr->addr1)) { in ieee80211_tx_h_sequence()
844 info->flags |= IEEE80211_TX_CTL_ASSIGN_SEQ; in ieee80211_tx_h_sequence()
846 hdr->seq_ctrl = cpu_to_le16(tx->sdata->sequence_number); in ieee80211_tx_h_sequence()
847 tx->sdata->sequence_number += 0x10; in ieee80211_tx_h_sequence()
848 if (tx->sta) in ieee80211_tx_h_sequence()
849 tx->sta->tx_stats.msdu[IEEE80211_NUM_TIDS]++; in ieee80211_tx_h_sequence()
856 * above since they are not QoS-data frames. in ieee80211_tx_h_sequence()
858 if (!tx->sta) in ieee80211_tx_h_sequence()
861 /* include per-STA, per-TID sequence counter */ in ieee80211_tx_h_sequence()
863 tx->sta->tx_stats.msdu[tid]++; in ieee80211_tx_h_sequence()
865 hdr->seq_ctrl = ieee80211_tx_next_seq(tx->sta, tid); in ieee80211_tx_h_sequence()
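Sequence number assignment: non-QoS and group-addressed frames take the per-interface counter, which is advanced by 0x10 because the sequence number occupies bits 4..15 of seq_ctrl (bits 0..3 are the fragment number), while QoS data frames use a per-station, per-TID counter via ieee80211_tx_next_seq(). A standalone sketch of the counter arithmetic (host byte order here; on the air the field is little-endian):

#include <stdint.h>
#include <stdio.h>

#define SCTL_FRAG 0x000F	/* fragment number: bits 0..3  */
#define SCTL_SEQ  0xFFF0	/* sequence number: bits 4..15 */

/* Return the current value for seq_ctrl, then advance the counter. */
static uint16_t next_seq(uint16_t *ctr)
{
	uint16_t val = *ctr;

	*ctr = (*ctr + 0x10) & SCTL_SEQ;   /* +1 in the seq field, wrap at 4096 */
	return val;
}

int main(void)
{
	uint16_t if_seq = 0xFFF0;          /* per-interface counter, about to wrap */
	uint16_t tid_seq[16] = { 0 };      /* per-TID counters for QoS data */

	printf("non-QoS: %04x then %04x\n", next_seq(&if_seq), next_seq(&if_seq));
	printf("TID 5:   %04x\n", next_seq(&tid_seq[5]));
	return 0;
}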
874 struct ieee80211_local *local = tx->local; in ieee80211_fragment()
877 int per_fragm = frag_threshold - hdrlen - FCS_LEN; in ieee80211_fragment()
879 int rem = skb->len - hdrlen - per_fragm; in ieee80211_fragment()
882 return -EINVAL; in ieee80211_fragment()
891 rem -= fraglen; in ieee80211_fragment()
892 tmp = dev_alloc_skb(local->tx_headroom + in ieee80211_fragment()
894 tx->sdata->encrypt_headroom + in ieee80211_fragment()
897 return -ENOMEM; in ieee80211_fragment()
899 __skb_queue_tail(&tx->skbs, tmp); in ieee80211_fragment()
902 local->tx_headroom + tx->sdata->encrypt_headroom); in ieee80211_fragment()
905 memcpy(tmp->cb, skb->cb, sizeof(tmp->cb)); in ieee80211_fragment()
908 info->flags &= ~(IEEE80211_TX_CTL_CLEAR_PS_FILT | in ieee80211_fragment()
912 info->flags |= IEEE80211_TX_CTL_MORE_FRAMES; in ieee80211_fragment()
915 tmp->priority = skb->priority; in ieee80211_fragment()
916 tmp->dev = skb->dev; in ieee80211_fragment()
919 skb_put_data(tmp, skb->data, hdrlen); in ieee80211_fragment()
920 skb_put_data(tmp, skb->data + pos, fraglen); in ieee80211_fragment()
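ieee80211_fragment() sizes each fragment so that header + payload + FCS stays within the fragmentation threshold: per_fragm = frag_threshold - hdrlen - FCS_LEN, with the original skb carrying the first fragment. The standalone sketch below reproduces just that arithmetic; the real code also allocates the new skbs, copies the 802.11 header into each, and reserves head/tailroom for encryption:

#include <stdio.h>

#define FCS_LEN 4
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Print how an MPDU of 'len' bytes (header + payload, no FCS) is split so
 * that every fragment plus FCS fits under frag_threshold. */
static void plan_fragments(unsigned int len, unsigned int hdrlen,
			   unsigned int frag_threshold)
{
	unsigned int per_fragm = frag_threshold - hdrlen - FCS_LEN;
	unsigned int rem = len - hdrlen - per_fragm;   /* after fragment 0 */
	unsigned int nfrags = 1 + DIV_ROUND_UP(rem, per_fragm);
	unsigned int pos = hdrlen + per_fragm, i;

	printf("fragment 0: %u payload bytes\n", per_fragm);
	for (i = 1; i < nfrags; i++) {
		unsigned int fraglen = rem < per_fragm ? rem : per_fragm;

		printf("fragment %u: %u payload bytes (offset %u)\n",
		       i, fraglen, pos);
		pos += fraglen;
		rem -= fraglen;
	}
}

int main(void)
{
	/* 2304-byte MSDU behind a 26-byte QoS header, threshold 1000 bytes */
	plan_fragments(26 + 2304, 26, 1000);
	return 0;
}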
933 struct sk_buff *skb = tx->skb; in ieee80211_tx_h_fragment()
935 struct ieee80211_hdr *hdr = (void *)skb->data; in ieee80211_tx_h_fragment()
936 int frag_threshold = tx->local->hw.wiphy->frag_threshold; in ieee80211_tx_h_fragment()
940 /* no matter what happens, tx->skb moves to tx->skbs */ in ieee80211_tx_h_fragment()
941 __skb_queue_tail(&tx->skbs, skb); in ieee80211_tx_h_fragment()
942 tx->skb = NULL; in ieee80211_tx_h_fragment()
944 if (info->flags & IEEE80211_TX_CTL_DONTFRAG) in ieee80211_tx_h_fragment()
947 if (ieee80211_hw_check(&tx->local->hw, SUPPORTS_TX_FRAG)) in ieee80211_tx_h_fragment()
951 * Warn when submitting a fragmented A-MPDU frame and drop it. in ieee80211_tx_h_fragment()
955 if (WARN_ON(info->flags & IEEE80211_TX_CTL_AMPDU)) in ieee80211_tx_h_fragment()
958 hdrlen = ieee80211_hdrlen(hdr->frame_control); in ieee80211_tx_h_fragment()
961 if (WARN_ON(skb->len + FCS_LEN <= frag_threshold)) in ieee80211_tx_h_fragment()
966 * chain them (using skb as the first fragment) to skb->next. in ieee80211_tx_h_fragment()
968 * fragments from this list. When the low-level driver rejects one in ieee80211_tx_h_fragment()
969 * of the fragments then we will simply pretend to accept the skb in ieee80211_tx_h_fragment()
975 /* update duration/seq/flags of fragments */ in ieee80211_tx_h_fragment()
978 skb_queue_walk(&tx->skbs, skb) { in ieee80211_tx_h_fragment()
981 hdr = (void *)skb->data; in ieee80211_tx_h_fragment()
984 if (!skb_queue_is_last(&tx->skbs, skb)) { in ieee80211_tx_h_fragment()
985 hdr->frame_control |= morefrags; in ieee80211_tx_h_fragment()
987 * No multi-rate retries for fragmented frames, that in ieee80211_tx_h_fragment()
990 info->control.rates[1].idx = -1; in ieee80211_tx_h_fragment()
991 info->control.rates[2].idx = -1; in ieee80211_tx_h_fragment()
992 info->control.rates[3].idx = -1; in ieee80211_tx_h_fragment()
994 info->flags &= ~IEEE80211_TX_CTL_RATE_CTRL_PROBE; in ieee80211_tx_h_fragment()
996 hdr->frame_control &= ~morefrags; in ieee80211_tx_h_fragment()
998 hdr->seq_ctrl |= cpu_to_le16(fragnum & IEEE80211_SCTL_FRAG); in ieee80211_tx_h_fragment()
1009 int ac = -1; in ieee80211_tx_h_stats()
1011 if (!tx->sta) in ieee80211_tx_h_stats()
1014 skb_queue_walk(&tx->skbs, skb) { in ieee80211_tx_h_stats()
1016 tx->sta->tx_stats.bytes[ac] += skb->len; in ieee80211_tx_h_stats()
1019 tx->sta->tx_stats.packets[ac]++; in ieee80211_tx_h_stats()
1027 if (!tx->key) in ieee80211_tx_h_encrypt()
1030 switch (tx->key->conf.cipher) { in ieee80211_tx_h_encrypt()
1067 skb_queue_walk(&tx->skbs, skb) { in ieee80211_tx_h_calculate_duration()
1068 hdr = (void *) skb->data; in ieee80211_tx_h_calculate_duration()
1069 if (unlikely(ieee80211_is_pspoll(hdr->frame_control))) in ieee80211_tx_h_calculate_duration()
1071 if (!skb_queue_is_last(&tx->skbs, skb)) { in ieee80211_tx_h_calculate_duration()
1072 struct sk_buff *next = skb_queue_next(&tx->skbs, skb); in ieee80211_tx_h_calculate_duration()
1073 next_len = next->len; in ieee80211_tx_h_calculate_duration()
1076 group_addr = is_multicast_ether_addr(hdr->addr1); in ieee80211_tx_h_calculate_duration()
1078 hdr->duration_id = in ieee80211_tx_h_calculate_duration()
1097 if (test_bit(HT_AGG_STATE_OPERATIONAL, &tid_tx->state)) { in ieee80211_tx_prep_agg()
1098 info->flags |= IEEE80211_TX_CTL_AMPDU; in ieee80211_tx_prep_agg()
1100 } else if (test_bit(HT_AGG_STATE_WANT_START, &tid_tx->state)) { in ieee80211_tx_prep_agg()
1102 * nothing -- this aggregation session is being started in ieee80211_tx_prep_agg()
1105 } else if (!tx->sta->sta.txq[tid]) { in ieee80211_tx_prep_agg()
1106 spin_lock(&tx->sta->lock); in ieee80211_tx_prep_agg()
1108 * Need to re-check now, because we may get here in ieee80211_tx_prep_agg()
1113 * queue yet -- if this happened we acquire the lock in ieee80211_tx_prep_agg()
1115 * need to recheck which of these cases happened. in ieee80211_tx_prep_agg()
1120 * before it was assigned) -- in this case it may in ieee80211_tx_prep_agg()
1125 tid_tx = rcu_dereference_protected_tid_tx(tx->sta, tid); in ieee80211_tx_prep_agg()
1129 } else if (test_bit(HT_AGG_STATE_OPERATIONAL, &tid_tx->state)) { in ieee80211_tx_prep_agg()
1130 info->flags |= IEEE80211_TX_CTL_AMPDU; in ieee80211_tx_prep_agg()
1134 if (info->flags & IEEE80211_TX_CTL_NO_PS_BUFFER) { in ieee80211_tx_prep_agg()
1135 clear_sta_flag(tx->sta, WLAN_STA_SP); in ieee80211_tx_prep_agg()
1136 ps_dbg(tx->sta->sdata, in ieee80211_tx_prep_agg()
1138 tx->sta->sta.addr, tx->sta->sta.aid); in ieee80211_tx_prep_agg()
1140 info->control.vif = &tx->sdata->vif; in ieee80211_tx_prep_agg()
1141 info->control.flags |= IEEE80211_TX_INTCFL_NEED_TXPROCESSING; in ieee80211_tx_prep_agg()
1142 info->flags &= ~IEEE80211_TX_TEMPORARY_FLAGS; in ieee80211_tx_prep_agg()
1143 __skb_queue_tail(&tid_tx->pending, skb); in ieee80211_tx_prep_agg()
1144 if (skb_queue_len(&tid_tx->pending) > STA_MAX_TX_BUFFER) in ieee80211_tx_prep_agg()
1145 purge_skb = __skb_dequeue(&tid_tx->pending); in ieee80211_tx_prep_agg()
1147 spin_unlock(&tx->sta->lock); in ieee80211_tx_prep_agg()
1150 ieee80211_free_txskb(&tx->local->hw, purge_skb); in ieee80211_tx_prep_agg()
1155 tid_tx->last_tx = jiffies; in ieee80211_tx_prep_agg()
1170 struct ieee80211_local *local = sdata->local; in ieee80211_tx_prepare()
1176 tx->skb = skb; in ieee80211_tx_prepare()
1177 tx->local = local; in ieee80211_tx_prepare()
1178 tx->sdata = sdata; in ieee80211_tx_prepare()
1179 __skb_queue_head_init(&tx->skbs); in ieee80211_tx_prepare()
1186 info->control.flags &= ~IEEE80211_TX_INTCFL_NEED_TXPROCESSING; in ieee80211_tx_prepare()
1188 hdr = (struct ieee80211_hdr *) skb->data; in ieee80211_tx_prepare()
1192 tx->sta = sta; in ieee80211_tx_prepare()
1194 if (sdata->vif.type == NL80211_IFTYPE_AP_VLAN) { in ieee80211_tx_prepare()
1195 tx->sta = rcu_dereference(sdata->u.vlan.sta); in ieee80211_tx_prepare()
1196 if (!tx->sta && sdata->wdev.use_4addr) in ieee80211_tx_prepare()
1198 } else if (info->flags & (IEEE80211_TX_INTFL_NL80211_FRAME_TX | in ieee80211_tx_prepare()
1200 tx->sdata->control_port_protocol == tx->skb->protocol) { in ieee80211_tx_prepare()
1201 tx->sta = sta_info_get_bss(sdata, hdr->addr1); in ieee80211_tx_prepare()
1203 if (!tx->sta && !is_multicast_ether_addr(hdr->addr1)) in ieee80211_tx_prepare()
1204 tx->sta = sta_info_get(sdata, hdr->addr1); in ieee80211_tx_prepare()
1207 if (tx->sta && ieee80211_is_data_qos(hdr->frame_control) && in ieee80211_tx_prepare()
1208 !ieee80211_is_qos_nullfunc(hdr->frame_control) && in ieee80211_tx_prepare()
1209 ieee80211_hw_check(&local->hw, AMPDU_AGGREGATION) && in ieee80211_tx_prepare()
1210 !ieee80211_hw_check(&local->hw, TX_AMPDU_SETUP_IN_HW)) { in ieee80211_tx_prepare()
1215 tid_tx = rcu_dereference(tx->sta->ampdu_mlme.tid_tx[tid]); in ieee80211_tx_prepare()
1227 if (is_multicast_ether_addr(hdr->addr1)) { in ieee80211_tx_prepare()
1228 tx->flags &= ~IEEE80211_TX_UNICAST; in ieee80211_tx_prepare()
1229 info->flags |= IEEE80211_TX_CTL_NO_ACK; in ieee80211_tx_prepare()
1231 tx->flags |= IEEE80211_TX_UNICAST; in ieee80211_tx_prepare()
1233 if (!(info->flags & IEEE80211_TX_CTL_DONTFRAG)) { in ieee80211_tx_prepare()
1234 if (!(tx->flags & IEEE80211_TX_UNICAST) || in ieee80211_tx_prepare()
1235 skb->len + FCS_LEN <= local->hw.wiphy->frag_threshold || in ieee80211_tx_prepare()
1236 info->flags & IEEE80211_TX_CTL_AMPDU) in ieee80211_tx_prepare()
1237 info->flags |= IEEE80211_TX_CTL_DONTFRAG; in ieee80211_tx_prepare()
1240 if (!tx->sta) in ieee80211_tx_prepare()
1241 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ieee80211_tx_prepare()
1242 else if (test_and_clear_sta_flag(tx->sta, WLAN_STA_CLEAR_PS_FILT)) { in ieee80211_tx_prepare()
1243 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ieee80211_tx_prepare()
1244 ieee80211_check_fast_xmit(tx->sta); in ieee80211_tx_prepare()
1247 info->flags |= IEEE80211_TX_CTL_FIRST_FRAGMENT; in ieee80211_tx_prepare()
1257 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ieee80211_get_txq()
1261 if ((info->flags & IEEE80211_TX_CTL_SEND_AFTER_DTIM) || in ieee80211_get_txq()
1262 (info->control.flags & IEEE80211_TX_CTRL_PS_RESPONSE)) in ieee80211_get_txq()
1265 if (!(info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) && in ieee80211_get_txq()
1266 unlikely(!ieee80211_is_data_present(hdr->frame_control))) { in ieee80211_get_txq()
1267 if ((!ieee80211_is_mgmt(hdr->frame_control) || in ieee80211_get_txq()
1268 ieee80211_is_bufferable_mmpdu(hdr->frame_control) || in ieee80211_get_txq()
1269 vif->type == NL80211_IFTYPE_STATION) && in ieee80211_get_txq()
1270 sta && sta->uploaded) { in ieee80211_get_txq()
1273 * opt-in hardware flag. in ieee80211_get_txq()
1275 txq = sta->sta.txq[IEEE80211_NUM_TIDS]; in ieee80211_get_txq()
1278 u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ieee80211_get_txq()
1280 if (!sta->uploaded) in ieee80211_get_txq()
1283 txq = sta->sta.txq[tid]; in ieee80211_get_txq()
1285 txq = vif->txq; in ieee80211_get_txq()
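For QoS data, ieee80211_get_txq() selects the per-TID queue from skb->priority; the access category used for that TID follows the usual 802.1D user-priority to AC mapping (this is what ieee80211_ac_from_tid() resolves to). A standalone sketch of that mapping table:

#include <stdio.h>

enum ac { AC_VO, AC_VI, AC_BE, AC_BK };	/* same ordering as IEEE80211_AC_* */

/* 802.1D user priority (= QoS TID & 7) to access category, as in WMM. */
static const enum ac up_to_ac[8] = {
	AC_BE, AC_BK, AC_BK, AC_BE, AC_VI, AC_VI, AC_VO, AC_VO,
};

static enum ac ac_from_tid(unsigned int tid)
{
	return up_to_ac[tid & 7];
}

int main(void)
{
	static const char *names[] = { "VO", "VI", "BE", "BK" };

	for (unsigned int tid = 0; tid < 8; tid++)
		printf("TID %u -> AC_%s\n", tid, names[ac_from_tid(tid)]);
	return 0;
}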
1296 IEEE80211_SKB_CB(skb)->control.enqueue_time = codel_get_time(); in ieee80211_set_skb_enqueue_time()
1301 return skb->len; in codel_skb_len_func()
1308 info = (const struct ieee80211_tx_info *)skb->cb; in codel_skb_time_func()
1309 return info->control.enqueue_time; in codel_skb_time_func()
1321 local = vif_to_sdata(txqi->txq.vif)->local; in codel_dequeue_func()
1322 fq = &local->fq; in codel_dequeue_func()
1324 if (cvars == &txqi->def_cvars) in codel_dequeue_func()
1325 flow = &txqi->def_flow; in codel_dequeue_func()
1327 flow = &fq->flows[cvars - local->cvars]; in codel_dequeue_func()
1340 local = vif_to_sdata(txqi->txq.vif)->local; in codel_drop_func()
1341 hw = &local->hw; in codel_drop_func()
1358 cstats = &txqi->cstats; in fq_tin_dequeue_func()
1360 if (txqi->txq.sta) { in fq_tin_dequeue_func()
1361 struct sta_info *sta = container_of(txqi->txq.sta, in fq_tin_dequeue_func()
1363 cparams = &sta->cparams; in fq_tin_dequeue_func()
1365 cparams = &local->cparams; in fq_tin_dequeue_func()
1368 if (flow == &txqi->def_flow) in fq_tin_dequeue_func()
1369 cvars = &txqi->def_cvars; in fq_tin_dequeue_func()
1371 cvars = &local->cvars[flow - fq->flows]; in fq_tin_dequeue_func()
1374 &flow->backlog, in fq_tin_dequeue_func()
1392 ieee80211_free_txskb(&local->hw, skb); in fq_skb_free_func()
1403 return &txqi->def_flow; in fq_flow_get_default_func()
1410 struct fq *fq = &local->fq; in ieee80211_txq_enqueue()
1411 struct fq_tin *tin = &txqi->tin; in ieee80211_txq_enqueue()
1416 spin_lock_bh(&fq->lock); in ieee80211_txq_enqueue()
1420 spin_unlock_bh(&fq->lock); in ieee80211_txq_enqueue()
1429 return info->control.vif == data; in fq_vlan_filter_func()
1435 struct fq *fq = &local->fq; in ieee80211_txq_remove_vlan()
1440 if (WARN_ON(sdata->vif.type != NL80211_IFTYPE_AP_VLAN)) in ieee80211_txq_remove_vlan()
1443 ap = container_of(sdata->bss, struct ieee80211_sub_if_data, u.ap); in ieee80211_txq_remove_vlan()
1445 if (!ap->vif.txq) in ieee80211_txq_remove_vlan()
1448 txqi = to_txq_info(ap->vif.txq); in ieee80211_txq_remove_vlan()
1449 tin = &txqi->tin; in ieee80211_txq_remove_vlan()
1451 spin_lock_bh(&fq->lock); in ieee80211_txq_remove_vlan()
1452 fq_tin_filter(fq, tin, fq_vlan_filter_func, &sdata->vif, in ieee80211_txq_remove_vlan()
1454 spin_unlock_bh(&fq->lock); in ieee80211_txq_remove_vlan()
1461 fq_tin_init(&txqi->tin); in ieee80211_txq_init()
1462 fq_flow_init(&txqi->def_flow); in ieee80211_txq_init()
1463 codel_vars_init(&txqi->def_cvars); in ieee80211_txq_init()
1464 codel_stats_init(&txqi->cstats); in ieee80211_txq_init()
1465 __skb_queue_head_init(&txqi->frags); in ieee80211_txq_init()
1466 INIT_LIST_HEAD(&txqi->schedule_order); in ieee80211_txq_init()
1468 txqi->txq.vif = &sdata->vif; in ieee80211_txq_init()
1471 sdata->vif.txq = &txqi->txq; in ieee80211_txq_init()
1472 txqi->txq.tid = 0; in ieee80211_txq_init()
1473 txqi->txq.ac = IEEE80211_AC_BE; in ieee80211_txq_init()
1479 if (sdata->vif.type == NL80211_IFTYPE_STATION) { in ieee80211_txq_init()
1481 if (!ieee80211_hw_check(&sdata->local->hw, in ieee80211_txq_init()
1484 } else if (!ieee80211_hw_check(&sdata->local->hw, in ieee80211_txq_init()
1489 txqi->txq.ac = IEEE80211_AC_VO; in ieee80211_txq_init()
1491 txqi->txq.ac = ieee80211_ac_from_tid(tid); in ieee80211_txq_init()
1494 txqi->txq.sta = &sta->sta; in ieee80211_txq_init()
1495 txqi->txq.tid = tid; in ieee80211_txq_init()
1496 sta->sta.txq[tid] = &txqi->txq; in ieee80211_txq_init()
1502 struct fq *fq = &local->fq; in ieee80211_txq_purge()
1503 struct fq_tin *tin = &txqi->tin; in ieee80211_txq_purge()
1505 spin_lock_bh(&fq->lock); in ieee80211_txq_purge()
1507 ieee80211_purge_tx_queue(&local->hw, &txqi->frags); in ieee80211_txq_purge()
1508 spin_unlock_bh(&fq->lock); in ieee80211_txq_purge()
1510 spin_lock_bh(&local->active_txq_lock[txqi->txq.ac]); in ieee80211_txq_purge()
1511 list_del_init(&txqi->schedule_order); in ieee80211_txq_purge()
1512 spin_unlock_bh(&local->active_txq_lock[txqi->txq.ac]); in ieee80211_txq_purge()
1517 if (local->hw.wiphy->txq_limit) in ieee80211_txq_set_params()
1518 local->fq.limit = local->hw.wiphy->txq_limit; in ieee80211_txq_set_params()
1520 local->hw.wiphy->txq_limit = local->fq.limit; in ieee80211_txq_set_params()
1522 if (local->hw.wiphy->txq_memory_limit) in ieee80211_txq_set_params()
1523 local->fq.memory_limit = local->hw.wiphy->txq_memory_limit; in ieee80211_txq_set_params()
1525 local->hw.wiphy->txq_memory_limit = local->fq.memory_limit; in ieee80211_txq_set_params()
1527 if (local->hw.wiphy->txq_quantum) in ieee80211_txq_set_params()
1528 local->fq.quantum = local->hw.wiphy->txq_quantum; in ieee80211_txq_set_params()
1530 local->hw.wiphy->txq_quantum = local->fq.quantum; in ieee80211_txq_set_params()
1535 struct fq *fq = &local->fq; in ieee80211_txq_setup_flows()
1539 enum nl80211_band band; in ieee80211_txq_setup_flows()
1541 if (!local->ops->wake_tx_queue) in ieee80211_txq_setup_flows()
1550 * queue size. 4 Mbytes is 64 max-size aggregates in 802.11n. in ieee80211_txq_setup_flows()
1552 for (band = 0; band < NUM_NL80211_BANDS; band++) { in ieee80211_txq_setup_flows()
1555 sband = local->hw.wiphy->bands[band]; in ieee80211_txq_setup_flows()
1559 supp_vht = supp_vht || sband->vht_cap.vht_supported; in ieee80211_txq_setup_flows()
1563 fq->memory_limit = 4 << 20; /* 4 Mbytes */ in ieee80211_txq_setup_flows()
1565 codel_params_init(&local->cparams); in ieee80211_txq_setup_flows()
1566 local->cparams.interval = MS2TIME(100); in ieee80211_txq_setup_flows()
1567 local->cparams.target = MS2TIME(20); in ieee80211_txq_setup_flows()
1568 local->cparams.ecn = true; in ieee80211_txq_setup_flows()
1570 local->cvars = kcalloc(fq->flows_cnt, sizeof(local->cvars[0]), in ieee80211_txq_setup_flows()
1572 if (!local->cvars) { in ieee80211_txq_setup_flows()
1573 spin_lock_bh(&fq->lock); in ieee80211_txq_setup_flows()
1575 spin_unlock_bh(&fq->lock); in ieee80211_txq_setup_flows()
1576 return -ENOMEM; in ieee80211_txq_setup_flows()
1579 for (i = 0; i < fq->flows_cnt; i++) in ieee80211_txq_setup_flows()
1580 codel_vars_init(&local->cvars[i]); in ieee80211_txq_setup_flows()
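The intermediate TX queues run fq_codel; the parameters chosen above are a 20 ms target and a 100 ms interval. The sketch below is a heavily condensed illustration of the CoDel control law those parameters feed (drop once the sojourn time has exceeded the target for a whole interval, then drop again at interval/sqrt(count)); the kernel's codel implementation additionally handles ECN marking, re-entering the drop state with a reduced count, and more. Build with -lm for sqrt().

#include <math.h>
#include <stdbool.h>
#include <stdio.h>

#define TARGET   20.0	/* ms */
#define INTERVAL 100.0	/* ms */

struct codel {
	bool dropping;
	unsigned int count;      /* drops since entering the dropping state */
	double first_above;      /* when sojourn time first exceeded target */
	double drop_next;        /* scheduled time of the next drop */
};

/* Should the packet dequeued at 'now' with this sojourn time be dropped? */
static bool codel_should_drop(struct codel *c, double now, double sojourn)
{
	if (sojourn < TARGET) {
		c->dropping = false;
		c->first_above = 0;
		return false;
	}
	if (!c->dropping) {
		if (c->first_above == 0)
			c->first_above = now + INTERVAL;
		if (now < c->first_above)
			return false;        /* not above target long enough */
		c->dropping = true;
		c->count = 1;
		c->drop_next = now + INTERVAL;
		return true;
	}
	if (now >= c->drop_next) {
		c->count++;
		c->drop_next = now + INTERVAL / sqrt(c->count);
		return true;
	}
	return false;
}

int main(void)
{
	struct codel c = { 0 };

	for (double t = 0; t < 600; t += 10)	/* persistent 50 ms sojourn */
		if (codel_should_drop(&c, t, 50.0))
			printf("drop at t=%.0f ms (count=%u)\n", t, c.count);
	return 0;
}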
1589 struct fq *fq = &local->fq; in ieee80211_txq_teardown_flows()
1591 if (!local->ops->wake_tx_queue) in ieee80211_txq_teardown_flows()
1594 kfree(local->cvars); in ieee80211_txq_teardown_flows()
1595 local->cvars = NULL; in ieee80211_txq_teardown_flows()
1597 spin_lock_bh(&fq->lock); in ieee80211_txq_teardown_flows()
1599 spin_unlock_bh(&fq->lock); in ieee80211_txq_teardown_flows()
1610 if (!local->ops->wake_tx_queue || in ieee80211_queue_skb()
1611 sdata->vif.type == NL80211_IFTYPE_MONITOR) in ieee80211_queue_skb()
1614 if (sdata->vif.type == NL80211_IFTYPE_AP_VLAN) in ieee80211_queue_skb()
1615 sdata = container_of(sdata->bss, in ieee80211_queue_skb()
1618 vif = &sdata->vif; in ieee80211_queue_skb()
1643 int q = info->hw_queue; in ieee80211_tx_frags()
1646 if (WARN_ON_ONCE(q >= local->hw.queues)) { in ieee80211_tx_frags()
1648 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_frags()
1653 spin_lock_irqsave(&local->queue_stop_reason_lock, flags); in ieee80211_tx_frags()
1654 if (local->queue_stop_reasons[q] || in ieee80211_tx_frags()
1655 (!txpending && !skb_queue_empty(&local->pending[q]))) { in ieee80211_tx_frags()
1656 if (unlikely(info->flags & in ieee80211_tx_frags()
1658 if (local->queue_stop_reasons[q] & in ieee80211_tx_frags()
1661 * Drop off-channel frames if queues in ieee80211_tx_frags()
1663 * than off-channel operation. Never in ieee80211_tx_frags()
1667 &local->queue_stop_reason_lock, in ieee80211_tx_frags()
1669 ieee80211_purge_tx_queue(&local->hw, in ieee80211_tx_frags()
1677 * later transmission from the tx-pending in ieee80211_tx_frags()
1682 &local->pending[q]); in ieee80211_tx_frags()
1685 &local->pending[q]); in ieee80211_tx_frags()
1687 spin_unlock_irqrestore(&local->queue_stop_reason_lock, in ieee80211_tx_frags()
1692 spin_unlock_irqrestore(&local->queue_stop_reason_lock, flags); in ieee80211_tx_frags()
1694 info->control.vif = vif; in ieee80211_tx_frags()
1695 control.sta = sta ? &sta->sta : NULL; in ieee80211_tx_frags()
1722 fc = ((struct ieee80211_hdr *)skb->data)->frame_control; in __ieee80211_tx()
1724 sdata = vif_to_sdata(info->control.vif); in __ieee80211_tx()
1725 if (sta && !sta->uploaded) in __ieee80211_tx()
1728 switch (sdata->vif.type) { in __ieee80211_tx()
1730 if (sdata->u.mntr.flags & MONITOR_FLAG_ACTIVE) { in __ieee80211_tx()
1731 vif = &sdata->vif; in __ieee80211_tx()
1734 sdata = rcu_dereference(local->monitor_sdata); in __ieee80211_tx()
1736 vif = &sdata->vif; in __ieee80211_tx()
1737 info->hw_queue = in __ieee80211_tx()
1738 vif->hw_queue[skb_get_queue_mapping(skb)]; in __ieee80211_tx()
1739 } else if (ieee80211_hw_check(&local->hw, QUEUE_CONTROL)) { in __ieee80211_tx()
1740 ieee80211_purge_tx_queue(&local->hw, skbs); in __ieee80211_tx()
1746 sdata = container_of(sdata->bss, in __ieee80211_tx()
1750 vif = &sdata->vif; in __ieee80211_tx()
1764 * Invoke TX handlers, return 0 on success and non-zero if the
1787 if (!ieee80211_hw_check(&tx->local->hw, HAS_RATE_CONTROL)) in invoke_tx_handlers_early()
1792 I802_DEBUG_INC(tx->local->tx_handlers_drop); in invoke_tx_handlers_early()
1793 if (tx->skb) in invoke_tx_handlers_early()
1794 ieee80211_free_txskb(&tx->local->hw, tx->skb); in invoke_tx_handlers_early()
1796 ieee80211_purge_tx_queue(&tx->local->hw, &tx->skbs); in invoke_tx_handlers_early()
1797 return -1; in invoke_tx_handlers_early()
1799 I802_DEBUG_INC(tx->local->tx_handlers_queued); in invoke_tx_handlers_early()
1800 return -1; in invoke_tx_handlers_early()
1812 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(tx->skb); in invoke_tx_handlers_late()
1815 if (unlikely(info->flags & IEEE80211_TX_INTFL_RETRANSMISSION)) { in invoke_tx_handlers_late()
1816 __skb_queue_tail(&tx->skbs, tx->skb); in invoke_tx_handlers_late()
1817 tx->skb = NULL; in invoke_tx_handlers_late()
1824 /* handlers after fragment must be aware of tx info fragmentation! */ in invoke_tx_handlers_late()
1827 if (!ieee80211_hw_check(&tx->local->hw, HAS_RATE_CONTROL)) in invoke_tx_handlers_late()
1833 I802_DEBUG_INC(tx->local->tx_handlers_drop); in invoke_tx_handlers_late()
1834 if (tx->skb) in invoke_tx_handlers_late()
1835 ieee80211_free_txskb(&tx->local->hw, tx->skb); in invoke_tx_handlers_late()
1837 ieee80211_purge_tx_queue(&tx->local->hw, &tx->skbs); in invoke_tx_handlers_late()
1838 return -1; in invoke_tx_handlers_late()
1840 I802_DEBUG_INC(tx->local->tx_handlers_queued); in invoke_tx_handlers_late()
1841 return -1; in invoke_tx_handlers_late()
1858 int band, struct ieee80211_sta **sta) in ieee80211_tx_prepare_skb()
1868 info->band = band; in ieee80211_tx_prepare_skb()
1869 info->control.vif = vif; in ieee80211_tx_prepare_skb()
1870 info->hw_queue = vif->hw_queue[skb_get_queue_mapping(skb)]; in ieee80211_tx_prepare_skb()
1877 *sta = &tx.sta->sta; in ieee80211_tx_prepare_skb()
1901 struct ieee80211_local *local = sdata->local; in ieee80211_tx()
1908 if (unlikely(skb->len < 10)) { in ieee80211_tx()
1914 led_len = skb->len; in ieee80211_tx()
1918 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx()
1925 if (!(info->flags & IEEE80211_TX_CTL_TX_OFFCHAN) || in ieee80211_tx()
1926 !ieee80211_hw_check(&local->hw, QUEUE_CONTROL)) in ieee80211_tx()
1927 info->hw_queue = in ieee80211_tx()
1928 sdata->vif.hw_queue[skb_get_queue_mapping(skb)]; in ieee80211_tx()
1956 struct ieee80211_local *local = sdata->local; in ieee80211_skb_resize()
1962 sdata->crypto_tx_tailroom_needed_cnt); in ieee80211_skb_resize()
1966 tail_need -= skb_tailroom(skb); in ieee80211_skb_resize()
1971 (!ieee80211_hw_check(&local->hw, SUPPORTS_CLONED_SKBS) || in ieee80211_skb_resize()
1973 I802_DEBUG_INC(local->tx_expand_skb_head_cloned); in ieee80211_skb_resize()
1975 I802_DEBUG_INC(local->tx_expand_skb_head); in ieee80211_skb_resize()
1980 wiphy_debug(local->hw.wiphy, in ieee80211_skb_resize()
1982 return -ENOMEM; in ieee80211_skb_resize()
1991 struct ieee80211_local *local = sdata->local; in ieee80211_xmit()
1993 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ieee80211_xmit()
1997 if (info->flags & IEEE80211_TX_INTFL_DONT_ENCRYPT) in ieee80211_xmit()
1999 else if (ieee80211_is_mgmt(hdr->frame_control)) in ieee80211_xmit()
2004 headroom = local->tx_headroom; in ieee80211_xmit()
2006 headroom += sdata->encrypt_headroom; in ieee80211_xmit()
2007 headroom -= skb_headroom(skb); in ieee80211_xmit()
2011 ieee80211_free_txskb(&local->hw, skb); in ieee80211_xmit()
2016 hdr = (struct ieee80211_hdr *) skb->data; in ieee80211_xmit()
2017 info->control.vif = &sdata->vif; in ieee80211_xmit()
2019 if (ieee80211_vif_is_mesh(&sdata->vif)) { in ieee80211_xmit()
2020 if (ieee80211_is_data(hdr->frame_control) && in ieee80211_xmit()
2021 is_unicast_ether_addr(hdr->addr1)) { in ieee80211_xmit()
2036 struct ieee80211_local *local = wdev_priv(dev->ieee80211_ptr); in ieee80211_parse_tx_radiotap()
2039 (struct ieee80211_radiotap_header *) skb->data; in ieee80211_parse_tx_radiotap()
2042 local->hw.wiphy->bands[info->band]; in ieee80211_parse_tx_radiotap()
2043 int ret = ieee80211_radiotap_iterator_init(&iterator, rthdr, skb->len, in ieee80211_parse_tx_radiotap()
2056 if (unlikely(skb->len < sizeof(struct ieee80211_radiotap_header))) in ieee80211_parse_tx_radiotap()
2060 if (unlikely(rthdr->it_version)) in ieee80211_parse_tx_radiotap()
2064 if (unlikely(skb->len < ieee80211_get_radiotap_len(skb->data))) in ieee80211_parse_tx_radiotap()
2067 info->flags |= IEEE80211_TX_INTFL_DONT_ENCRYPT | in ieee80211_parse_tx_radiotap()
2072 * (ieee80211_radiotap_iterator_next returns -ENOENT when no more in ieee80211_parse_tx_radiotap()
2073 * entries present, or -EINVAL on error) in ieee80211_parse_tx_radiotap()
2094 * handed has the 32-bit FCS CRC at the end... in ieee80211_parse_tx_radiotap()
2099 if (skb->len < (iterator._max_length + FCS_LEN)) in ieee80211_parse_tx_radiotap()
2102 skb_trim(skb, skb->len - FCS_LEN); in ieee80211_parse_tx_radiotap()
2105 info->flags &= ~IEEE80211_TX_INTFL_DONT_ENCRYPT; in ieee80211_parse_tx_radiotap()
2107 info->flags &= ~IEEE80211_TX_CTL_DONTFRAG; in ieee80211_parse_tx_radiotap()
2113 info->flags |= IEEE80211_TX_CTL_NO_ACK; in ieee80211_parse_tx_radiotap()
2115 info->control.flags |= IEEE80211_TX_CTRL_NO_SEQNO; in ieee80211_parse_tx_radiotap()
2176 * Documentation/networking/mac80211-injection.rst in ieee80211_parse_tx_radiotap()
2185 if (ret != -ENOENT) /* ie, if we didn't simply run out of fields */ in ieee80211_parse_tx_radiotap()
2189 info->control.flags |= IEEE80211_TX_CTRL_RATE_INJECT; in ieee80211_parse_tx_radiotap()
2192 info->control.rates[i].idx = -1; in ieee80211_parse_tx_radiotap()
2193 info->control.rates[i].flags = 0; in ieee80211_parse_tx_radiotap()
2194 info->control.rates[i].count = 0; in ieee80211_parse_tx_radiotap()
2198 info->control.rates[0].idx = rate; in ieee80211_parse_tx_radiotap()
2200 ieee80211_rate_set_vht(info->control.rates, vht_mcs, in ieee80211_parse_tx_radiotap()
2203 for (i = 0; i < sband->n_bitrates; i++) { in ieee80211_parse_tx_radiotap()
2204 if (rate * 5 != sband->bitrates[i].bitrate) in ieee80211_parse_tx_radiotap()
2207 info->control.rates[0].idx = i; in ieee80211_parse_tx_radiotap()
2212 if (info->control.rates[0].idx < 0) in ieee80211_parse_tx_radiotap()
2213 info->control.flags &= ~IEEE80211_TX_CTRL_RATE_INJECT; in ieee80211_parse_tx_radiotap()
2215 info->control.rates[0].flags = rate_flags; in ieee80211_parse_tx_radiotap()
2216 info->control.rates[0].count = min_t(u8, rate_retries + 1, in ieee80211_parse_tx_radiotap()
2217 local->hw.max_rate_tries); in ieee80211_parse_tx_radiotap()
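ieee80211_parse_tx_radiotap() first sanity-checks the injected radiotap header: the version must be 0 and the claimed length must fit inside the skb. A standalone sketch of reading that fixed 8-byte header (version, pad, little-endian length, present bitmap):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Fixed part of a radiotap header: u8 version, u8 pad, le16 length,
 * le32 present bitmap. */
struct radiotap_hdr {
	uint8_t  it_version;
	uint8_t  it_pad;
	uint16_t it_len;        /* little-endian on the wire */
	uint32_t it_present;    /* little-endian on the wire */
};

/* Return the radiotap length, or 0 if the header is not usable, mirroring
 * the version/length sanity checks in ieee80211_parse_tx_radiotap(). */
static unsigned int radiotap_len(const uint8_t *buf, size_t buflen)
{
	unsigned int len;

	if (buflen < sizeof(struct radiotap_hdr))
		return 0;
	if (buf[0] != 0)                     /* it_version must be 0 */
		return 0;
	len = buf[2] | (buf[3] << 8);        /* le16 it_len */
	if (len < sizeof(struct radiotap_hdr) || len > buflen)
		return 0;
	return len;
}

int main(void)
{
	uint8_t frame[32] = { 0x00, 0x00, 0x0b, 0x00 };   /* version 0, len 11 */

	printf("radiotap length: %u\n", radiotap_len(frame, sizeof(frame)));
	return 0;
}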
2226 struct ieee80211_local *local = wdev_priv(dev->ieee80211_ptr); in ieee80211_monitor_start_xmit()
2236 info->flags = IEEE80211_TX_CTL_REQ_TX_STATUS | in ieee80211_monitor_start_xmit()
2239 /* Sanity-check and process the injection radiotap header */ in ieee80211_monitor_start_xmit()
2244 len_rthdr = ieee80211_get_radiotap_len(skb->data); in ieee80211_monitor_start_xmit()
2254 * these are just fixed to the end of the rt area since we in ieee80211_monitor_start_xmit()
2260 if (skb->len < len_rthdr + 2) in ieee80211_monitor_start_xmit()
2263 hdr = (struct ieee80211_hdr *)(skb->data + len_rthdr); in ieee80211_monitor_start_xmit()
2264 hdrlen = ieee80211_hdrlen(hdr->frame_control); in ieee80211_monitor_start_xmit()
2266 if (skb->len < len_rthdr + hdrlen) in ieee80211_monitor_start_xmit()
2270 * Initialize skb->protocol if the injected frame is a data frame in ieee80211_monitor_start_xmit()
2273 if (ieee80211_is_data(hdr->frame_control) && in ieee80211_monitor_start_xmit()
2274 skb->len >= len_rthdr + hdrlen + sizeof(rfc1042_header) + 2) { in ieee80211_monitor_start_xmit()
2278 skb->protocol = cpu_to_be16((payload[6] << 8) | in ieee80211_monitor_start_xmit()
2283 * Initialize skb->priority for QoS frames. This is put in the TID field in ieee80211_monitor_start_xmit()
2284 * of the frame before passing it to the driver. in ieee80211_monitor_start_xmit()
2286 if (ieee80211_is_data_qos(hdr->frame_control)) { in ieee80211_monitor_start_xmit()
2288 skb->priority = *p & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_monitor_start_xmit()
2295 * we handle as though they are non-injected frames. in ieee80211_monitor_start_xmit()
2302 * don't use nl80211-based management TX/RX. in ieee80211_monitor_start_xmit()
2306 list_for_each_entry_rcu(tmp_sdata, &local->interfaces, list) { in ieee80211_monitor_start_xmit()
2309 if (tmp_sdata->vif.type == NL80211_IFTYPE_MONITOR || in ieee80211_monitor_start_xmit()
2310 tmp_sdata->vif.type == NL80211_IFTYPE_AP_VLAN || in ieee80211_monitor_start_xmit()
2311 tmp_sdata->vif.type == NL80211_IFTYPE_WDS) in ieee80211_monitor_start_xmit()
2313 if (ether_addr_equal(tmp_sdata->vif.addr, hdr->addr2)) { in ieee80211_monitor_start_xmit()
2319 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_monitor_start_xmit()
2321 tmp_sdata = rcu_dereference(local->monitor_sdata); in ieee80211_monitor_start_xmit()
2324 rcu_dereference(tmp_sdata->vif.chanctx_conf); in ieee80211_monitor_start_xmit()
2328 chandef = &chanctx_conf->def; in ieee80211_monitor_start_xmit()
2329 else if (!local->use_chanctx) in ieee80211_monitor_start_xmit()
2330 chandef = &local->_oper_chandef; in ieee80211_monitor_start_xmit()
2350 if (!cfg80211_reg_can_beacon(local->hw.wiphy, chandef, in ieee80211_monitor_start_xmit()
2351 sdata->vif.type)) in ieee80211_monitor_start_xmit()
2354 info->band = chandef->chan->band; in ieee80211_monitor_start_xmit()
2373 u16 ethertype = (skb->data[12] << 8) | skb->data[13]; in ieee80211_is_tdls_setup()
2376 skb->len > 14 && in ieee80211_is_tdls_setup()
2377 skb->data[14] == WLAN_TDLS_SNAP_RFTYPE; in ieee80211_is_tdls_setup()
2386 switch (sdata->vif.type) { in ieee80211_lookup_ra_sta()
2388 sta = rcu_dereference(sdata->u.vlan.sta); in ieee80211_lookup_ra_sta()
2392 } else if (sdata->wdev.use_4addr) { in ieee80211_lookup_ra_sta()
2393 return -ENOLINK; in ieee80211_lookup_ra_sta()
2399 if (is_multicast_ether_addr(skb->data)) { in ieee80211_lookup_ra_sta()
2400 *sta_out = ERR_PTR(-ENOENT); in ieee80211_lookup_ra_sta()
2403 sta = sta_info_get_bss(sdata, skb->data); in ieee80211_lookup_ra_sta()
2406 sta = sta_info_get(sdata, sdata->u.wds.remote_addr); in ieee80211_lookup_ra_sta()
2415 if (sdata->wdev.wiphy->flags & WIPHY_FLAG_SUPPORTS_TDLS) { in ieee80211_lookup_ra_sta()
2416 sta = sta_info_get(sdata, skb->data); in ieee80211_lookup_ra_sta()
2425 * TDLS link during setup - throw out frames to in ieee80211_lookup_ra_sta()
2426 * peer. Allow TDLS-setup frames to unauthorized in ieee80211_lookup_ra_sta()
2427 * peers for the special case of a link teardown in ieee80211_lookup_ra_sta()
2432 return -EINVAL; in ieee80211_lookup_ra_sta()
2437 sta = sta_info_get(sdata, sdata->u.mgd.bssid); in ieee80211_lookup_ra_sta()
2439 return -ENOLINK; in ieee80211_lookup_ra_sta()
2442 return -EINVAL; in ieee80211_lookup_ra_sta()
2445 *sta_out = sta ?: ERR_PTR(-ENOENT); in ieee80211_lookup_ra_sta()
2457 if (skb->sk) in ieee80211_store_ack_skb()
2466 spin_lock_irqsave(&local->ack_status_lock, flags); in ieee80211_store_ack_skb()
2467 id = idr_alloc(&local->ack_status_frames, ack_skb, in ieee80211_store_ack_skb()
2469 spin_unlock_irqrestore(&local->ack_status_lock, flags); in ieee80211_store_ack_skb()
2476 IEEE80211_SKB_CB(ack_skb)->ack.cookie = *cookie; in ieee80211_store_ack_skb()
2487 * ieee80211_build_hdr - build 802.11 header in the given frame
2499 * Note that this function also takes care of the TX status request and
2500 * potential unsharing of the SKB - this needs to be interleaved with the
2503 * The function requires the read-side RCU lock held
2512 struct ieee80211_local *local = sdata->local; in ieee80211_build_hdr()
2528 enum nl80211_band band; in ieee80211_build_hdr()
2535 if (local->force_tx_status) in ieee80211_build_hdr()
2541 ethertype = (skb->data[12] << 8) | skb->data[13]; in ieee80211_build_hdr()
2544 switch (sdata->vif.type) { in ieee80211_build_hdr()
2546 if (sdata->wdev.use_4addr) { in ieee80211_build_hdr()
2549 memcpy(hdr.addr1, sta->sta.addr, ETH_ALEN); in ieee80211_build_hdr()
2550 memcpy(hdr.addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_build_hdr()
2551 memcpy(hdr.addr3, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2552 memcpy(hdr.addr4, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2555 wme_sta = sta->sta.wme; in ieee80211_build_hdr()
2557 ap_sdata = container_of(sdata->bss, struct ieee80211_sub_if_data, in ieee80211_build_hdr()
2559 chanctx_conf = rcu_dereference(ap_sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2561 ret = -ENOTCONN; in ieee80211_build_hdr()
2564 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2565 if (sdata->wdev.use_4addr) in ieee80211_build_hdr()
2569 if (sdata->vif.type == NL80211_IFTYPE_AP) in ieee80211_build_hdr()
2570 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2572 ret = -ENOTCONN; in ieee80211_build_hdr()
2577 memcpy(hdr.addr1, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2578 memcpy(hdr.addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_build_hdr()
2579 memcpy(hdr.addr3, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2581 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2586 memcpy(hdr.addr1, sdata->u.wds.remote_addr, ETH_ALEN); in ieee80211_build_hdr()
2587 memcpy(hdr.addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_build_hdr()
2588 memcpy(hdr.addr3, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2589 memcpy(hdr.addr4, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2595 band = local->hw.conf.chandef.chan->band; in ieee80211_build_hdr()
2599 if (!is_multicast_ether_addr(skb->data)) { in ieee80211_build_hdr()
2603 mpath = mesh_path_lookup(sdata, skb->data); in ieee80211_build_hdr()
2606 next_hop = rcu_dereference(mpath->next_hop); in ieee80211_build_hdr()
2608 !(mpath->flags & (MESH_PATH_ACTIVE | in ieee80211_build_hdr()
2614 mppath = mpp_path_lookup(sdata, skb->data); in ieee80211_build_hdr()
2616 mppath->exp_time = jiffies; in ieee80211_build_hdr()
2620 mesh_path_del(sdata, mpath->dst); in ieee80211_build_hdr()
2629 if (ether_addr_equal(sdata->vif.addr, skb->data + ETH_ALEN) && in ieee80211_build_hdr()
2630 !(mppath && !ether_addr_equal(mppath->mpp, skb->data))) { in ieee80211_build_hdr()
2632 skb->data, skb->data + ETH_ALEN); in ieee80211_build_hdr()
2636 /* DS -> MBSS (802.11-2012 13.11.3.3). in ieee80211_build_hdr()
2642 const u8 *mesh_da = skb->data; in ieee80211_build_hdr()
2645 mesh_da = mppath->mpp; in ieee80211_build_hdr()
2647 mesh_da = mpath->dst; in ieee80211_build_hdr()
2650 mesh_da, sdata->vif.addr); in ieee80211_build_hdr()
2655 skb->data + ETH_ALEN, NULL); in ieee80211_build_hdr()
2659 sdata, &mesh_hdr, skb->data, in ieee80211_build_hdr()
2660 skb->data + ETH_ALEN); in ieee80211_build_hdr()
2663 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2665 ret = -ENOTCONN; in ieee80211_build_hdr()
2668 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2675 memcpy(hdr.addr1, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2684 memcpy(hdr.addr1, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2685 memcpy(hdr.addr2, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2686 memcpy(hdr.addr3, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_build_hdr()
2688 } else if (sdata->u.mgd.use_4addr && in ieee80211_build_hdr()
2689 cpu_to_be16(ethertype) != sdata->control_port_protocol) { in ieee80211_build_hdr()
2693 memcpy(hdr.addr1, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_build_hdr()
2694 memcpy(hdr.addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_build_hdr()
2695 memcpy(hdr.addr3, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2696 memcpy(hdr.addr4, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2701 memcpy(hdr.addr1, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_build_hdr()
2702 memcpy(hdr.addr2, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2703 memcpy(hdr.addr3, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2706 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2708 ret = -ENOTCONN; in ieee80211_build_hdr()
2711 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2715 memcpy(hdr.addr1, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2716 memcpy(hdr.addr2, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2719 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2721 ret = -ENOTCONN; in ieee80211_build_hdr()
2724 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2728 memcpy(hdr.addr1, skb->data, ETH_ALEN); in ieee80211_build_hdr()
2729 memcpy(hdr.addr2, skb->data + ETH_ALEN, ETH_ALEN); in ieee80211_build_hdr()
2730 memcpy(hdr.addr3, sdata->u.ibss.bssid, ETH_ALEN); in ieee80211_build_hdr()
2732 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_build_hdr()
2734 ret = -ENOTCONN; in ieee80211_build_hdr()
2737 band = chanctx_conf->def.chan->band; in ieee80211_build_hdr()
2740 ret = -EINVAL; in ieee80211_build_hdr()
2749 wme_sta = sta->sta.wme; in ieee80211_build_hdr()
2750 } else if (ieee80211_vif_is_mesh(&sdata->vif)) { in ieee80211_build_hdr()
2751 /* For mesh, the use of the QoS header is mandatory */ in ieee80211_build_hdr()
2765 if (unlikely(!ieee80211_vif_is_mesh(&sdata->vif) && in ieee80211_build_hdr()
2766 (sdata->vif.type != NL80211_IFTYPE_OCB) && in ieee80211_build_hdr()
2768 (cpu_to_be16(ethertype) != sdata->control_port_protocol || in ieee80211_build_hdr()
2769 !ether_addr_equal(sdata->vif.addr, skb->data + ETH_ALEN)))) { in ieee80211_build_hdr()
2772 sdata->name, hdr.addr1); in ieee80211_build_hdr()
2775 I802_DEBUG_INC(local->tx_handlers_drop_unauth_port); in ieee80211_build_hdr()
2777 ret = -EPERM; in ieee80211_build_hdr()
2781 if (unlikely(!multicast && ((skb->sk && in ieee80211_build_hdr()
2782 skb_shinfo(skb)->tx_flags & SKBTX_WIFI_STATUS) || in ieee80211_build_hdr()
2793 /* can't happen -- skb is a clone if info_id != 0 */ in ieee80211_build_hdr()
2800 ret = -ENOMEM; in ieee80211_build_hdr()
2813 skip_header_bytes -= 2; in ieee80211_build_hdr()
2817 skip_header_bytes -= 2; in ieee80211_build_hdr()
2824 head_need = hdrlen + encaps_len + meshhdrlen - skb_headroom(skb); in ieee80211_build_hdr()
2827 * So we need to modify the skb header and hence need a copy of in ieee80211_build_hdr()
2839 head_need += sdata->encrypt_headroom; in ieee80211_build_hdr()
2840 head_need += local->tx_headroom; in ieee80211_build_hdr()
2843 ieee80211_free_txskb(&local->hw, skb); in ieee80211_build_hdr()
2845 return ERR_PTR(-ENOMEM); in ieee80211_build_hdr()
2861 memcpy(skb_push(skb, hdrlen - 2), &hdr, hdrlen - 2); in ieee80211_build_hdr()
2875 info->flags = info_flags; in ieee80211_build_hdr()
2876 info->ack_frame_id = info_id; in ieee80211_build_hdr()
2877 info->band = band; in ieee80211_build_hdr()
2878 info->control.flags = ctrl_flags; in ieee80211_build_hdr()
2887 * fast-xmit overview
2889 * The core idea of this fast-xmit is to remove per-packet checks by checking
2890 * them out of band. ieee80211_check_fast_xmit() implements the out-of-band
2891 * checks that are needed to get the sta->fast_tx pointer assigned, after which
2896 * Once assigned, the fast_tx data structure also caches the per-packet 802.11
2899 * The most difficult part of this is that when any of these assumptions
2902 * since the per-packet code no longer checks the conditions. This is reflected
2903 * by the calls to these functions throughout the rest of the code, and must be
2904 * maintained if any of the TX path checks change.
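The overview above boils down to: run the expensive checks once, cache a pre-built 802.11 header in sta->fast_tx, and let the per-packet path only copy that cached header. A minimal userspace model of that split (structure and field names here are illustrative, not the kernel's):

/* Userspace model of the fast-xmit idea: checks run once, out of band,
 * and cache a header template; the per-packet path only copies it. */
#include <stdio.h>
#include <string.h>

struct fast_tx_model {
	unsigned char hdr[32];	/* pre-built 802.11 header template */
	int hdr_len;
	int valid;		/* cleared whenever a cached assumption changes */
};

/* Out-of-band "check" step: build and cache the header once. */
static void check_fast_xmit(struct fast_tx_model *ft, const unsigned char *da,
			    const unsigned char *sa)
{
	memset(ft->hdr, 0, sizeof(ft->hdr));
	ft->hdr[0] = 0x08;			/* data frame */
	memcpy(&ft->hdr[4], da, 6);		/* addr1 */
	memcpy(&ft->hdr[10], sa, 6);		/* addr2 */
	ft->hdr_len = 24;
	ft->valid = 1;
}

/* Per-packet path: no checks, just prepend the cached header. */
static int xmit_fast(const struct fast_tx_model *ft, const unsigned char *payload,
		     int len, unsigned char *frame)
{
	if (!ft->valid)
		return -1;			/* fall back to the slow path */
	memcpy(frame, ft->hdr, ft->hdr_len);
	memcpy(frame + ft->hdr_len, payload, len);
	return ft->hdr_len + len;
}

int main(void)
{
	struct fast_tx_model ft = { 0 };
	unsigned char da[6] = { 0x02, 0, 0, 0, 0, 1 }, sa[6] = { 0x02, 0, 0, 0, 0, 2 };
	unsigned char frame[64];

	check_fast_xmit(&ft, da, sa);
	printf("frame length: %d\n",
	       xmit_fast(&ft, (const unsigned char *)"data", 4, frame));
	return 0;
}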
2910 struct ieee80211_local *local = sta->local; in ieee80211_check_fast_xmit()
2911 struct ieee80211_sub_if_data *sdata = sta->sdata; in ieee80211_check_fast_xmit()
2916 if (!ieee80211_hw_check(&local->hw, SUPPORT_FAST_XMIT)) in ieee80211_check_fast_xmit()
2922 * Without it, the invocation of this function right after the key in ieee80211_check_fast_xmit()
2927 * key and the fast-tx assignment are done atomically, so the CPU that in ieee80211_check_fast_xmit()
2931 spin_lock_bh(&sta->lock); in ieee80211_check_fast_xmit()
2932 if (ieee80211_hw_check(&local->hw, SUPPORTS_PS) && in ieee80211_check_fast_xmit()
2933 !ieee80211_hw_check(&local->hw, SUPPORTS_DYNAMIC_PS) && in ieee80211_check_fast_xmit()
2934 sdata->vif.type == NL80211_IFTYPE_STATION) in ieee80211_check_fast_xmit()
2935 goto out; in ieee80211_check_fast_xmit()
2938 goto out; in ieee80211_check_fast_xmit()
2944 goto out; in ieee80211_check_fast_xmit()
2946 if (sdata->noack_map) in ieee80211_check_fast_xmit()
2947 goto out; in ieee80211_check_fast_xmit()
2949 /* fast-xmit doesn't handle fragmentation at all */ in ieee80211_check_fast_xmit()
2950 if (local->hw.wiphy->frag_threshold != (u32)-1 && in ieee80211_check_fast_xmit()
2951 !ieee80211_hw_check(&local->hw, SUPPORTS_TX_FRAG)) in ieee80211_check_fast_xmit()
2952 goto out; in ieee80211_check_fast_xmit()
2955 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_check_fast_xmit()
2958 goto out; in ieee80211_check_fast_xmit()
2960 build.band = chanctx_conf->def.chan->band; in ieee80211_check_fast_xmit()
2965 switch (sdata->vif.type) { in ieee80211_check_fast_xmit()
2970 memcpy(hdr->addr3, sdata->u.ibss.bssid, ETH_ALEN); in ieee80211_check_fast_xmit()
2978 memcpy(hdr->addr3, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_check_fast_xmit()
2983 if (sdata->u.mgd.use_4addr) { in ieee80211_check_fast_xmit()
2984 /* non-regular ethertype cannot use the fastpath */ in ieee80211_check_fast_xmit()
2988 memcpy(hdr->addr1, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_check_fast_xmit()
2989 memcpy(hdr->addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_check_fast_xmit()
2997 memcpy(hdr->addr1, sdata->u.mgd.bssid, ETH_ALEN); in ieee80211_check_fast_xmit()
3003 if (sdata->wdev.use_4addr) { in ieee80211_check_fast_xmit()
3007 memcpy(hdr->addr1, sta->sta.addr, ETH_ALEN); in ieee80211_check_fast_xmit()
3008 memcpy(hdr->addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_check_fast_xmit()
3019 memcpy(hdr->addr2, sdata->vif.addr, ETH_ALEN); in ieee80211_check_fast_xmit()
3024 /* not handled on fast-xmit */ in ieee80211_check_fast_xmit()
3025 goto out; in ieee80211_check_fast_xmit()
3028 if (sta->sta.wme) { in ieee80211_check_fast_xmit()
3038 build.key = rcu_access_pointer(sta->ptk[sta->ptk_idx]); in ieee80211_check_fast_xmit()
3040 build.key = rcu_access_pointer(sdata->default_unicast_key); in ieee80211_check_fast_xmit()
3044 gen_iv = build.key->conf.flags & IEEE80211_KEY_FLAG_GENERATE_IV; in ieee80211_check_fast_xmit()
3045 iv_spc = build.key->conf.flags & IEEE80211_KEY_FLAG_PUT_IV_SPACE; in ieee80211_check_fast_xmit()
3046 mmic = build.key->conf.flags & in ieee80211_check_fast_xmit()
3051 if (!(build.key->flags & KEY_FLAG_UPLOADED_TO_HARDWARE)) in ieee80211_check_fast_xmit()
3052 goto out; in ieee80211_check_fast_xmit()
3055 if (build.key->flags & KEY_FLAG_TAINTED) in ieee80211_check_fast_xmit()
3056 goto out; in ieee80211_check_fast_xmit()
3058 switch (build.key->conf.cipher) { in ieee80211_check_fast_xmit()
3074   /* cannot handle MMIC or IV generation in fast-xmit */ in ieee80211_check_fast_xmit()
3076 goto out; in ieee80211_check_fast_xmit()
3082 /* cannot handle IV generation in fast-xmit */ in ieee80211_check_fast_xmit()
3084 goto out; in ieee80211_check_fast_xmit()
3094 build.key->conf.cipher); in ieee80211_check_fast_xmit()
3095 goto out; in ieee80211_check_fast_xmit()
3099 goto out; in ieee80211_check_fast_xmit()
3100 /* pure hardware keys are OK, of course */ in ieee80211_check_fast_xmit()
3101 if (!(build.key->flags & KEY_FLAG_CIPHER_SCHEME)) in ieee80211_check_fast_xmit()
3105 build.key->conf.iv_len > IEEE80211_FAST_XMIT_MAX_IV) in ieee80211_check_fast_xmit()
3106 goto out; in ieee80211_check_fast_xmit()
3108 build.hdr_len += build.key->conf.iv_len; in ieee80211_check_fast_xmit()
3114 hdr->frame_control = fc; in ieee80211_check_fast_xmit()
3123 goto out; in ieee80211_check_fast_xmit()
3125 out: in ieee80211_check_fast_xmit()
3127 old = rcu_dereference_protected(sta->fast_tx, in ieee80211_check_fast_xmit()
3128 lockdep_is_held(&sta->lock)); in ieee80211_check_fast_xmit()
3129 rcu_assign_pointer(sta->fast_tx, fast_tx); in ieee80211_check_fast_xmit()
3132 spin_unlock_bh(&sta->lock); in ieee80211_check_fast_xmit()
3140 list_for_each_entry_rcu(sta, &local->sta_list, list) in ieee80211_check_fast_xmit_all()
3147 struct ieee80211_local *local = sdata->local; in ieee80211_check_fast_xmit_iface()
3152 list_for_each_entry_rcu(sta, &local->sta_list, list) { in ieee80211_check_fast_xmit_iface()
3153 if (sdata != sta->sdata && in ieee80211_check_fast_xmit_iface()
3154 (!sta->sdata->bss || sta->sdata->bss != sdata->bss)) in ieee80211_check_fast_xmit_iface()
3166 spin_lock_bh(&sta->lock); in ieee80211_clear_fast_xmit()
3167 fast_tx = rcu_dereference_protected(sta->fast_tx, in ieee80211_clear_fast_xmit()
3168 lockdep_is_held(&sta->lock)); in ieee80211_clear_fast_xmit()
3169 RCU_INIT_POINTER(sta->fast_tx, NULL); in ieee80211_clear_fast_xmit()
3170 spin_unlock_bh(&sta->lock); in ieee80211_clear_fast_xmit()
3180 I802_DEBUG_INC(local->tx_expand_skb_head); in ieee80211_amsdu_realloc_pad()
3183 wiphy_debug(local->hw.wiphy, in ieee80211_amsdu_realloc_pad()
3196 struct ieee80211_local *local = sdata->local; in ieee80211_amsdu_prepare_head()
3200 int hdr_len = fast_tx->hdr_len - sizeof(rfc1042_header); in ieee80211_amsdu_prepare_head()
3201 int subframe_len = skb->len - hdr_len; in ieee80211_amsdu_prepare_head()
3206 if (info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) in ieee80211_amsdu_prepare_head()
3209 if (info->control.flags & IEEE80211_TX_CTRL_AMSDU) in ieee80211_amsdu_prepare_head()
3220 h_80211_src = data + fast_tx->sa_offs; in ieee80211_amsdu_prepare_head()
3221 h_80211_dst = data + fast_tx->da_offs; in ieee80211_amsdu_prepare_head()
3223 amsdu_hdr->h_proto = cpu_to_be16(subframe_len); in ieee80211_amsdu_prepare_head()
3224 ether_addr_copy(amsdu_hdr->h_source, h_80211_src); in ieee80211_amsdu_prepare_head()
3225 ether_addr_copy(amsdu_hdr->h_dest, h_80211_dst); in ieee80211_amsdu_prepare_head()
3227 /* according to IEEE 802.11-2012 8.3.2 table 8-19, the outer SA/DA in ieee80211_amsdu_prepare_head()
3228  * fields need to be changed to BSSID for A-MSDU frames depending in ieee80211_amsdu_prepare_head()
3231 switch (sdata->vif.type) { in ieee80211_amsdu_prepare_head()
3233 bssid = sdata->u.mgd.bssid; in ieee80211_amsdu_prepare_head()
3237 bssid = sdata->vif.addr; in ieee80211_amsdu_prepare_head()
3243 if (bssid && ieee80211_has_fromds(hdr->frame_control)) in ieee80211_amsdu_prepare_head()
3246 if (bssid && ieee80211_has_tods(hdr->frame_control)) in ieee80211_amsdu_prepare_head()
3252 info->control.flags |= IEEE80211_TX_CTRL_AMSDU; in ieee80211_amsdu_prepare_head()
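The table 8-19 rule referenced above maps the frame's direction bits onto which outer address slot is replaced by the BSSID. The actual copy targets are not among the lines shown here, so treat the exact mapping in this sketch as an assumption: with FromDS set (AP/VLAN downlink) the SA slot of the cached header carries the BSSID, with ToDS set (station uplink) the DA slot does.

#include <stdio.h>
#include <string.h>

static void amsdu_outer_addrs(int to_ds, int from_ds, const char *bssid,
			      char *outer_da, char *outer_sa)
{
	if (from_ds)
		strcpy(outer_sa, bssid);	/* AP/VLAN downlink: SA slot carries the BSSID */
	if (to_ds)
		strcpy(outer_da, bssid);	/* station uplink: DA slot carries the BSSID */
}

int main(void)
{
	char da[18] = "aa:aa:aa:aa:aa:aa", sa[18] = "bb:bb:bb:bb:bb:bb";

	amsdu_outer_addrs(1, 0, "cc:cc:cc:cc:cc:cc", da, sa);	/* station uplink */
	printf("DA=%s SA=%s\n", da, sa);
	return 0;
}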
3262 struct ieee80211_local *local = sdata->local; in ieee80211_amsdu_aggregate()
3263 struct fq *fq = &local->fq; in ieee80211_amsdu_aggregate()
3266 u8 tid = skb->priority & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_amsdu_aggregate()
3267 struct ieee80211_txq *txq = sta->sta.txq[tid]; in ieee80211_amsdu_aggregate()
3270 int subframe_len = skb->len - ETH_ALEN; in ieee80211_amsdu_aggregate()
3271 u8 max_subframes = sta->sta.max_amsdu_subframes; in ieee80211_amsdu_aggregate()
3272 int max_frags = local->hw.max_tx_fragments; in ieee80211_amsdu_aggregate()
3273 int max_amsdu_len = sta->sta.max_amsdu_len; in ieee80211_amsdu_aggregate()
3283 if (!ieee80211_hw_check(&local->hw, TX_AMSDU)) in ieee80211_amsdu_aggregate()
3293 if (test_bit(IEEE80211_TXQ_NO_AMSDU, &txqi->flags)) in ieee80211_amsdu_aggregate()
3296 if (sta->sta.max_rc_amsdu_len) in ieee80211_amsdu_aggregate()
3298 sta->sta.max_rc_amsdu_len); in ieee80211_amsdu_aggregate()
3300 if (sta->sta.max_tid_amsdu_len[tid]) in ieee80211_amsdu_aggregate()
3302 sta->sta.max_tid_amsdu_len[tid]); in ieee80211_amsdu_aggregate()
3306 spin_lock_bh(&fq->lock); in ieee80211_amsdu_aggregate()
3312 tin = &txqi->tin; in ieee80211_amsdu_aggregate()
3315 head = skb_peek_tail(&flow->queue); in ieee80211_amsdu_aggregate()
3317 goto out; in ieee80211_amsdu_aggregate()
3319 orig_truesize = head->truesize; in ieee80211_amsdu_aggregate()
3320 orig_len = head->len; in ieee80211_amsdu_aggregate()
3322 if (skb->len + head->len > max_amsdu_len) in ieee80211_amsdu_aggregate()
3323 goto out; in ieee80211_amsdu_aggregate()
3325 nfrags = 1 + skb_shinfo(skb)->nr_frags; in ieee80211_amsdu_aggregate()
3326 nfrags += 1 + skb_shinfo(head)->nr_frags; in ieee80211_amsdu_aggregate()
3327 frag_tail = &skb_shinfo(head)->frag_list; in ieee80211_amsdu_aggregate()
3329 nfrags += 1 + skb_shinfo(*frag_tail)->nr_frags; in ieee80211_amsdu_aggregate()
3330 frag_tail = &(*frag_tail)->next; in ieee80211_amsdu_aggregate()
3335 goto out; in ieee80211_amsdu_aggregate()
3338 goto out; in ieee80211_amsdu_aggregate()
3341 goto out; in ieee80211_amsdu_aggregate()
3344 goto out; in ieee80211_amsdu_aggregate()
3347 * Pad out the previous subframe to a multiple of 4 by adding the in ieee80211_amsdu_aggregate()
3348  * padding to the next one that is being added. Note that head->len in ieee80211_amsdu_aggregate()
3349 * is the length of the full A-MSDU, but that works since each time in ieee80211_amsdu_aggregate()
3350 * we add a new subframe we pad out the previous one to a multiple in ieee80211_amsdu_aggregate()
3351 * of 4 and thus it no longer matters in the next round. in ieee80211_amsdu_aggregate()
3353 hdrlen = fast_tx->hdr_len - sizeof(rfc1042_header); in ieee80211_amsdu_aggregate()
3354 if ((head->len - hdrlen) & 3) in ieee80211_amsdu_aggregate()
3355 pad = 4 - ((head->len - hdrlen) & 3); in ieee80211_amsdu_aggregate()
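A worked instance of the padding rule above, with illustrative lengths (the arithmetic mirrors the two lines just shown):

#include <stdio.h>

int main(void)
{
	int hdrlen = 26;		/* illustrative: cached header minus the rfc1042 part */
	int head_len = 87;		/* illustrative: current A-MSDU length */
	int pad = 0;

	if ((head_len - hdrlen) & 3)
		pad = 4 - ((head_len - hdrlen) & 3);

	/* 87 - 26 = 61 bytes of subframes, 61 % 4 = 1, so pad = 3 */
	printf("pad = %d\n", pad);
	return 0;
}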
3372 head->len += skb->len; in ieee80211_amsdu_aggregate()
3373 head->data_len += skb->len; in ieee80211_amsdu_aggregate()
3377 fq->memory_usage += head->truesize - orig_truesize; in ieee80211_amsdu_aggregate()
3378 if (head->len != orig_len) { in ieee80211_amsdu_aggregate()
3379 flow->backlog += head->len - orig_len; in ieee80211_amsdu_aggregate()
3380 tin->backlog_bytes += head->len - orig_len; in ieee80211_amsdu_aggregate()
3384 out: in ieee80211_amsdu_aggregate()
3385 spin_unlock_bh(&fq->lock); in ieee80211_amsdu_aggregate()
3400 struct ieee80211_hdr *hdr = (void *)skb->data; in ieee80211_xmit_fast_finish()
3404 info->control.hw_key = &key->conf; in ieee80211_xmit_fast_finish()
3406 ieee80211_tx_stats(skb->dev, skb->len); in ieee80211_xmit_fast_finish()
3408 if (hdr->frame_control & cpu_to_le16(IEEE80211_STYPE_QOS_DATA)) { in ieee80211_xmit_fast_finish()
3409 tid = skb->priority & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_xmit_fast_finish()
3410 hdr->seq_ctrl = ieee80211_tx_next_seq(sta, tid); in ieee80211_xmit_fast_finish()
3412 info->flags |= IEEE80211_TX_CTL_ASSIGN_SEQ; in ieee80211_xmit_fast_finish()
3413 hdr->seq_ctrl = cpu_to_le16(sdata->sequence_number); in ieee80211_xmit_fast_finish()
3414 sdata->sequence_number += 0x10; in ieee80211_xmit_fast_finish()
3417 if (skb_shinfo(skb)->gso_size) in ieee80211_xmit_fast_finish()
3418 sta->tx_stats.msdu[tid] += in ieee80211_xmit_fast_finish()
3419 DIV_ROUND_UP(skb->len, skb_shinfo(skb)->gso_size); in ieee80211_xmit_fast_finish()
3421 sta->tx_stats.msdu[tid]++; in ieee80211_xmit_fast_finish()
3423 info->hw_queue = sdata->vif.hw_queue[skb_get_queue_mapping(skb)]; in ieee80211_xmit_fast_finish()
3428 sta->tx_stats.bytes[skb_get_queue_mapping(skb)] += skb->len; in ieee80211_xmit_fast_finish()
3429 sta->tx_stats.packets[skb_get_queue_mapping(skb)]++; in ieee80211_xmit_fast_finish()
3433 u8 *crypto_hdr = skb->data + pn_offs; in ieee80211_xmit_fast_finish()
3435 switch (key->conf.cipher) { in ieee80211_xmit_fast_finish()
3440 pn = atomic64_inc_return(&key->conf.tx_pn); in ieee80211_xmit_fast_finish()
3443 crypto_hdr[3] = 0x20 | (key->conf.keyidx << 6); in ieee80211_xmit_fast_finish()
3458 struct ieee80211_local *local = sdata->local; in ieee80211_xmit_fast()
3459 u16 ethertype = (skb->data[12] << 8) | skb->data[13]; in ieee80211_xmit_fast()
3460 int extra_head = fast_tx->hdr_len - (ETH_HLEN - 2); in ieee80211_xmit_fast()
3461 int hw_headroom = sdata->local->hw.extra_tx_headroom; in ieee80211_xmit_fast()
3464 struct ieee80211_hdr *hdr = (void *)fast_tx->hdr; in ieee80211_xmit_fast()
3470 /* control port protocol needs a lot of special handling */ in ieee80211_xmit_fast()
3471 if (cpu_to_be16(ethertype) == sdata->control_port_protocol) in ieee80211_xmit_fast()
3479 if (skb->sk && skb_shinfo(skb)->tx_flags & SKBTX_WIFI_STATUS) in ieee80211_xmit_fast()
3482 if (hdr->frame_control & cpu_to_le16(IEEE80211_STYPE_QOS_DATA)) { in ieee80211_xmit_fast()
3483 tid = skb->priority & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_xmit_fast()
3484 tid_tx = rcu_dereference(sta->ampdu_mlme.tid_tx[tid]); in ieee80211_xmit_fast()
3486 if (!test_bit(HT_AGG_STATE_OPERATIONAL, &tid_tx->state)) in ieee80211_xmit_fast()
3488 if (tid_tx->timeout) in ieee80211_xmit_fast()
3489 tid_tx->last_tx = jiffies; in ieee80211_xmit_fast()
3505 if ((hdr->frame_control & cpu_to_le16(IEEE80211_STYPE_QOS_DATA)) && in ieee80211_xmit_fast()
3509 /* will not be crypto-handled beyond what we do here, so use false in ieee80211_xmit_fast()
3510 * as the may-encrypt argument for the resize to not account for in ieee80211_xmit_fast()
3514 max_t(int, extra_head + hw_headroom - in ieee80211_xmit_fast()
3521 memcpy(&eth, skb->data, ETH_HLEN - 2); in ieee80211_xmit_fast()
3523 memcpy(skb->data, fast_tx->hdr, fast_tx->hdr_len); in ieee80211_xmit_fast()
3524 memcpy(skb->data + fast_tx->da_offs, eth.h_dest, ETH_ALEN); in ieee80211_xmit_fast()
3525 memcpy(skb->data + fast_tx->sa_offs, eth.h_source, ETH_ALEN); in ieee80211_xmit_fast()
3529 info->band = fast_tx->band; in ieee80211_xmit_fast()
3530 info->control.vif = &sdata->vif; in ieee80211_xmit_fast()
3531 info->flags = IEEE80211_TX_CTL_FIRST_FRAGMENT | in ieee80211_xmit_fast()
3534 info->control.flags = IEEE80211_TX_CTRL_FAST_XMIT; in ieee80211_xmit_fast()
3537 if (local->force_tx_status) in ieee80211_xmit_fast()
3538 info->flags |= IEEE80211_TX_CTL_REQ_TX_STATUS; in ieee80211_xmit_fast()
3541 if (hdr->frame_control & cpu_to_le16(IEEE80211_STYPE_QOS_DATA)) { in ieee80211_xmit_fast()
3542 tid = skb->priority & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_xmit_fast()
3552 tx.key = fast_tx->key; in ieee80211_xmit_fast()
3554 if (!ieee80211_hw_check(&local->hw, HAS_RATE_CONTROL)) { in ieee80211_xmit_fast()
3570 ieee80211_xmit_fast_finish(sdata, sta, fast_tx->pn_offs, in ieee80211_xmit_fast()
3571 fast_tx->key, skb); in ieee80211_xmit_fast()
3573 if (sdata->vif.type == NL80211_IFTYPE_AP_VLAN) in ieee80211_xmit_fast()
3574 sdata = container_of(sdata->bss, in ieee80211_xmit_fast()
3578 ieee80211_tx_frags(local, &sdata->vif, sta, &tx.skbs, false); in ieee80211_xmit_fast()
3589 struct fq *fq = &local->fq; in ieee80211_tx_dequeue()
3590 struct fq_tin *tin = &txqi->tin; in ieee80211_tx_dequeue()
3594 struct ieee80211_vif *vif = txq->vif; in ieee80211_tx_dequeue()
3602 spin_lock_bh(&fq->lock); in ieee80211_tx_dequeue()
3604 if (test_bit(IEEE80211_TXQ_STOP, &txqi->flags) || in ieee80211_tx_dequeue()
3605 test_bit(IEEE80211_TXQ_STOP_NETIF_TX, &txqi->flags)) in ieee80211_tx_dequeue()
3606 goto out; in ieee80211_tx_dequeue()
3608 if (vif->txqs_stopped[ieee80211_ac_from_tid(txq->tid)]) { in ieee80211_tx_dequeue()
3609 set_bit(IEEE80211_TXQ_STOP_NETIF_TX, &txqi->flags); in ieee80211_tx_dequeue()
3610 goto out; in ieee80211_tx_dequeue()
3614 skb = __skb_dequeue(&txqi->frags); in ieee80211_tx_dequeue()
3616 goto out; in ieee80211_tx_dequeue()
3620 goto out; in ieee80211_tx_dequeue()
3622 spin_unlock_bh(&fq->lock); in ieee80211_tx_dequeue()
3624 hdr = (struct ieee80211_hdr *)skb->data; in ieee80211_tx_dequeue()
3631 tx.sdata = vif_to_sdata(info->control.vif); in ieee80211_tx_dequeue()
3633 if (txq->sta) { in ieee80211_tx_dequeue()
3634 tx.sta = container_of(txq->sta, struct sta_info, sta); in ieee80211_tx_dequeue()
3639 if (unlikely(!(info->flags & IEEE80211_TX_CTL_INJECTED) && in ieee80211_tx_dequeue()
3640 ieee80211_is_data(hdr->frame_control) && in ieee80211_tx_dequeue()
3641 !ieee80211_vif_is_mesh(&tx.sdata->vif) && in ieee80211_tx_dequeue()
3642 tx.sdata->vif.type != NL80211_IFTYPE_OCB && in ieee80211_tx_dequeue()
3643 !is_multicast_ether_addr(hdr->addr1) && in ieee80211_tx_dequeue()
3645 (!(info->control.flags & in ieee80211_tx_dequeue()
3647 !ether_addr_equal(tx.sdata->vif.addr, in ieee80211_tx_dequeue()
3648 hdr->addr2)))) { in ieee80211_tx_dequeue()
3649 I802_DEBUG_INC(local->tx_handlers_drop_unauth_port); in ieee80211_tx_dequeue()
3650 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_dequeue()
3661 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_dequeue()
3665 if (test_bit(IEEE80211_TXQ_AMPDU, &txqi->flags)) in ieee80211_tx_dequeue()
3666 info->flags |= IEEE80211_TX_CTL_AMPDU; in ieee80211_tx_dequeue()
3668 info->flags &= ~IEEE80211_TX_CTL_AMPDU; in ieee80211_tx_dequeue()
3670 if (info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) in ieee80211_tx_dequeue()
3673 if (info->control.flags & IEEE80211_TX_CTRL_FAST_XMIT) { in ieee80211_tx_dequeue()
3674 struct sta_info *sta = container_of(txq->sta, struct sta_info, in ieee80211_tx_dequeue()
3679 (tx.key->conf.flags & IEEE80211_KEY_FLAG_GENERATE_IV)) in ieee80211_tx_dequeue()
3680 pn_offs = ieee80211_hdrlen(hdr->frame_control); in ieee80211_tx_dequeue()
3682 ieee80211_xmit_fast_finish(sta->sdata, sta, pn_offs, in ieee80211_tx_dequeue()
3691 spin_lock_bh(&fq->lock); in ieee80211_tx_dequeue()
3692 skb_queue_splice_tail(&tx.skbs, &txqi->frags); in ieee80211_tx_dequeue()
3693 spin_unlock_bh(&fq->lock); in ieee80211_tx_dequeue()
3698 !ieee80211_hw_check(&local->hw, TX_FRAG_LIST)) { in ieee80211_tx_dequeue()
3700 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_dequeue()
3705 switch (tx.sdata->vif.type) { in ieee80211_tx_dequeue()
3707 if (tx.sdata->u.mntr.flags & MONITOR_FLAG_ACTIVE) { in ieee80211_tx_dequeue()
3708 vif = &tx.sdata->vif; in ieee80211_tx_dequeue()
3711 tx.sdata = rcu_dereference(local->monitor_sdata); in ieee80211_tx_dequeue()
3713 vif = &tx.sdata->vif; in ieee80211_tx_dequeue()
3714 info->hw_queue = in ieee80211_tx_dequeue()
3715 vif->hw_queue[skb_get_queue_mapping(skb)]; in ieee80211_tx_dequeue()
3716 } else if (ieee80211_hw_check(&local->hw, QUEUE_CONTROL)) { in ieee80211_tx_dequeue()
3717 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_dequeue()
3724 tx.sdata = container_of(tx.sdata->bss, in ieee80211_tx_dequeue()
3728 vif = &tx.sdata->vif; in ieee80211_tx_dequeue()
3733 IEEE80211_SKB_CB(skb)->control.vif = vif; in ieee80211_tx_dequeue()
3736 wiphy_ext_feature_isset(local->hw.wiphy, NL80211_EXT_FEATURE_AQL)) { in ieee80211_tx_dequeue()
3737 bool ampdu = txq->ac != IEEE80211_AC_VO; in ieee80211_tx_dequeue()
3740 airtime = ieee80211_calc_expected_tx_airtime(hw, vif, txq->sta, in ieee80211_tx_dequeue()
3741 skb->len, ampdu); in ieee80211_tx_dequeue()
3745 txq->ac, in ieee80211_tx_dequeue()
3753 out: in ieee80211_tx_dequeue()
3754 spin_unlock_bh(&fq->lock); in ieee80211_tx_dequeue()
3767 spin_lock_bh(&local->active_txq_lock[ac]); in ieee80211_next_txq()
3770 txqi = list_first_entry_or_null(&local->active_txqs[ac], in ieee80211_next_txq()
3774 goto out; in ieee80211_next_txq()
3778 goto out; in ieee80211_next_txq()
3786 if (txqi->txq.sta) { in ieee80211_next_txq()
3787 struct sta_info *sta = container_of(txqi->txq.sta, in ieee80211_next_txq()
3789 bool aql_check = ieee80211_txq_airtime_check(hw, &txqi->txq); in ieee80211_next_txq()
3790 s64 deficit = sta->airtime[txqi->txq.ac].deficit; in ieee80211_next_txq()
3796 sta->airtime[txqi->txq.ac].deficit += in ieee80211_next_txq()
3797 sta->airtime_weight; in ieee80211_next_txq()
3800 list_move_tail(&txqi->schedule_order, in ieee80211_next_txq()
3801 &local->active_txqs[txqi->txq.ac]); in ieee80211_next_txq()
3807 if (txqi->schedule_round == local->schedule_round[ac]) in ieee80211_next_txq()
3808 goto out; in ieee80211_next_txq()
3810 list_del_init(&txqi->schedule_order); in ieee80211_next_txq()
3811 txqi->schedule_round = local->schedule_round[ac]; in ieee80211_next_txq()
3812 ret = &txqi->txq; in ieee80211_next_txq()
3814 out: in ieee80211_next_txq()
3815 spin_unlock_bh(&local->active_txq_lock[ac]); in ieee80211_next_txq()
3827 spin_lock_bh(&local->active_txq_lock[txq->ac]); in __ieee80211_schedule_txq()
3829 if (list_empty(&txqi->schedule_order) && in __ieee80211_schedule_txq()
3830 (force || !skb_queue_empty(&txqi->frags) || in __ieee80211_schedule_txq()
3831 txqi->tin.backlog_packets)) { in __ieee80211_schedule_txq()
3833 * head of the list to ensure that they only get moved to the in __ieee80211_schedule_txq()
3836 * get immediately moved to the back of the list on the next in __ieee80211_schedule_txq()
3839 if (txqi->txq.sta && in __ieee80211_schedule_txq()
3840 wiphy_ext_feature_isset(local->hw.wiphy, in __ieee80211_schedule_txq()
3842 list_add(&txqi->schedule_order, in __ieee80211_schedule_txq()
3843 &local->active_txqs[txq->ac]); in __ieee80211_schedule_txq()
3845 list_add_tail(&txqi->schedule_order, in __ieee80211_schedule_txq()
3846 &local->active_txqs[txq->ac]); in __ieee80211_schedule_txq()
3849 spin_unlock_bh(&local->active_txq_lock[txq->ac]); in __ieee80211_schedule_txq()
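The scheduling fragments above (ieee80211_next_txq and __ieee80211_schedule_txq) implement an airtime deficit round-robin: a TXQ is served only while its station's deficit is non-negative, and a negative deficit is topped up by the station's weight while the TXQ rotates to the back of the active list. A small standalone model of that policy, with made-up numbers:

#include <stdio.h>

struct sta_model {
	const char *name;
	int deficit;
	int weight;
};

/* Walk the (conceptual) active list; a negative deficit is refilled and
 * the station conceptually re-queued at the tail instead of being served. */
static int next_txq(struct sta_model *stas, int n)
{
	int i = 0;

	while (stas[i].deficit < 0) {
		stas[i].deficit += stas[i].weight;
		i = (i + 1) % n;
	}
	return i;
}

int main(void)
{
	struct sta_model stas[] = {
		{ "sta0", -30, 256 },	/* spent more airtime than its share */
		{ "sta1",  10, 256 },
	};
	int i = next_txq(stas, 2);

	printf("serving %s (deficit %d)\n", stas[i].name, stas[i].deficit);
	stas[i].deficit -= 50;		/* charge airtime used by the dequeued frame */
	return 0;
}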
3859 if (!wiphy_ext_feature_isset(local->hw.wiphy, NL80211_EXT_FEATURE_AQL)) in ieee80211_txq_airtime_check()
3862 if (!txq->sta) in ieee80211_txq_airtime_check()
3865 sta = container_of(txq->sta, struct sta_info, sta); in ieee80211_txq_airtime_check()
3866 if (atomic_read(&sta->airtime[txq->ac].aql_tx_pending) < in ieee80211_txq_airtime_check()
3867 sta->airtime[txq->ac].aql_limit_low) in ieee80211_txq_airtime_check()
3870 if (atomic_read(&local->aql_total_pending_airtime) < in ieee80211_txq_airtime_check()
3871 local->aql_threshold && in ieee80211_txq_airtime_check()
3872 atomic_read(&sta->airtime[txq->ac].aql_tx_pending) < in ieee80211_txq_airtime_check()
3873 sta->airtime[txq->ac].aql_limit_high) in ieee80211_txq_airtime_check()
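ieee80211_txq_airtime_check above encodes a two-level admission rule: pending airtime below the per-station low limit is always allowed, and between the low and high limits it is allowed only while the device-wide total stays under local->aql_threshold. Restated as a standalone predicate with illustrative values:

#include <stdbool.h>
#include <stdio.h>

static bool aql_may_tx(int sta_pending, int low, int high,
		       int total_pending, int threshold)
{
	if (sta_pending < low)
		return true;
	if (total_pending < threshold && sta_pending < high)
		return true;
	return false;
}

int main(void)
{
	/* illustrative numbers, in microseconds of pending airtime */
	printf("%d\n", aql_may_tx(3000, 5000, 12000, 20000, 24000));	/* 1: under low limit */
	printf("%d\n", aql_may_tx(8000, 5000, 12000, 30000, 24000));	/* 0: total over threshold */
	return 0;
}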
3886 u8 ac = txq->ac; in ieee80211_txq_may_transmit()
3888 spin_lock_bh(&local->active_txq_lock[ac]); in ieee80211_txq_may_transmit()
3890 if (!txqi->txq.sta) in ieee80211_txq_may_transmit()
3891 goto out; in ieee80211_txq_may_transmit()
3893 if (list_empty(&txqi->schedule_order)) in ieee80211_txq_may_transmit()
3894 goto out; in ieee80211_txq_may_transmit()
3896 list_for_each_entry_safe(iter, tmp, &local->active_txqs[ac], in ieee80211_txq_may_transmit()
3901 if (!iter->txq.sta) { in ieee80211_txq_may_transmit()
3902 list_move_tail(&iter->schedule_order, in ieee80211_txq_may_transmit()
3903 &local->active_txqs[ac]); in ieee80211_txq_may_transmit()
3906 sta = container_of(iter->txq.sta, struct sta_info, sta); in ieee80211_txq_may_transmit()
3907 if (sta->airtime[ac].deficit < 0) in ieee80211_txq_may_transmit()
3908 sta->airtime[ac].deficit += sta->airtime_weight; in ieee80211_txq_may_transmit()
3909 list_move_tail(&iter->schedule_order, &local->active_txqs[ac]); in ieee80211_txq_may_transmit()
3912 sta = container_of(txqi->txq.sta, struct sta_info, sta); in ieee80211_txq_may_transmit()
3913 if (sta->airtime[ac].deficit >= 0) in ieee80211_txq_may_transmit()
3914 goto out; in ieee80211_txq_may_transmit()
3916 sta->airtime[ac].deficit += sta->airtime_weight; in ieee80211_txq_may_transmit()
3917 list_move_tail(&txqi->schedule_order, &local->active_txqs[ac]); in ieee80211_txq_may_transmit()
3918 spin_unlock_bh(&local->active_txq_lock[ac]); in ieee80211_txq_may_transmit()
3921 out: in ieee80211_txq_may_transmit()
3922 if (!list_empty(&txqi->schedule_order)) in ieee80211_txq_may_transmit()
3923 list_del_init(&txqi->schedule_order); in ieee80211_txq_may_transmit()
3924 spin_unlock_bh(&local->active_txq_lock[ac]); in ieee80211_txq_may_transmit()
3934 spin_lock_bh(&local->active_txq_lock[ac]); in ieee80211_txq_schedule_start()
3935 local->schedule_round[ac]++; in ieee80211_txq_schedule_start()
3936 spin_unlock_bh(&local->active_txq_lock[ac]); in ieee80211_txq_schedule_start()
3947 struct ieee80211_local *local = sdata->local; in __ieee80211_subif_start_xmit()
3951 if (unlikely(skb->len < ETH_HLEN)) { in __ieee80211_subif_start_xmit()
3964 if (local->ops->wake_tx_queue) { in __ieee80211_subif_start_xmit()
3973 sk_pacing_shift_update(skb->sk, sdata->local->hw.tx_sk_pacing_shift); in __ieee80211_subif_start_xmit()
3975 fast_tx = rcu_dereference(sta->fast_tx); in __ieee80211_subif_start_xmit()
3979 goto out; in __ieee80211_subif_start_xmit()
3993 /* we cannot process non-linear frames on this path */ in __ieee80211_subif_start_xmit()
3996 goto out; in __ieee80211_subif_start_xmit()
3999 /* the frame could be fragmented, software-encrypted, and other in __ieee80211_subif_start_xmit()
4000 * things so we cannot really handle checksum offload with it - in __ieee80211_subif_start_xmit()
4003 if (skb->ip_summed == CHECKSUM_PARTIAL) { in __ieee80211_subif_start_xmit()
4014 if (skb->protocol == sdata->control_port_protocol) in __ieee80211_subif_start_xmit()
4021 goto out; in __ieee80211_subif_start_xmit()
4024 ieee80211_tx_stats(dev, skb->len); in __ieee80211_subif_start_xmit()
4028 goto out; in __ieee80211_subif_start_xmit()
4031 out: in __ieee80211_subif_start_xmit()
4044 eth = (void *)skb->data; in ieee80211_change_da()
4045 ether_addr_copy(eth->h_dest, sta->sta.addr); in ieee80211_change_da()
4054 const struct ethhdr *eth = (void *)skb->data; in ieee80211_multicast_to_unicast()
4055 const struct vlan_ethhdr *ethvlan = (void *)skb->data; in ieee80211_multicast_to_unicast()
4058 if (likely(!is_multicast_ether_addr(eth->h_dest))) in ieee80211_multicast_to_unicast()
4061 switch (sdata->vif.type) { in ieee80211_multicast_to_unicast()
4063 if (sdata->u.vlan.sta) in ieee80211_multicast_to_unicast()
4065 if (sdata->wdev.use_4addr) in ieee80211_multicast_to_unicast()
4070 if (!sdata->bss->multicast_to_unicast) in ieee80211_multicast_to_unicast()
4078 ethertype = eth->h_proto; in ieee80211_multicast_to_unicast()
4079 if (ethertype == htons(ETH_P_8021Q) && skb->len >= VLAN_ETH_HLEN) in ieee80211_multicast_to_unicast()
4080 ethertype = ethvlan->h_vlan_encapsulated_proto; in ieee80211_multicast_to_unicast()
4098 struct ieee80211_local *local = sdata->local; in ieee80211_convert_to_unicast()
4099 const struct ethhdr *eth = (struct ethhdr *)skb->data; in ieee80211_convert_to_unicast()
4105 list_for_each_entry_rcu(sta, &local->sta_list, list) { in ieee80211_convert_to_unicast()
4106 if (sdata != sta->sdata) in ieee80211_convert_to_unicast()
4107 /* AP-VLAN mismatch */ in ieee80211_convert_to_unicast()
4109 if (unlikely(ether_addr_equal(eth->h_source, sta->sta.addr))) in ieee80211_convert_to_unicast()
4136 goto out; in ieee80211_convert_to_unicast()
4140 out: in ieee80211_convert_to_unicast()
4145 * ieee80211_subif_start_xmit - netif start_xmit function for 802.3 vifs
4173 struct ieee80211_local *local = sdata->local; in ieee80211_tx_8023()
4178 int q = info->hw_queue; in ieee80211_tx_8023()
4183 spin_lock_irqsave(&local->queue_stop_reason_lock, flags); in ieee80211_tx_8023()
4185 if (local->queue_stop_reasons[q] || in ieee80211_tx_8023()
4186 (!txpending && !skb_queue_empty(&local->pending[q]))) { in ieee80211_tx_8023()
4188 skb_queue_head(&local->pending[q], skb); in ieee80211_tx_8023()
4190 skb_queue_tail(&local->pending[q], skb); in ieee80211_tx_8023()
4192 spin_unlock_irqrestore(&local->queue_stop_reason_lock, flags); in ieee80211_tx_8023()
4197 spin_unlock_irqrestore(&local->queue_stop_reason_lock, flags); in ieee80211_tx_8023()
4199 if (sta && sta->uploaded) in ieee80211_tx_8023()
4200 pubsta = &sta->sta; in ieee80211_tx_8023()
4214 struct ieee80211_local *local = sdata->local; in ieee80211_8023_xmit()
4218 if (local->ops->wake_tx_queue) { in ieee80211_8023_xmit()
4224 if (unlikely(test_bit(SCAN_SW_SCANNING, &local->scanning)) && in ieee80211_8023_xmit()
4225 test_bit(SDATA_STATE_OFFCHANNEL, &sdata->state)) in ieee80211_8023_xmit()
4230 tid = skb->priority & IEEE80211_QOS_CTL_TAG1D_MASK; in ieee80211_8023_xmit()
4231 tid_tx = rcu_dereference(sta->ampdu_mlme.tid_tx[tid]); in ieee80211_8023_xmit()
4233 if (!test_bit(HT_AGG_STATE_OPERATIONAL, &tid_tx->state)) { in ieee80211_8023_xmit()
4234 /* fall back to non-offload slow path */ in ieee80211_8023_xmit()
4239 info->flags |= IEEE80211_TX_CTL_AMPDU; in ieee80211_8023_xmit()
4240 if (tid_tx->timeout) in ieee80211_8023_xmit()
4241 tid_tx->last_tx = jiffies; in ieee80211_8023_xmit()
4244 if (unlikely(skb->sk && in ieee80211_8023_xmit()
4245 skb_shinfo(skb)->tx_flags & SKBTX_WIFI_STATUS)) in ieee80211_8023_xmit()
4246 info->ack_frame_id = ieee80211_store_ack_skb(local, skb, in ieee80211_8023_xmit()
4247 &info->flags, NULL); in ieee80211_8023_xmit()
4249 info->hw_queue = sdata->vif.hw_queue[skb_get_queue_mapping(skb)]; in ieee80211_8023_xmit()
4251 ieee80211_tx_stats(dev, skb->len); in ieee80211_8023_xmit()
4253 sta->tx_stats.bytes[skb_get_queue_mapping(skb)] += skb->len; in ieee80211_8023_xmit()
4254 sta->tx_stats.packets[skb_get_queue_mapping(skb)]++; in ieee80211_8023_xmit()
4256 if (sdata->vif.type == NL80211_IFTYPE_AP_VLAN) in ieee80211_8023_xmit()
4257 sdata = container_of(sdata->bss, in ieee80211_8023_xmit()
4260 info->flags |= IEEE80211_TX_CTL_HW_80211_ENCAP; in ieee80211_8023_xmit()
4261 info->control.vif = &sdata->vif; in ieee80211_8023_xmit()
4264 info->control.hw_key = &key->conf; in ieee80211_8023_xmit()
4266 ieee80211_tx_8023(sdata, skb, skb->len, sta, false); in ieee80211_8023_xmit()
4278 struct ethhdr *ehdr = (struct ethhdr *)skb->data; in ieee80211_subif_start_xmit_8023()
4283 if (unlikely(skb->len < ETH_HLEN)) { in ieee80211_subif_start_xmit_8023()
4292 goto out; in ieee80211_subif_start_xmit_8023()
4295 if (unlikely(IS_ERR_OR_NULL(sta) || !sta->uploaded || in ieee80211_subif_start_xmit_8023()
4297 sdata->control_port_protocol == ehdr->h_proto)) in ieee80211_subif_start_xmit_8023()
4299 else if ((key = rcu_dereference(sta->ptk[sta->ptk_idx])) && in ieee80211_subif_start_xmit_8023()
4300 (!(key->flags & KEY_FLAG_UPLOADED_TO_HARDWARE) || in ieee80211_subif_start_xmit_8023()
4301 key->conf.cipher == WLAN_CIPHER_SUITE_TKIP)) in ieee80211_subif_start_xmit_8023()
4309 out: in ieee80211_subif_start_xmit_8023()
4321 .local = sdata->local, in ieee80211_build_data_template()
4330 skb = ERR_PTR(-EINVAL); in ieee80211_build_data_template()
4331 goto out; in ieee80211_build_data_template()
4336 goto out; in ieee80211_build_data_template()
4338 hdr = (void *)skb->data; in ieee80211_build_data_template()
4339 tx.sta = sta_info_get(sdata, hdr->addr1); in ieee80211_build_data_template()
4345 return ERR_PTR(-EINVAL); in ieee80211_build_data_template()
4348 out: in ieee80211_build_data_template()
4362 for (i = 0; i < local->hw.queues; i++) { in ieee80211_clear_tx_pending()
4363 while ((skb = skb_dequeue(&local->pending[i])) != NULL) in ieee80211_clear_tx_pending()
4364 ieee80211_free_txskb(&local->hw, skb); in ieee80211_clear_tx_pending()
4370 * which in this case means re-queued -- take as an indication to stop sending
4383 sdata = vif_to_sdata(info->control.vif); in ieee80211_tx_pending_skb()
4385 if (info->control.flags & IEEE80211_TX_INTCFL_NEED_TXPROCESSING) { in ieee80211_tx_pending_skb()
4386 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_tx_pending_skb()
4391 info->band = chanctx_conf->def.chan->band; in ieee80211_tx_pending_skb()
4393 } else if (info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP) { in ieee80211_tx_pending_skb()
4399 if (IS_ERR(sta) || (sta && !sta->uploaded)) in ieee80211_tx_pending_skb()
4402 result = ieee80211_tx_8023(sdata, skb, skb->len, sta, true); in ieee80211_tx_pending_skb()
4409 hdr = (struct ieee80211_hdr *)skb->data; in ieee80211_tx_pending_skb()
4410 sta = sta_info_get(sdata, hdr->addr1); in ieee80211_tx_pending_skb()
4412 result = __ieee80211_tx(local, &skbs, skb->len, sta, true); in ieee80211_tx_pending_skb()
4430 spin_lock_irqsave(&local->queue_stop_reason_lock, flags); in ieee80211_tx_pending()
4431 for (i = 0; i < local->hw.queues; i++) { in ieee80211_tx_pending()
4436 if (local->queue_stop_reasons[i] || in ieee80211_tx_pending()
4437 skb_queue_empty(&local->pending[i])) in ieee80211_tx_pending()
4440 while (!skb_queue_empty(&local->pending[i])) { in ieee80211_tx_pending()
4441 struct sk_buff *skb = __skb_dequeue(&local->pending[i]); in ieee80211_tx_pending()
4444 if (WARN_ON(!info->control.vif)) { in ieee80211_tx_pending()
4445 ieee80211_free_txskb(&local->hw, skb); in ieee80211_tx_pending()
4449 spin_unlock_irqrestore(&local->queue_stop_reason_lock, in ieee80211_tx_pending()
4453 spin_lock_irqsave(&local->queue_stop_reason_lock, in ieee80211_tx_pending()
4459 if (skb_queue_empty(&local->pending[i])) in ieee80211_tx_pending()
4462 spin_unlock_irqrestore(&local->queue_stop_reason_lock, flags); in ieee80211_tx_pending()
4479 if (atomic_read(&ps->num_sta_ps) > 0) in __ieee80211_beacon_add_tim()
4481 * checking byte-for-byte */ in __ieee80211_beacon_add_tim()
4482 have_bits = !bitmap_empty((unsigned long *)ps->tim, in __ieee80211_beacon_add_tim()
4485 if (ps->dtim_count == 0) in __ieee80211_beacon_add_tim()
4486 ps->dtim_count = sdata->vif.bss_conf.dtim_period - 1; in __ieee80211_beacon_add_tim()
4488 ps->dtim_count--; in __ieee80211_beacon_add_tim()
4494 *pos++ = ps->dtim_count; in __ieee80211_beacon_add_tim()
4495 *pos++ = sdata->vif.bss_conf.dtim_period; in __ieee80211_beacon_add_tim()
4497 if (ps->dtim_count == 0 && !skb_queue_empty(&ps->bc_buf)) in __ieee80211_beacon_add_tim()
4500 ps->dtim_bc_mc = aid0 == 1; in __ieee80211_beacon_add_tim()
4504 * (N1 x 8) - 1 in the bitmap are 0 and number N2 so that bits in __ieee80211_beacon_add_tim()
4508 if (ps->tim[i]) { in __ieee80211_beacon_add_tim()
4514 for (i = IEEE80211_MAX_TIM_LEN - 1; i >= n1; i--) { in __ieee80211_beacon_add_tim()
4515 if (ps->tim[i]) { in __ieee80211_beacon_add_tim()
4524 skb_put(skb, n2 - n1); in __ieee80211_beacon_add_tim()
4525 memcpy(pos, ps->tim + n1, n2 - n1 + 1); in __ieee80211_beacon_add_tim()
4527 tim[1] = n2 - n1 + 4; in __ieee80211_beacon_add_tim()
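The TIM construction above transmits only the octets between N1 (the first non-zero octet of the station bitmap, rounded down to an even index) and N2 (the last non-zero octet) as the partial virtual bitmap; N1, together with the multicast bit, goes into the bitmap control field, and the element length becomes N2 - N1 + 4. A worked example with two buffered stations:

#include <stdio.h>
#include <string.h>

#define TIM_LEN 251		/* maximum TIM bitmap length in octets */

int main(void)
{
	unsigned char tim_bitmap[TIM_LEN] = { 0 };
	int n1 = 0, n2 = 0;

	tim_bitmap[5] = 0x10;	/* e.g. AID 44 (5 * 8 + 4) has buffered frames */
	tim_bitmap[9] = 0x02;	/* e.g. AID 73 (9 * 8 + 1) has buffered frames */

	for (int i = 0; i < TIM_LEN; i++)
		if (tim_bitmap[i]) { n1 = i & 0xfe; break; }
	for (int i = TIM_LEN - 1; i >= n1; i--)
		if (tim_bitmap[i]) { n2 = i; break; }

	/* n1 = 4, n2 = 9: octets 4..9 (6 bytes) are sent and the element
	 * length field becomes n2 - n1 + 4 = 9. */
	printf("n1=%d n2=%d tim_len=%d\n", n1, n2, n2 - n1 + 4);
	return 0;
}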
4538 struct ieee80211_local *local = sdata->local; in ieee80211_beacon_add_tim()
4544 * sta_lock to guarantee consistent and race-free update in ieee80211_beacon_add_tim()
4545 * of the tim bitmap in mac80211 and the driver. in ieee80211_beacon_add_tim()
4547 if (local->tim_in_locked_section) { in ieee80211_beacon_add_tim()
4550 spin_lock_bh(&local->tim_lock); in ieee80211_beacon_add_tim()
4552 spin_unlock_bh(&local->tim_lock); in ieee80211_beacon_add_tim()
4565 u8 count = beacon->cntdwn_current_counter; in ieee80211_set_beacon_cntdwn()
4567 switch (sdata->vif.type) { in ieee80211_set_beacon_cntdwn()
4569 beacon_data = beacon->tail; in ieee80211_set_beacon_cntdwn()
4570 beacon_data_len = beacon->tail_len; in ieee80211_set_beacon_cntdwn()
4573 beacon_data = beacon->head; in ieee80211_set_beacon_cntdwn()
4574 beacon_data_len = beacon->head_len; in ieee80211_set_beacon_cntdwn()
4577 beacon_data = beacon->head; in ieee80211_set_beacon_cntdwn()
4578 beacon_data_len = beacon->head_len; in ieee80211_set_beacon_cntdwn()
4586 resp = rcu_dereference(sdata->u.ap.probe_resp); in ieee80211_set_beacon_cntdwn()
4588 if (beacon->cntdwn_counter_offsets[i]) { in ieee80211_set_beacon_cntdwn()
4589 if (WARN_ON_ONCE(beacon->cntdwn_counter_offsets[i] >= in ieee80211_set_beacon_cntdwn()
4595 beacon_data[beacon->cntdwn_counter_offsets[i]] = count; in ieee80211_set_beacon_cntdwn()
4598 if (sdata->vif.type == NL80211_IFTYPE_AP && resp) in ieee80211_set_beacon_cntdwn()
4599 resp->data[resp->cntdwn_counter_offsets[i]] = count; in ieee80211_set_beacon_cntdwn()
4606 beacon->cntdwn_current_counter--; in __ieee80211_beacon_update_cntdwn()
4609 WARN_ON_ONCE(!beacon->cntdwn_current_counter); in __ieee80211_beacon_update_cntdwn()
4611 return beacon->cntdwn_current_counter; in __ieee80211_beacon_update_cntdwn()
4622 if (sdata->vif.type == NL80211_IFTYPE_AP) in ieee80211_beacon_update_cntdwn()
4623 beacon = rcu_dereference(sdata->u.ap.beacon); in ieee80211_beacon_update_cntdwn()
4624 else if (sdata->vif.type == NL80211_IFTYPE_ADHOC) in ieee80211_beacon_update_cntdwn()
4625 beacon = rcu_dereference(sdata->u.ibss.presp); in ieee80211_beacon_update_cntdwn()
4626 else if (ieee80211_vif_is_mesh(&sdata->vif)) in ieee80211_beacon_update_cntdwn()
4627 beacon = rcu_dereference(sdata->u.mesh.beacon); in ieee80211_beacon_update_cntdwn()
4647 if (sdata->vif.type == NL80211_IFTYPE_AP) in ieee80211_beacon_set_cntdwn()
4648 beacon = rcu_dereference(sdata->u.ap.beacon); in ieee80211_beacon_set_cntdwn()
4649 else if (sdata->vif.type == NL80211_IFTYPE_ADHOC) in ieee80211_beacon_set_cntdwn()
4650 beacon = rcu_dereference(sdata->u.ibss.presp); in ieee80211_beacon_set_cntdwn()
4651 else if (ieee80211_vif_is_mesh(&sdata->vif)) in ieee80211_beacon_set_cntdwn()
4652 beacon = rcu_dereference(sdata->u.mesh.beacon); in ieee80211_beacon_set_cntdwn()
4657 if (counter < beacon->cntdwn_current_counter) in ieee80211_beacon_set_cntdwn()
4658 beacon->cntdwn_current_counter = counter; in ieee80211_beacon_set_cntdwn()
4677 if (vif->type == NL80211_IFTYPE_AP) { in ieee80211_beacon_cntdwn_is_complete()
4678 struct ieee80211_if_ap *ap = &sdata->u.ap; in ieee80211_beacon_cntdwn_is_complete()
4680 beacon = rcu_dereference(ap->beacon); in ieee80211_beacon_cntdwn_is_complete()
4681 if (WARN_ON(!beacon || !beacon->tail)) in ieee80211_beacon_cntdwn_is_complete()
4682 goto out; in ieee80211_beacon_cntdwn_is_complete()
4683 beacon_data = beacon->tail; in ieee80211_beacon_cntdwn_is_complete()
4684 beacon_data_len = beacon->tail_len; in ieee80211_beacon_cntdwn_is_complete()
4685 } else if (vif->type == NL80211_IFTYPE_ADHOC) { in ieee80211_beacon_cntdwn_is_complete()
4686 struct ieee80211_if_ibss *ifibss = &sdata->u.ibss; in ieee80211_beacon_cntdwn_is_complete()
4688 beacon = rcu_dereference(ifibss->presp); in ieee80211_beacon_cntdwn_is_complete()
4690 goto out; in ieee80211_beacon_cntdwn_is_complete()
4692 beacon_data = beacon->head; in ieee80211_beacon_cntdwn_is_complete()
4693 beacon_data_len = beacon->head_len; in ieee80211_beacon_cntdwn_is_complete()
4694 } else if (vif->type == NL80211_IFTYPE_MESH_POINT) { in ieee80211_beacon_cntdwn_is_complete()
4695 struct ieee80211_if_mesh *ifmsh = &sdata->u.mesh; in ieee80211_beacon_cntdwn_is_complete()
4697 beacon = rcu_dereference(ifmsh->beacon); in ieee80211_beacon_cntdwn_is_complete()
4699 goto out; in ieee80211_beacon_cntdwn_is_complete()
4701 beacon_data = beacon->head; in ieee80211_beacon_cntdwn_is_complete()
4702 beacon_data_len = beacon->head_len; in ieee80211_beacon_cntdwn_is_complete()
4705 goto out; in ieee80211_beacon_cntdwn_is_complete()
4708 if (!beacon->cntdwn_counter_offsets[0]) in ieee80211_beacon_cntdwn_is_complete()
4709 goto out; in ieee80211_beacon_cntdwn_is_complete()
4711 if (WARN_ON_ONCE(beacon->cntdwn_counter_offsets[0] > beacon_data_len)) in ieee80211_beacon_cntdwn_is_complete()
4712 goto out; in ieee80211_beacon_cntdwn_is_complete()
4714 if (beacon_data[beacon->cntdwn_counter_offsets[0]] == 1) in ieee80211_beacon_cntdwn_is_complete()
4717 out: in ieee80211_beacon_cntdwn_is_complete()
4733 tx.key = rcu_dereference(sdata->default_beacon_key); in ieee80211_beacon_protect()
4745 return -EINVAL; in ieee80211_beacon_protect()
4761 enum nl80211_band band; in __ieee80211_beacon_get() local
4769 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in __ieee80211_beacon_get()
4772 goto out; in __ieee80211_beacon_get()
4777 if (sdata->vif.type == NL80211_IFTYPE_AP) { in __ieee80211_beacon_get()
4778 struct ieee80211_if_ap *ap = &sdata->u.ap; in __ieee80211_beacon_get()
4780 beacon = rcu_dereference(ap->beacon); in __ieee80211_beacon_get()
4782 if (beacon->cntdwn_counter_offsets[0]) { in __ieee80211_beacon_get()
4793 skb = dev_alloc_skb(local->tx_headroom + in __ieee80211_beacon_get()
4794 beacon->head_len + in __ieee80211_beacon_get()
4795 beacon->tail_len + 256 + in __ieee80211_beacon_get()
4796 local->hw.extra_beacon_tailroom); in __ieee80211_beacon_get()
4798 goto out; in __ieee80211_beacon_get()
4800 skb_reserve(skb, local->tx_headroom); in __ieee80211_beacon_get()
4801 skb_put_data(skb, beacon->head, beacon->head_len); in __ieee80211_beacon_get()
4803 ieee80211_beacon_add_tim(sdata, &ap->ps, skb, in __ieee80211_beacon_get()
4807 offs->tim_offset = beacon->head_len; in __ieee80211_beacon_get()
4808 offs->tim_length = skb->len - beacon->head_len; in __ieee80211_beacon_get()
4811 csa_off_base = skb->len; in __ieee80211_beacon_get()
4814 if (beacon->tail) in __ieee80211_beacon_get()
4815 skb_put_data(skb, beacon->tail, in __ieee80211_beacon_get()
4816 beacon->tail_len); in __ieee80211_beacon_get()
4819 goto out; in __ieee80211_beacon_get()
4821 goto out; in __ieee80211_beacon_get()
4822 } else if (sdata->vif.type == NL80211_IFTYPE_ADHOC) { in __ieee80211_beacon_get()
4823 struct ieee80211_if_ibss *ifibss = &sdata->u.ibss; in __ieee80211_beacon_get()
4826 beacon = rcu_dereference(ifibss->presp); in __ieee80211_beacon_get()
4828 goto out; in __ieee80211_beacon_get()
4830 if (beacon->cntdwn_counter_offsets[0]) { in __ieee80211_beacon_get()
4837 skb = dev_alloc_skb(local->tx_headroom + beacon->head_len + in __ieee80211_beacon_get()
4838 local->hw.extra_beacon_tailroom); in __ieee80211_beacon_get()
4840 goto out; in __ieee80211_beacon_get()
4841 skb_reserve(skb, local->tx_headroom); in __ieee80211_beacon_get()
4842 skb_put_data(skb, beacon->head, beacon->head_len); in __ieee80211_beacon_get()
4844 hdr = (struct ieee80211_hdr *) skb->data; in __ieee80211_beacon_get()
4845 hdr->frame_control = cpu_to_le16(IEEE80211_FTYPE_MGMT | in __ieee80211_beacon_get()
4847 } else if (ieee80211_vif_is_mesh(&sdata->vif)) { in __ieee80211_beacon_get()
4848 struct ieee80211_if_mesh *ifmsh = &sdata->u.mesh; in __ieee80211_beacon_get()
4850 beacon = rcu_dereference(ifmsh->beacon); in __ieee80211_beacon_get()
4852 goto out; in __ieee80211_beacon_get()
4854 if (beacon->cntdwn_counter_offsets[0]) { in __ieee80211_beacon_get()
4866 if (ifmsh->sync_ops) in __ieee80211_beacon_get()
4867 ifmsh->sync_ops->adjust_tsf(sdata, beacon); in __ieee80211_beacon_get()
4869 skb = dev_alloc_skb(local->tx_headroom + in __ieee80211_beacon_get()
4870 beacon->head_len + in __ieee80211_beacon_get()
4872 beacon->tail_len + in __ieee80211_beacon_get()
4873 local->hw.extra_beacon_tailroom); in __ieee80211_beacon_get()
4875 goto out; in __ieee80211_beacon_get()
4876 skb_reserve(skb, local->tx_headroom); in __ieee80211_beacon_get()
4877 skb_put_data(skb, beacon->head, beacon->head_len); in __ieee80211_beacon_get()
4878 ieee80211_beacon_add_tim(sdata, &ifmsh->ps, skb, is_template); in __ieee80211_beacon_get()
4881 offs->tim_offset = beacon->head_len; in __ieee80211_beacon_get()
4882 offs->tim_length = skb->len - beacon->head_len; in __ieee80211_beacon_get()
4885 skb_put_data(skb, beacon->tail, beacon->tail_len); in __ieee80211_beacon_get()
4888 goto out; in __ieee80211_beacon_get()
4896 u16 csa_off = beacon->cntdwn_counter_offsets[i]; in __ieee80211_beacon_get()
4901 offs->cntdwn_counter_offs[i] = csa_off_base + csa_off; in __ieee80211_beacon_get()
4905 band = chanctx_conf->def.chan->band; in __ieee80211_beacon_get()
4909 info->flags |= IEEE80211_TX_INTFL_DONT_ENCRYPT; in __ieee80211_beacon_get()
4910 info->flags |= IEEE80211_TX_CTL_NO_ACK; in __ieee80211_beacon_get()
4911 info->band = band; in __ieee80211_beacon_get()
4915 txrc.sband = local->hw.wiphy->bands[band]; in __ieee80211_beacon_get()
4916 txrc.bss_conf = &sdata->vif.bss_conf; in __ieee80211_beacon_get()
4918 txrc.reported_rate.idx = -1; in __ieee80211_beacon_get()
4919 if (sdata->beacon_rate_set && sdata->beacon_rateidx_mask[band]) in __ieee80211_beacon_get()
4920 txrc.rate_idx_mask = sdata->beacon_rateidx_mask[band]; in __ieee80211_beacon_get()
4922 txrc.rate_idx_mask = sdata->rc_rateidx_mask[band]; in __ieee80211_beacon_get()
4926 info->control.vif = vif; in __ieee80211_beacon_get()
4928 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT | in __ieee80211_beacon_get()
4931 out: in __ieee80211_beacon_get()
4966 !hw_to_local(hw)->monitors) in ieee80211_beacon_get_tim()
4995 if (sdata->vif.type != NL80211_IFTYPE_AP) in ieee80211_proberesp_get()
5000 ap = &sdata->u.ap; in ieee80211_proberesp_get()
5001 presp = rcu_dereference(ap->probe_resp); in ieee80211_proberesp_get()
5003 goto out; in ieee80211_proberesp_get()
5005 skb = dev_alloc_skb(presp->len); in ieee80211_proberesp_get()
5007 goto out; in ieee80211_proberesp_get()
5009 skb_put_data(skb, presp->data, presp->len); in ieee80211_proberesp_get()
5011 hdr = (struct ieee80211_hdr *) skb->data; in ieee80211_proberesp_get()
5012 memset(hdr->addr1, 0, sizeof(hdr->addr1)); in ieee80211_proberesp_get()
5014 out: in ieee80211_proberesp_get()
5027 if (sdata->vif.type != NL80211_IFTYPE_AP) in ieee80211_get_fils_discovery_tmpl()
5031 tmpl = rcu_dereference(sdata->u.ap.fils_discovery); in ieee80211_get_fils_discovery_tmpl()
5037 skb = dev_alloc_skb(sdata->local->hw.extra_tx_headroom + tmpl->len); in ieee80211_get_fils_discovery_tmpl()
5039 skb_reserve(skb, sdata->local->hw.extra_tx_headroom); in ieee80211_get_fils_discovery_tmpl()
5040 skb_put_data(skb, tmpl->data, tmpl->len); in ieee80211_get_fils_discovery_tmpl()
5056 if (sdata->vif.type != NL80211_IFTYPE_AP) in ieee80211_get_unsol_bcast_probe_resp_tmpl()
5060 tmpl = rcu_dereference(sdata->u.ap.unsol_bcast_probe_resp); in ieee80211_get_unsol_bcast_probe_resp_tmpl()
5066 skb = dev_alloc_skb(sdata->local->hw.extra_tx_headroom + tmpl->len); in ieee80211_get_unsol_bcast_probe_resp_tmpl()
5068 skb_reserve(skb, sdata->local->hw.extra_tx_headroom); in ieee80211_get_unsol_bcast_probe_resp_tmpl()
5069 skb_put_data(skb, tmpl->data, tmpl->len); in ieee80211_get_unsol_bcast_probe_resp_tmpl()
5086 if (WARN_ON(vif->type != NL80211_IFTYPE_STATION)) in ieee80211_pspoll_get()
5090 ifmgd = &sdata->u.mgd; in ieee80211_pspoll_get()
5091 local = sdata->local; in ieee80211_pspoll_get()
5093 skb = dev_alloc_skb(local->hw.extra_tx_headroom + sizeof(*pspoll)); in ieee80211_pspoll_get()
5097 skb_reserve(skb, local->hw.extra_tx_headroom); in ieee80211_pspoll_get()
5100 pspoll->frame_control = cpu_to_le16(IEEE80211_FTYPE_CTL | in ieee80211_pspoll_get()
5102 pspoll->aid = cpu_to_le16(sdata->vif.bss_conf.aid); in ieee80211_pspoll_get()
5104 /* aid in PS-Poll has its two MSBs each set to 1 */ in ieee80211_pspoll_get()
5105 pspoll->aid |= cpu_to_le16(1 << 15 | 1 << 14); in ieee80211_pspoll_get()
5107 memcpy(pspoll->bssid, ifmgd->bssid, ETH_ALEN); in ieee80211_pspoll_get()
5108 memcpy(pspoll->ta, vif->addr, ETH_ALEN); in ieee80211_pspoll_get()
5125 if (WARN_ON(vif->type != NL80211_IFTYPE_STATION)) in ieee80211_nullfunc_get()
5129 ifmgd = &sdata->u.mgd; in ieee80211_nullfunc_get()
5130 local = sdata->local; in ieee80211_nullfunc_get()
5136 sta = sta_info_get(sdata, ifmgd->bssid); in ieee80211_nullfunc_get()
5137 qos = sta && sta->sta.wme; in ieee80211_nullfunc_get()
5141 skb = dev_alloc_skb(local->hw.extra_tx_headroom + in ieee80211_nullfunc_get()
5146 skb_reserve(skb, local->hw.extra_tx_headroom); in ieee80211_nullfunc_get()
5149 nullfunc->frame_control = cpu_to_le16(IEEE80211_FTYPE_DATA | in ieee80211_nullfunc_get()
5158 nullfunc->frame_control |= in ieee80211_nullfunc_get()
5160 skb->priority = 7; in ieee80211_nullfunc_get()
5165 memcpy(nullfunc->addr1, ifmgd->bssid, ETH_ALEN); in ieee80211_nullfunc_get()
5166 memcpy(nullfunc->addr2, vif->addr, ETH_ALEN); in ieee80211_nullfunc_get()
5167 memcpy(nullfunc->addr3, ifmgd->bssid, ETH_ALEN); in ieee80211_nullfunc_get()
5186 skb = dev_alloc_skb(local->hw.extra_tx_headroom + sizeof(*hdr) + in ieee80211_probereq_get()
5191 skb_reserve(skb, local->hw.extra_tx_headroom); in ieee80211_probereq_get()
5194 hdr->frame_control = cpu_to_le16(IEEE80211_FTYPE_MGMT | in ieee80211_probereq_get()
5196 eth_broadcast_addr(hdr->addr1); in ieee80211_probereq_get()
5197 memcpy(hdr->addr2, src_addr, ETH_ALEN); in ieee80211_probereq_get()
5198 eth_broadcast_addr(hdr->addr3); in ieee80211_probereq_get()
5218 rts->frame_control = in ieee80211_rts_get()
5220 rts->duration = ieee80211_rts_duration(hw, vif, frame_len, in ieee80211_rts_get()
5222 memcpy(rts->ra, hdr->addr1, sizeof(rts->ra)); in ieee80211_rts_get()
5223 memcpy(rts->ta, hdr->addr2, sizeof(rts->ta)); in ieee80211_rts_get()
5234 cts->frame_control = in ieee80211_ctstoself_get()
5236 cts->duration = ieee80211_ctstoself_duration(hw, vif, in ieee80211_ctstoself_get()
5238 memcpy(cts->ra, hdr->addr1, sizeof(cts->ra)); in ieee80211_ctstoself_get()
5257 chanctx_conf = rcu_dereference(sdata->vif.chanctx_conf); in ieee80211_get_buffered_bc()
5260 goto out; in ieee80211_get_buffered_bc()
5262 if (sdata->vif.type == NL80211_IFTYPE_AP) { in ieee80211_get_buffered_bc()
5264 rcu_dereference(sdata->u.ap.beacon); in ieee80211_get_buffered_bc()
5266 if (!beacon || !beacon->head) in ieee80211_get_buffered_bc()
5267 goto out; in ieee80211_get_buffered_bc()
5269 ps = &sdata->u.ap.ps; in ieee80211_get_buffered_bc()
5270 } else if (ieee80211_vif_is_mesh(&sdata->vif)) { in ieee80211_get_buffered_bc()
5271 ps = &sdata->u.mesh.ps; in ieee80211_get_buffered_bc()
5273 goto out; in ieee80211_get_buffered_bc()
5276 if (ps->dtim_count != 0 || !ps->dtim_bc_mc) in ieee80211_get_buffered_bc()
5277 goto out; /* send buffered bc/mc only after DTIM beacon */ in ieee80211_get_buffered_bc()
5280 skb = skb_dequeue(&ps->bc_buf); in ieee80211_get_buffered_bc()
5282 goto out; in ieee80211_get_buffered_bc()
5283 local->total_ps_buffered--; in ieee80211_get_buffered_bc()
5285 if (!skb_queue_empty(&ps->bc_buf) && skb->len >= 2) { in ieee80211_get_buffered_bc()
5287 (struct ieee80211_hdr *) skb->data; in ieee80211_get_buffered_bc()
5291 hdr->frame_control |= in ieee80211_get_buffered_bc()
5295 if (sdata->vif.type == NL80211_IFTYPE_AP) in ieee80211_get_buffered_bc()
5296 sdata = IEEE80211_DEV_TO_SUB_IF(skb->dev); in ieee80211_get_buffered_bc()
5305 info->band = chanctx_conf->def.chan->band; in ieee80211_get_buffered_bc()
5309 out: in ieee80211_get_buffered_bc()
5319 struct ieee80211_sub_if_data *sdata = sta->sdata; in ieee80211_reserve_tid()
5320 struct ieee80211_local *local = sdata->local; in ieee80211_reserve_tid()
5324 lockdep_assert_held(&local->sta_mtx); in ieee80211_reserve_tid()
5327 switch (sdata->vif.type) { in ieee80211_reserve_tid()
5334 return -EINVAL; in ieee80211_reserve_tid()
5338 return -EINVAL; in ieee80211_reserve_tid()
5340 if (sta->reserved_tid == tid) { in ieee80211_reserve_tid()
5342 goto out; in ieee80211_reserve_tid()
5345 if (sta->reserved_tid != IEEE80211_TID_UNRESERVED) { in ieee80211_reserve_tid()
5347 ret = -EALREADY; in ieee80211_reserve_tid()
5348 goto out; in ieee80211_reserve_tid()
5351 ieee80211_stop_vif_queues(sdata->local, sdata, in ieee80211_reserve_tid()
5357 if (ieee80211_hw_check(&local->hw, AMPDU_AGGREGATION)) { in ieee80211_reserve_tid()
5363 queues = BIT(sdata->vif.hw_queue[ieee802_1d_to_ac[tid]]); in ieee80211_reserve_tid()
5366 sta->reserved_tid = tid; in ieee80211_reserve_tid()
5371 if (ieee80211_hw_check(&local->hw, AMPDU_AGGREGATION)) in ieee80211_reserve_tid()
5375 out: in ieee80211_reserve_tid()
5383 struct ieee80211_sub_if_data *sdata = sta->sdata; in ieee80211_unreserve_tid()
5385 lockdep_assert_held(&sdata->local->sta_mtx); in ieee80211_unreserve_tid()
5388 switch (sdata->vif.type) { in ieee80211_unreserve_tid()
5398 if (tid != sta->reserved_tid) { in ieee80211_unreserve_tid()
5403 sta->reserved_tid = IEEE80211_TID_UNRESERVED; in ieee80211_unreserve_tid()
5409 enum nl80211_band band) in __ieee80211_tx_skb_tid_band() argument
5415 skb->priority = tid; in __ieee80211_tx_skb_tid_band()
5417 skb->dev = sdata->dev; in __ieee80211_tx_skb_tid_band()
5425 IEEE80211_SKB_CB(skb)->band = band; in __ieee80211_tx_skb_tid_band()
5436 struct ieee80211_local *local = sdata->local; in ieee80211_tx_control_port()
5443 * or Pre-Authentication in ieee80211_tx_control_port()
5445 if (proto != sdata->control_port_protocol && in ieee80211_tx_control_port()
5447 return -EINVAL; in ieee80211_tx_control_port()
5449 if (proto == sdata->control_port_protocol) in ieee80211_tx_control_port()
5462 skb = dev_alloc_skb(local->hw.extra_tx_headroom + in ieee80211_tx_control_port()
5465 return -ENOMEM; in ieee80211_tx_control_port()
5467 skb_reserve(skb, local->hw.extra_tx_headroom + sizeof(struct ethhdr)); in ieee80211_tx_control_port()
5472 memcpy(ehdr->h_dest, dest, ETH_ALEN); in ieee80211_tx_control_port()
5473 memcpy(ehdr->h_source, sdata->vif.addr, ETH_ALEN); in ieee80211_tx_control_port()
5474 ehdr->h_proto = proto; in ieee80211_tx_control_port()
5476 skb->dev = dev; in ieee80211_tx_control_port()
5477 skb->protocol = htons(ETH_P_802_3); in ieee80211_tx_control_port()
5482 mutex_lock(&local->mtx); in ieee80211_tx_control_port()
5485 __ieee80211_subif_start_xmit(skb, skb->dev, flags, ctrl_flags, cookie); in ieee80211_tx_control_port()
5488 mutex_unlock(&local->mtx); in ieee80211_tx_control_port()
5497 struct ieee80211_local *local = sdata->local; in ieee80211_probe_mesh_link()
5500 skb = dev_alloc_skb(local->hw.extra_tx_headroom + len + in ieee80211_probe_mesh_link()
5504 return -ENOMEM; in ieee80211_probe_mesh_link()
5506 skb_reserve(skb, local->hw.extra_tx_headroom); in ieee80211_probe_mesh_link()
5509 skb->dev = dev; in ieee80211_probe_mesh_link()
5510 skb->protocol = htons(ETH_P_802_3); in ieee80211_probe_mesh_link()
5515 __ieee80211_subif_start_xmit(skb, skb->dev, 0, in ieee80211_probe_mesh_link()