Lines Matching +full:tx +full:- +full:ts +full:- +full:max
2 * Copyright (c) 2008-2011 Atheros Communications Inc.
17 #include <linux/dma-mapping.h>
33 #define TIME_SYMBOLS_HALFGI(t) (((t) * 5 - 4) / 18)
35 #define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) (((_usec*5)-4)/18)
43 { 104, 216 }, /* 3: 16-QAM 1/2 */
44 { 156, 324 }, /* 4: 16-QAM 3/4 */
45 { 208, 432 }, /* 5: 64-QAM 2/3 */
46 { 234, 486 }, /* 6: 64-QAM 3/4 */
47 { 260, 540 }, /* 7: 64-QAM 5/6 */
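The paired values in this table are bits per OFDM symbol for HT20 and HT40 at each MCS (e.g. 64-QAM 5/6: 52 data subcarriers x 6 coded bits x 5/6 = 260 for 20 MHz, 108 subcarriers giving 540 for 40 MHz), and the HALFGI macros above encode the 3.6 us short-guard-interval symbol time in integer math, since usec / 3.6 == usec * 5 / 18, with the `- 4` biasing the division downward so a barely started symbol is not counted. A minimal user-space sketch of the same macros (illustrative only, not the driver's code):

```c
#include <stdio.h>

/* Mirror of the driver's symbol-count macros: a full-GI HT symbol lasts
 * 4 us (so usec >> 2 symbols fit), a half-GI symbol 3.6 us, and
 * usec / 3.6 == usec * 5 / 18 in integer arithmetic. */
#define NUM_SYMBOLS_PER_USEC(_usec)        ((_usec) >> 2)
#define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) ((((_usec) * 5) - 4) / 18)

int main(void)
{
    /* e.g. an 8 us MPDU-density gap: 2 full-GI symbols, 2 half-GI symbols */
    unsigned int usec = 8;

    printf("%u us -> %u full-GI symbols, %u half-GI symbols\n",
           usec, NUM_SYMBOLS_PER_USEC(usec),
           NUM_SYMBOLS_PER_USEC_HALFGI(usec));
    return 0;
}
```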
58 struct ath_tx_status *ts, int txok);
62 struct ath_tx_status *ts, int nframes, int nbad,
87 struct ieee80211_sta *sta = info->status.status_driver_data[0]; in ath_tx_status()
89 if (info->flags & (IEEE80211_TX_CTL_REQ_TX_STATUS | in ath_tx_status()
102 __releases(&txq->axq_lock) in ath_txq_unlock_complete()
104 struct ieee80211_hw *hw = sc->hw; in ath_txq_unlock_complete()
109 skb_queue_splice_init(&txq->complete_q, &q); in ath_txq_unlock_complete()
110 spin_unlock_bh(&txq->axq_lock); in ath_txq_unlock_complete()
121 ieee80211_schedule_txq(sc->hw, queue); in ath_tx_queue_tid()
126 struct ath_softc *sc = hw->priv; in ath9k_wake_tx_queue()
127 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_wake_tx_queue()
128 struct ath_atx_tid *tid = (struct ath_atx_tid *) queue->drv_priv; in ath9k_wake_tx_queue()
129 struct ath_txq *txq = tid->txq; in ath9k_wake_tx_queue()
131 ath_dbg(common, QUEUE, "Waking TX queue: %pM (%d)\n", in ath9k_wake_tx_queue()
132 queue->sta ? queue->sta->addr : queue->vif->addr, in ath9k_wake_tx_queue()
133 tid->tidno); in ath9k_wake_tx_queue()
144 sizeof(tx_info->status.status_driver_data)); in get_frame_info()
145 return (struct ath_frame_info *) &tx_info->status.status_driver_data[0]; in get_frame_info()
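get_frame_info() overlays ath9k's per-frame state on the status_driver_data scratch area inside mac80211's ieee80211_tx_info, so no extra allocation is needed per skb; the size check alongside it (a BUILD_BUG_ON in the driver) guards that the private struct still fits. A small stand-alone sketch of that overlay pattern, with hypothetical struct names rather than the real mac80211 layout:

```c
#include <stdio.h>

/* Hypothetical stand-ins for the mac80211/ath9k structures. */
struct tx_status { void *status_driver_data[4]; };
struct frame_info { unsigned short framelen; unsigned char retries; };

/* Compile-time guard that the private struct fits into the scratch
 * area -- the same idea as the driver's BUILD_BUG_ON(). */
_Static_assert(sizeof(struct frame_info) <=
               sizeof(((struct tx_status *)0)->status_driver_data),
               "frame_info too large");

static struct frame_info *get_frame_info(struct tx_status *st)
{
    return (struct frame_info *)&st->status_driver_data[0];
}

int main(void)
{
    struct tx_status st = { { 0 } };

    get_frame_info(&st)->framelen = 1500;   /* store state in-place */
    printf("framelen=%d\n", get_frame_info(&st)->framelen);
    return 0;
}
```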
150 if (!tid->an->sta) in ath_send_bar()
153 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
166 ratetbl = rcu_dereference(sta->rates); in ath_merge_ratetbl()
170 if (tx_info->control.rates[0].idx < 0 || in ath_merge_ratetbl()
171 tx_info->control.rates[0].count == 0) in ath_merge_ratetbl()
175 bf->rates[0] = tx_info->control.rates[0]; in ath_merge_ratetbl()
180 bf->rates[i].idx = ratetbl->rate[i].idx; in ath_merge_ratetbl()
181 bf->rates[i].flags = ratetbl->rate[i].flags; in ath_merge_ratetbl()
182 if (tx_info->control.use_rts) in ath_merge_ratetbl()
183 bf->rates[i].count = ratetbl->rate[i].count_rts; in ath_merge_ratetbl()
184 else if (tx_info->control.use_cts_prot) in ath_merge_ratetbl()
185 bf->rates[i].count = ratetbl->rate[i].count_cts; in ath_merge_ratetbl()
187 bf->rates[i].count = ratetbl->rate[i].count; in ath_merge_ratetbl()
198 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_set_rates()
201 ieee80211_get_tx_rates(vif, sta, bf->bf_mpdu, bf->rates, in ath_set_rates()
202 ARRAY_SIZE(bf->rates)); in ath_set_rates()
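ath_merge_ratetbl() honours an explicit first rate chosen per packet (control.rates[0]) and fills the remaining series from the station's cached rate table, picking the retry budget that matches the protection mode in force. A simplified sketch of that merge policy, with reduced types (the rate flags and MCS details are omitted):

```c
/* Sketch: series 0 may come from the per-packet rate-control choice, the
 * rest from the cached station table; the try count depends on whether
 * RTS or CTS-to-self protection is enabled for the frame. */
struct rate { int idx, count, count_rts, count_cts; };

static void merge_ratetbl(const struct rate *tbl, int n,
                          const struct rate *first, /* may be NULL */
                          int use_rts, int use_cts,
                          struct rate *out)
{
    int i = 0;

    if (first && first->idx >= 0 && first->count > 0)
        out[i++] = *first;              /* honour the per-packet choice */

    for (; i < n; i++) {
        out[i].idx   = tbl[i].idx;
        out[i].count = use_rts ? tbl[i].count_rts :
                       use_cts ? tbl[i].count_cts : tbl[i].count;
    }
}
```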
209 int q = fi->txq; in ath_txq_skb_done()
214 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
215 if (WARN_ON(--txq->pending_frames < 0)) in ath_txq_skb_done()
216 txq->pending_frames = 0; in ath_txq_skb_done()
223 u8 tidno = skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath_get_skb_tid()
231 struct ath_softc *sc = tid->an->sc; in ath_tid_pull()
232 struct ieee80211_hw *hw = sc->hw; in ath_tid_pull()
234 .txq = tid->txq, in ath_tid_pull()
235 .sta = tid->an->sta, in ath_tid_pull()
243 return -ENOENT; in ath_tid_pull()
252 if (tid->txq == sc->tx.txq_map[q]) { in ath_tid_pull()
254 fi->txq = q; in ath_tid_pull()
255 ++tid->txq->pending_frames; in ath_tid_pull()
266 *skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
275 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid()
279 struct ath_tx_status ts; in ath_tx_flush_tid() local
285 memset(&ts, 0, sizeof(ts)); in ath_tx_flush_tid()
287 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
289 bf = fi->bf; in ath_tx_flush_tid()
292 ieee80211_free_txskb(sc->hw, skb); in ath_tx_flush_tid()
296 if (fi->baw_tracked) { in ath_tx_flush_tid()
301 list_add_tail(&bf->list, &bf_head); in ath_tx_flush_tid()
302 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_flush_tid()
307 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
315 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_update_baw()
316 u16 seqno = bf->bf_state.seqno; in ath_tx_update_baw()
319 if (!fi->baw_tracked) in ath_tx_update_baw()
322 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
323 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
325 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
327 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
328 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
329 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
330 if (tid->bar_index >= 0) in ath_tx_update_baw()
331 tid->bar_index--; in ath_tx_update_baw()
338 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_addto_baw()
339 u16 seqno = bf->bf_state.seqno; in ath_tx_addto_baw()
342 if (fi->baw_tracked) in ath_tx_addto_baw()
345 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
346 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
347 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
348 fi->baw_tracked = 1; in ath_tx_addto_baw()
350 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
351 (ATH_TID_MAX_BUFS - 1))) { in ath_tx_addto_baw()
352 tid->baw_tail = cindex; in ath_tx_addto_baw()
353 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
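Both BAW helpers map a sequence number to a slot in a circular bitmap: ATH_BA_INDEX() takes (seqno - seq_start) mod 4096 and the slot is (baw_head + index) & (ATH_TID_MAX_BUFS - 1). When the oldest outstanding frame completes, seq_start slides forward to the next still-pending slot. A compact sketch of that window slide, assuming a 64-entry window held in one 64-bit word (the driver uses a multi-word bitmap via __set_bit/__clear_bit):

```c
#include <stdio.h>

#define SEQ_MAX   4096           /* 802.11 sequence-number space      */
#define MAX_BUFS  64             /* window slots, must be power of 2  */

static unsigned long long tx_buf;           /* one bit per pending slot */
static unsigned int seq_start, baw_head, baw_tail = 3;

/* Sketch of ath_tx_update_baw(): clear the completed slot, then slide
 * the window start past every slot that is no longer pending. */
static void baw_complete(unsigned int seqno)
{
    unsigned int index  = (seqno - seq_start) & (SEQ_MAX - 1);
    unsigned int cindex = (baw_head + index) & (MAX_BUFS - 1);

    tx_buf &= ~(1ULL << cindex);
    while (baw_head != baw_tail && !(tx_buf & (1ULL << baw_head))) {
        seq_start = (seq_start + 1) & (SEQ_MAX - 1);
        baw_head  = (baw_head + 1) & (MAX_BUFS - 1);
    }
}

int main(void)
{
    tx_buf = 0x7;                /* three frames in flight: seq 0,1,2 */
    baw_complete(1);             /* middle frame done: window stays   */
    printf("seq_start=%u\n", seq_start);   /* still 0                 */
    baw_complete(0);             /* head done: slides past 0 and 1    */
    printf("seq_start=%u\n", seq_start);   /* now 2                   */
    return 0;
}
```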
364 struct ath_tx_status ts; in ath_tid_drain() local
368 memset(&ts, 0, sizeof(ts)); in ath_tid_drain()
373 bf = fi->bf; in ath_tid_drain()
380 list_add_tail(&bf->list, &bf_head); in ath_tid_drain()
381 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tid_drain()
389 struct ath_buf *bf = fi->bf; in ath_tx_set_retry()
391 int prev = fi->retries; in ath_tx_set_retry()
393 TX_STAT_INC(sc, txq->axq_qnum, a_retries); in ath_tx_set_retry()
394 fi->retries += count; in ath_tx_set_retry()
399 hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_set_retry()
400 hdr->frame_control |= cpu_to_le16(IEEE80211_FCTL_RETRY); in ath_tx_set_retry()
401 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath_tx_set_retry()
409 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
411 if (unlikely(list_empty(&sc->tx.txbuf))) { in ath_tx_get_buffer()
412 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
416 bf = list_first_entry(&sc->tx.txbuf, struct ath_buf, list); in ath_tx_get_buffer()
417 list_del(&bf->list); in ath_tx_get_buffer()
419 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
426 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
427 list_add_tail(&bf->list, &sc->tx.txbuf); in ath_tx_return_buffer()
428 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
441 tbf->bf_mpdu = bf->bf_mpdu; in ath_clone_txbuf()
442 tbf->bf_buf_addr = bf->bf_buf_addr; in ath_clone_txbuf()
443 memcpy(tbf->bf_desc, bf->bf_desc, sc->sc_ah->caps.tx_desc_len); in ath_clone_txbuf()
444 tbf->bf_state = bf->bf_state; in ath_clone_txbuf()
445 tbf->bf_state.stale = false; in ath_clone_txbuf()
451 struct ath_tx_status *ts, int txok, in ath_tx_count_frames() argument
464 seq_st = ts->ts_seqnum; in ath_tx_count_frames()
465 memcpy(ba, &ts->ba_low, WME_BA_BMP_SIZE >> 3); in ath_tx_count_frames()
469 ba_index = ATH_BA_INDEX(seq_st, bf->bf_state.seqno); in ath_tx_count_frames()
475 bf = bf->bf_next; in ath_tx_count_frames()
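ath_tx_count_frames() walks the aggregate's buffer chain and tests each subframe's bit in the 64-bit block-ack bitmap, relative to the BA starting sequence. A sketch of that accounting with a plain array instead of the buffer chain (ATH_BA_ISSET and friends reduced to a shift and mask):

```c
/* Sketch: count acked vs. unacked subframes against a 64-bit block-ack
 * bitmap whose bit 0 corresponds to sequence number ba_seq. */
static void count_frames(const unsigned short *seqnos, int n,
                         unsigned long long ba_bitmap, unsigned short ba_seq,
                         int *nframes, int *nbad)
{
    *nframes = 0;
    *nbad = 0;
    for (int i = 0; i < n; i++) {
        /* position of this subframe inside the BA window, mod 4096 */
        unsigned int bit = (seqnos[i] - ba_seq) & 0xfff;

        (*nframes)++;
        if (bit >= 64 || !(ba_bitmap & (1ULL << bit)))
            (*nbad)++;           /* outside the window or not acked */
    }
}
```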
484 struct ath_tx_status *ts, int txok) in ath_tx_complete_aggr() argument
489 struct ath_buf *bf_next, *bf_last = bf->bf_lastbf; in ath_tx_complete_aggr()
499 bool flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_complete_aggr()
501 int bar_index = -1; in ath_tx_complete_aggr()
503 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
506 memcpy(rates, bf->rates, sizeof(rates)); in ath_tx_complete_aggr()
508 retries = ts->ts_longretry + 1; in ath_tx_complete_aggr()
509 for (i = 0; i < ts->ts_rateindex; i++) in ath_tx_complete_aggr()
515 bf_next = bf->bf_next; in ath_tx_complete_aggr()
517 if (!bf->bf_state.stale || bf_next != NULL) in ath_tx_complete_aggr()
518 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
520 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, ts, 0); in ath_tx_complete_aggr()
527 an = (struct ath_node *)sta->drv_priv; in ath_tx_complete_aggr()
528 seq_first = tid->seq_start; in ath_tx_complete_aggr()
529 isba = ts->ts_flags & ATH9K_TX_BA; in ath_tx_complete_aggr()
532 * The hardware occasionally sends a tx status for the wrong TID. in ath_tx_complete_aggr()
539 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
546 if (ts->ts_flags & ATH9K_TX_BA) { in ath_tx_complete_aggr()
547 seq_st = ts->ts_seqnum; in ath_tx_complete_aggr()
548 memcpy(ba, &ts->ba_low, WME_BA_BMP_SIZE >> 3); in ath_tx_complete_aggr()
557 if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION) in ath_tx_complete_aggr()
564 ath_tx_count_frames(sc, bf, ts, txok, &nframes, &nbad); in ath_tx_complete_aggr()
566 u16 seqno = bf->bf_state.seqno; in ath_tx_complete_aggr()
569 bf_next = bf->bf_next; in ath_tx_complete_aggr()
571 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
575 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
576 !tid->active) { in ath_tx_complete_aggr()
591 } else if (fi->retries < ATH_MAX_SW_RETRIES) { in ath_tx_complete_aggr()
592 if (txok || !an->sleeping) in ath_tx_complete_aggr()
593 ath_tx_set_retry(sc, txq, bf->bf_mpdu, in ath_tx_complete_aggr()
609 if (bf_next != NULL || !bf_last->bf_state.stale) in ath_tx_complete_aggr()
610 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
614 * complete the acked-ones/xretried ones; update in ath_tx_complete_aggr()
615 * block-ack window in ath_tx_complete_aggr()
620 memcpy(tx_info->control.rates, rates, sizeof(rates)); in ath_tx_complete_aggr()
621 ath_tx_rc_status(sc, bf, ts, nframes, nbad, txok); in ath_tx_complete_aggr()
623 if (bf == bf->bf_lastbf) in ath_tx_complete_aggr()
624 ath_dynack_sample_tx_ts(sc->sc_ah, in ath_tx_complete_aggr()
625 bf->bf_mpdu, in ath_tx_complete_aggr()
626 ts, sta); in ath_tx_complete_aggr()
629 ath_tx_complete_buf(sc, bf, txq, &bf_head, sta, ts, in ath_tx_complete_aggr()
632 if (tx_info->flags & IEEE80211_TX_STATUS_EOSP) { in ath_tx_complete_aggr()
633 tx_info->flags &= ~IEEE80211_TX_STATUS_EOSP; in ath_tx_complete_aggr()
636 /* retry the un-acked ones */ in ath_tx_complete_aggr()
637 if (bf->bf_next == NULL && bf_last->bf_state.stale) { in ath_tx_complete_aggr()
642 * Update tx baw and complete the in ath_tx_complete_aggr()
644 * run out of tx buf. in ath_tx_complete_aggr()
650 &bf_head, NULL, ts, in ath_tx_complete_aggr()
657 fi->bf = tbf; in ath_tx_complete_aggr()
670 /* prepend un-acked frames to the beginning of the pending frame queue */ in ath_tx_complete_aggr()
672 if (an->sleeping) in ath_tx_complete_aggr()
673 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
675 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
676 if (!an->sleeping) { in ath_tx_complete_aggr()
678 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_complete_aggr()
679 tid->clear_ps_filter = true; in ath_tx_complete_aggr()
686 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
687 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
700 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(bf->bf_mpdu); in bf_is_ampdu_not_probing()
701 return bf_isampdu(bf) && !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE); in bf_is_ampdu_not_probing()
707 struct ath_tx_status *ts, in ath_tx_count_airtime() argument
713 airtime += ts->duration * (ts->ts_longretry + 1); in ath_tx_count_airtime()
714 for(i = 0; i < ts->ts_rateindex; i++) { in ath_tx_count_airtime()
715 int rate_dur = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, i); in ath_tx_count_airtime()
716 airtime += rate_dur * bf->rates[i].count; in ath_tx_count_airtime()
723 struct ath_tx_status *ts, struct ath_buf *bf, in ath_tx_process_buffer() argument
726 struct ieee80211_hw *hw = sc->hw; in ath_tx_process_buffer()
733 txok = !(ts->ts_status & ATH9K_TXERR_MASK); in ath_tx_process_buffer()
734 flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_process_buffer()
735 txq->axq_tx_inprogress = false; in ath_tx_process_buffer()
737 txq->axq_depth--; in ath_tx_process_buffer()
739 txq->axq_ampdu_depth--; in ath_tx_process_buffer()
741 ts->duration = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, in ath_tx_process_buffer()
742 ts->ts_rateindex); in ath_tx_process_buffer()
744 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath_tx_process_buffer()
745 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr1, hdr->addr2); in ath_tx_process_buffer()
747 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_process_buffer()
748 tid = ath_get_skb_tid(sc, an, bf->bf_mpdu); in ath_tx_process_buffer()
749 ath_tx_count_airtime(sc, sta, bf, ts, tid->tidno); in ath_tx_process_buffer()
750 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_process_buffer()
751 tid->clear_ps_filter = true; in ath_tx_process_buffer()
756 info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_process_buffer()
757 memcpy(info->control.rates, bf->rates, in ath_tx_process_buffer()
758 sizeof(info->control.rates)); in ath_tx_process_buffer()
759 ath_tx_rc_status(sc, bf, ts, 1, txok ? 0 : 1, txok); in ath_tx_process_buffer()
760 ath_dynack_sample_tx_ts(sc->sc_ah, bf->bf_mpdu, ts, in ath_tx_process_buffer()
763 ath_tx_complete_buf(sc, bf, txq, bf_head, sta, ts, txok); in ath_tx_process_buffer()
765 ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok); in ath_tx_process_buffer()
778 skb = bf->bf_mpdu; in ath_lookup_legacy()
780 rates = tx_info->control.rates; in ath_lookup_legacy()
801 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
804 skb = bf->bf_mpdu; in ath_lookup_rate()
806 rates = bf->rates; in ath_lookup_rate()
833 frmlen = sc->tx.max_aggr_framelen[q][modeidx][rates[i].idx]; in ath_lookup_rate()
842 if (tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE || legacy) in ath_lookup_rate()
854 if (tid->an->maxampdu) in ath_lookup_rate()
855 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
873 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_compute_num_delims()
881 * TODO - this could be improved to be dependent on the rate. in ath_compute_num_delims()
884 if ((fi->keyix != ATH9K_TXKEYIX_INVALID) && in ath_compute_num_delims()
885 !(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)) in ath_compute_num_delims()
892 if (first_subfrm && !AR_SREV_9580_10_OR_LATER(sc->sc_ah) && in ath_compute_num_delims()
893 (sc->sc_ah->ent_mode & AR_ENT_OTP_MIN_PKT_SIZE_DISABLE)) in ath_compute_num_delims()
894 ndelim = max(ndelim, FIRST_DESC_NDELIMS); in ath_compute_num_delims()
906 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
909 rix = bf->rates[0].idx; in ath_compute_num_delims()
910 flags = bf->rates[0].flags; in ath_compute_num_delims()
915 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
917 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
927 mindelim = (minlen - frmlen) / ATH_AGGR_DELIM_SZ; in ath_compute_num_delims()
928 ndelim = max(mindelim, ndelim); in ath_compute_num_delims()
951 bf = fi->bf; in ath_tx_get_tid_subframe()
952 if (!fi->bf) in ath_tx_get_tid_subframe()
955 bf->bf_state.stale = false; in ath_tx_get_tid_subframe()
959 ieee80211_free_txskb(sc->hw, skb); in ath_tx_get_tid_subframe()
963 bf->bf_next = NULL; in ath_tx_get_tid_subframe()
964 bf->bf_lastbf = bf; in ath_tx_get_tid_subframe()
967 tx_info->flags &= ~(IEEE80211_TX_CTL_CLEAR_PS_FILT | in ath_tx_get_tid_subframe()
975 if (!tid->active) in ath_tx_get_tid_subframe()
976 tx_info->flags &= ~IEEE80211_TX_CTL_AMPDU; in ath_tx_get_tid_subframe()
978 if (!(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_get_tid_subframe()
979 bf->bf_state.bf_type = 0; in ath_tx_get_tid_subframe()
983 bf->bf_state.bf_type = BUF_AMPDU | BUF_AGGR; in ath_tx_get_tid_subframe()
984 seqno = bf->bf_state.seqno; in ath_tx_get_tid_subframe()
986 /* do not step over block-ack window */ in ath_tx_get_tid_subframe()
987 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) { in ath_tx_get_tid_subframe()
988 __skb_queue_tail(&tid->retry_q, skb); in ath_tx_get_tid_subframe()
993 if (!skb_queue_is_first(&tid->retry_q, skb) && in ath_tx_get_tid_subframe()
999 return -EINPROGRESS; in ath_tx_get_tid_subframe()
1002 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
1003 struct ath_tx_status ts = {}; in ath_tx_get_tid_subframe() local
1007 list_add(&bf->list, &bf_head); in ath_tx_get_tid_subframe()
1009 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_get_tid_subframe()
1028 #define PADBYTES(_len) ((4 - ((_len) % 4)) % 4) in ath_tx_form_aggr()
1032 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
1043 skb = bf->bf_mpdu; in ath_tx_form_aggr()
1047 al_delta = ATH_AGGR_DELIM_SZ + fi->framelen; in ath_tx_form_aggr()
1053 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_aggr()
1054 if ((tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) || in ath_tx_form_aggr()
1055 !(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) in ath_tx_form_aggr()
1066 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
1071 bf->bf_next = NULL; in ath_tx_form_aggr()
1074 bf->bf_state.ndelim = ndelim; in ath_tx_form_aggr()
1076 list_add_tail(&bf->list, bf_q); in ath_tx_form_aggr()
1078 bf_prev->bf_next = bf; in ath_tx_form_aggr()
1088 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_aggr()
1091 bf->bf_lastbf = bf_prev; in ath_tx_form_aggr()
1094 al = get_frame_info(bf->bf_mpdu)->framelen; in ath_tx_form_aggr()
1095 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_form_aggr()
1097 TX_STAT_INC(sc, txq->axq_qnum, a_aggr); in ath_tx_form_aggr()
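Each subframe in the aggregate costs one 4-byte MPDU delimiter plus its own length, and the previous subframe is padded to a 4-byte boundary before the next begins, plus any extra delimiters (ndelim) needed to satisfy the peer's minimum MPDU spacing. A simplified sketch of that length accounting (the driver additionally caps al at the rate-dependent aggregate limit and the subframe count at h_baw):

```c
#include <stdio.h>

#define DELIM_SZ 4                              /* one MPDU delimiter  */
#define PADBYTES(_len) ((4 - ((_len) % 4)) % 4) /* pad to 4-byte align */

/* Sketch: accumulate the on-air length of an A-MPDU roughly the way
 * ath_tx_form_aggr() does -- delimiter + subframe, plus the previous
 * subframe's pad and spacing delimiters. */
int main(void)
{
    unsigned int frames[] = { 1534, 221, 98 };  /* example MPDU lengths */
    unsigned int al = 0, prev = 0;

    for (unsigned int i = 0; i < 3; i++) {
        unsigned int ndelim = 0;            /* spacing delims, if any */

        al += DELIM_SZ + frames[i];
        if (i > 0)
            al += PADBYTES(prev) + ndelim * DELIM_SZ;
        prev = frames[i];
        printf("after subframe %u: al = %u bytes\n", i, al);
    }
    return 0;
}
```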
1105 * rix - rate index
1106 * pktlen - total bytes (delims + data + fcs + pads + pad delims)
1107 * width - 0 for 20 MHz, 1 for 40 MHz
1108 * half_gi - to use 4 us vs. 3.6 us for symbol time
1120 nsymbols = (nbits + nsymbits - 1) / nsymbits; in ath_pkt_duration()
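The core of ath_pkt_duration() is a ceiling division: payload bits (plus the PLCP service/tail bits) divided by the MCS's bits-per-symbol, then scaled by the 4 us (full GI) or 3.6 us (half GI) symbol time. A worked stand-alone version, mirroring the driver's integer rounding for the half-GI case:

```c
#include <stdio.h>

#define OFDM_PLCP_BITS 22    /* SERVICE + tail bits counted by the driver */

/* Sketch of ath_pkt_duration()'s math: ceil-divide payload bits by
 * bits-per-symbol, then scale by the symbol time. */
static unsigned int pkt_duration_us(unsigned int pktlen_bytes,
                                    unsigned int bits_per_symbol,
                                    int half_gi)
{
    unsigned int nbits    = (pktlen_bytes << 3) + OFDM_PLCP_BITS;
    unsigned int nsymbols = (nbits + bits_per_symbol - 1) / bits_per_symbol;

    if (!half_gi)
        return nsymbols * 4;                  /* 4 us per symbol      */
    return (nsymbols * 18 + 4) / 5;           /* 3.6 us, integer math */
}

int main(void)
{
    /* 1500-byte MPDU at MCS 7 (260 bits/symbol in HT20) */
    printf("%u us full GI, %u us half GI\n",
           pkt_duration_us(1500, 260, 0), pkt_duration_us(1500, 260, 1));
    return 0;
}
```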
1139 usec -= L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams); in ath_max_framelen()
1142 bits -= OFDM_PLCP_BITS; in ath_max_framelen()
1159 cur_ht20 = sc->tx.max_aggr_framelen[queue][MCS_HT20]; in ath_update_max_aggr_framelen()
1160 cur_ht20_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT20_SGI]; in ath_update_max_aggr_framelen()
1161 cur_ht40 = sc->tx.max_aggr_framelen[queue][MCS_HT40]; in ath_update_max_aggr_framelen()
1162 cur_ht40_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT40_SGI]; in ath_update_max_aggr_framelen()
1178 struct ath_hw *ah = sc->sc_ah; in ath_get_rate_txpower()
1180 if (sc->tx99_state || !ah->tpc_enabled) in ath_get_rate_txpower()
1183 skb = bf->bf_mpdu; in ath_get_rate_txpower()
1188 int txpower = fi->tx_power; in ath_get_rate_txpower()
1192 struct ar5416_eeprom_def *eep = &ah->eeprom.def; in ath_get_rate_txpower()
1193 u16 eeprom_rev = ah->eep_ops->get_eeprom_rev(ah); in ath_get_rate_txpower()
1199 is_2ghz = info->band == NL80211_BAND_2GHZ; in ath_get_rate_txpower()
1200 pmodal = &eep->modalHeader[is_2ghz]; in ath_get_rate_txpower()
1201 power_ht40delta = pmodal->ht40PowerIncForPdadc; in ath_get_rate_txpower()
1210 txpower -= 2 * AR9287_PWR_TABLE_OFFSET_DB; in ath_get_rate_txpower()
1214 power_offset = ah->eep_ops->get_eeprom(ah, in ath_get_rate_txpower()
1216 txpower -= 2 * power_offset; in ath_get_rate_txpower()
1220 txpower -= 2; in ath_get_rate_txpower()
1222 txpower = max(txpower, 0); in ath_get_rate_txpower()
1223 max_power = min_t(u8, ah->tx_power[rateidx], txpower); in ath_get_rate_txpower()
1225 /* XXX: clamp minimum TX power at 1 for AR9160 since if in ath_get_rate_txpower()
1226 * max_power is set to 0, frames are transmitted at max in ath_get_rate_txpower()
1227 * TX power in ath_get_rate_txpower()
1231 } else if (!bf->bf_state.bfs_paprd) { in ath_get_rate_txpower()
1232 if (rateidx < 8 && (info->flags & IEEE80211_TX_CTL_STBC)) in ath_get_rate_txpower()
1233 max_power = min_t(u8, ah->tx_power_stbc[rateidx], in ath_get_rate_txpower()
1234 fi->tx_power); in ath_get_rate_txpower()
1236 max_power = min_t(u8, ah->tx_power[rateidx], in ath_get_rate_txpower()
1237 fi->tx_power); in ath_get_rate_txpower()
1239 max_power = ah->paprd_training_power; in ath_get_rate_txpower()
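All the power arithmetic in this function is in 0.5 dB steps: mac80211 supplies dBm, so values are doubled on the way in, EEPROM power offsets are likewise doubled (txpower -= 2 * power_offset), and the result is clamped to the per-rate hardware ceiling. A reduced sketch of that clamp with illustrative values (the real code also handles the STBC and PAPRD cases):

```c
/* Sketch of the half-dB TPC clamp: mac80211 hands the driver dBm, the
 * hardware rate-power tables are in 0.5 dB units. */
static int rate_txpower(int vif_txpower_dbm,
                        int eeprom_offset_db,
                        unsigned char hw_rate_limit_halfdb)
{
    int txpower = 2 * vif_txpower_dbm;     /* dBm -> 0.5 dB units */

    txpower -= 2 * eeprom_offset_db;       /* EEPROM power offset */
    if (txpower < 0)
        txpower = 0;

    /* never exceed what the hardware allows for this rate */
    return txpower < hw_rate_limit_halfdb ? txpower : hw_rate_limit_halfdb;
}
```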
1248 struct ath_hw *ah = sc->sc_ah; in ath_buf_set_rate()
1255 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_buf_set_rate()
1256 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_buf_set_rate()
1260 skb = bf->bf_mpdu; in ath_buf_set_rate()
1262 rates = bf->rates; in ath_buf_set_rate()
1263 hdr = (struct ieee80211_hdr *)skb->data; in ath_buf_set_rate()
1265 /* set dur_update_en for l-sig computation except for PS-Poll frames */ in ath_buf_set_rate()
1266 info->dur_update = !ieee80211_is_pspoll(hdr->frame_control); in ath_buf_set_rate()
1267 info->rtscts_rate = fi->rtscts_rate; in ath_buf_set_rate()
1269 for (i = 0; i < ARRAY_SIZE(bf->rates); i++) { in ath_buf_set_rate()
1277 info->rates[i].Tries = rates[i].count; in ath_buf_set_rate()
1284 unlikely(rts_thresh != (u32) -1)) { in ath_buf_set_rate()
1290 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1291 info->flags |= ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1293 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1294 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1298 info->rates[i].RateFlags |= ATH9K_RATESERIES_2040; in ath_buf_set_rate()
1300 info->rates[i].RateFlags |= ATH9K_RATESERIES_HALFGI; in ath_buf_set_rate()
1308 info->rates[i].Rate = rix | 0x80; in ath_buf_set_rate()
1309 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1310 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1311 info->rates[i].PktDuration = ath_pkt_duration(sc, rix, len, in ath_buf_set_rate()
1313 if (rix < 8 && (tx_info->flags & IEEE80211_TX_CTL_STBC)) in ath_buf_set_rate()
1314 info->rates[i].RateFlags |= ATH9K_RATESERIES_STBC; in ath_buf_set_rate()
1315 if (rix >= 8 && fi->dyn_smps) { in ath_buf_set_rate()
1316 info->rates[i].RateFlags |= in ath_buf_set_rate()
1318 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1321 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, in ath_buf_set_rate()
1327 rate = &common->sbands[tx_info->band].bitrates[rates[i].idx]; in ath_buf_set_rate()
1328 if ((tx_info->band == NL80211_BAND_2GHZ) && in ath_buf_set_rate()
1329 !(rate->flags & IEEE80211_RATE_ERP_G)) in ath_buf_set_rate()
1334 info->rates[i].Rate = rate->hw_value; in ath_buf_set_rate()
1335 if (rate->hw_value_short) { in ath_buf_set_rate()
1337 info->rates[i].Rate |= rate->hw_value_short; in ath_buf_set_rate()
1342 if (bf->bf_state.bfs_paprd) in ath_buf_set_rate()
1343 info->rates[i].ChSel = ah->txchainmask; in ath_buf_set_rate()
1345 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1346 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1348 info->rates[i].PktDuration = ath9k_hw_computetxtime(sc->sc_ah, in ath_buf_set_rate()
1349 phy, rate->bitrate * 100, len, rix, is_sp); in ath_buf_set_rate()
1351 is_cck = IS_CCK_RATE(info->rates[i].Rate); in ath_buf_set_rate()
1352 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, false, in ath_buf_set_rate()
1356 /* For AR5416 - RTS cannot be followed by a frame larger than 8K */ in ath_buf_set_rate()
1357 if (bf_isaggr(bf) && (len > sc->sc_ah->caps.rts_aggr_limit)) in ath_buf_set_rate()
1358 info->flags &= ~ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1361 if (info->flags & ATH9K_TXDESC_RTSENA) in ath_buf_set_rate()
1362 info->flags &= ~ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
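For each rate series the driver chooses protection: RTS when the frame exceeds the wiphy RTS threshold or the rate itself requests it, otherwise CTS-to-self when the rate carries the CTS protection flag; HT rates are then marked by setting bit 7 of the rate code (rix | 0x80). A condensed sketch of just the protection decision:

```c
/* Sketch of the per-series protection choice made in ath_buf_set_rate():
 * RTS for long frames or rates that ask for it, else CTS-to-self when
 * the rate carries the CTS protection flag. */
enum prot { PROT_NONE, PROT_RTS, PROT_CTS };

static enum prot series_protection(unsigned int framelen,
                                   unsigned int rts_thresh,   /* ~0u = off */
                                   int rate_wants_rts, int rate_wants_cts)
{
    if (rate_wants_rts || (rts_thresh != ~0u && framelen > rts_thresh))
        return PROT_RTS;
    if (rate_wants_cts)
        return PROT_CTS;
    return PROT_NONE;
}
```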
1371 hdr = (struct ieee80211_hdr *)skb->data; in get_hw_packet_type()
1372 fc = hdr->frame_control; in get_hw_packet_type()
1391 struct ath_hw *ah = sc->sc_ah; in ath_tx_fill_desc()
1394 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_tx_fill_desc()
1400 info.qcu = txq->axq_qnum; in ath_tx_fill_desc()
1403 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_fill_desc()
1406 bool aggr = !!(bf->bf_state.bf_type & BUF_AGGR); in ath_tx_fill_desc()
1409 if (bf->bf_next) in ath_tx_fill_desc()
1410 info.link = bf->bf_next->bf_daddr; in ath_tx_fill_desc()
1412 info.link = (sc->tx99_state) ? bf->bf_daddr : 0; in ath_tx_fill_desc()
1417 if (!sc->tx99_state) in ath_tx_fill_desc()
1419 if ((tx_info->flags & IEEE80211_TX_CTL_CLEAR_PS_FILT) || in ath_tx_fill_desc()
1420 txq == sc->tx.uapsdq) in ath_tx_fill_desc()
1423 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_fill_desc()
1425 if (tx_info->flags & IEEE80211_TX_CTL_LDPC) in ath_tx_fill_desc()
1428 if (bf->bf_state.bfs_paprd) in ath_tx_fill_desc()
1429 info.flags |= (u32) bf->bf_state.bfs_paprd << in ath_tx_fill_desc()
1440 unlikely(rts_thresh != (u32) -1)) { in ath_tx_fill_desc()
1449 len = fi->framelen; in ath_tx_fill_desc()
1454 info.buf_addr[0] = bf->bf_buf_addr; in ath_tx_fill_desc()
1455 info.buf_len[0] = skb->len; in ath_tx_fill_desc()
1456 info.pkt_len = fi->framelen; in ath_tx_fill_desc()
1457 info.keyix = fi->keyix; in ath_tx_fill_desc()
1458 info.keytype = fi->keytype; in ath_tx_fill_desc()
1463 else if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1468 info.ndelim = bf->bf_state.ndelim; in ath_tx_fill_desc()
1472 if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1475 ath9k_hw_set_txdesc(ah, bf->bf_desc, &info); in ath_tx_fill_desc()
1476 bf = bf->bf_next; in ath_tx_fill_desc()
1492 list_add_tail(&bf->list, bf_q); in ath_tx_form_burst()
1494 bf_prev->bf_next = bf; in ath_tx_form_burst()
1504 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_burst()
1505 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_form_burst()
1506 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_burst()
1510 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1529 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_sched_aggr()
1530 aggr = !!(tx_info->flags & IEEE80211_TX_CTL_AMPDU); in ath_tx_sched_aggr()
1531 if ((aggr && txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) || in ath_tx_sched_aggr()
1532 (!aggr && txq->axq_depth >= ATH_NON_AGGR_MIN_QDEPTH)) { in ath_tx_sched_aggr()
1533 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_sched_aggr()
1534 return -EBUSY; in ath_tx_sched_aggr()
1537 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1544 return -EAGAIN; in ath_tx_sched_aggr()
1546 if (tid->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1547 tid->clear_ps_filter = false; in ath_tx_sched_aggr()
1548 tx_info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_sched_aggr()
1559 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_start()
1567 an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_start()
1569 txq = txtid->txq; in ath_tx_aggr_start()
1574 * in HT IBSS when a beacon with HT-info is received after the station in ath_tx_aggr_start()
1577 if (sta->deflink.ht_cap.ht_supported) { in ath_tx_aggr_start()
1578 an->maxampdu = (1 << (IEEE80211_HT_MAX_AMPDU_FACTOR + in ath_tx_aggr_start()
1579 sta->deflink.ht_cap.ampdu_factor)) - 1; in ath_tx_aggr_start()
1580 density = ath9k_parse_mpdudensity(sta->deflink.ht_cap.ampdu_density); in ath_tx_aggr_start()
1581 an->mpdudensity = density; in ath_tx_aggr_start()
1584 txtid->active = true; in ath_tx_aggr_start()
1585 *ssn = txtid->seq_start = txtid->seq_next; in ath_tx_aggr_start()
1586 txtid->bar_index = -1; in ath_tx_aggr_start()
1588 memset(txtid->tx_buf, 0, sizeof(txtid->tx_buf)); in ath_tx_aggr_start()
1589 txtid->baw_head = txtid->baw_tail = 0; in ath_tx_aggr_start()
1598 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_stop()
1599 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_stop()
1601 struct ath_txq *txq = txtid->txq; in ath_tx_aggr_stop()
1606 txtid->active = false; in ath_tx_aggr_stop()
1614 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_sleep()
1623 if (!skb_queue_empty(&tid->retry_q)) in ath_tx_aggr_sleep()
1624 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_aggr_sleep()
1631 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_wakeup()
1640 txq = tid->txq; in ath_tx_aggr_wakeup()
1643 tid->clear_ps_filter = true; in ath_tx_aggr_wakeup()
1644 if (!skb_queue_empty(&tid->retry_q)) { in ath_tx_aggr_wakeup()
1661 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath9k_set_moredata()
1662 if ((hdr->frame_control & mask) != mask_val) { in ath9k_set_moredata()
1663 hdr->frame_control = (hdr->frame_control & ~mask) | mask_val; in ath9k_set_moredata()
1664 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath9k_set_moredata()
1675 struct ath_softc *sc = hw->priv; in ath9k_release_buffered_frames()
1676 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath9k_release_buffered_frames()
1677 struct ath_txq *txq = sc->tx.uapsdq; in ath9k_release_buffered_frames()
1693 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1695 ret = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, in ath9k_release_buffered_frames()
1701 list_add_tail(&bf->list, &bf_q); in ath9k_release_buffered_frames()
1702 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1704 bf->bf_state.bf_type &= ~BUF_AGGR; in ath9k_release_buffered_frames()
1706 bf_tail->bf_next = bf; in ath9k_release_buffered_frames()
1709 nframes--; in ath9k_release_buffered_frames()
1711 TX_STAT_INC(sc, txq->axq_qnum, a_queued_hw); in ath9k_release_buffered_frames()
1713 if (an->sta && skb_queue_empty(&tid->retry_q)) in ath9k_release_buffered_frames()
1714 ieee80211_sta_set_buffered(an->sta, i, false); in ath9k_release_buffered_frames()
1716 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1725 info = IEEE80211_SKB_CB(bf_tail->bf_mpdu); in ath9k_release_buffered_frames()
1726 info->flags |= IEEE80211_TX_STATUS_EOSP; in ath9k_release_buffered_frames()
1741 struct ath_hw *ah = sc->sc_ah; in ath_txq_setup()
1760 * We mark tx descriptors to receive a DESC interrupt in ath_txq_setup()
1761 * when a tx queue gets deep; otherwise waiting for the in ath_txq_setup()
1766 * The only potential downside is if the tx queue backs in ath_txq_setup()
1768 * due to a lack of tx descriptors. in ath_txq_setup()
1770 * The UAPSD queue is an exception, since we take a desc- in ath_txq_setup()
1773 if (ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_txq_setup()
1783 if (axq_qnum == -1) { in ath_txq_setup()
1786 * normally on parts with too few tx queues in ath_txq_setup()
1791 struct ath_txq *txq = &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1793 txq->axq_qnum = axq_qnum; in ath_txq_setup()
1794 txq->mac80211_qnum = -1; in ath_txq_setup()
1795 txq->axq_link = NULL; in ath_txq_setup()
1796 __skb_queue_head_init(&txq->complete_q); in ath_txq_setup()
1797 INIT_LIST_HEAD(&txq->axq_q); in ath_txq_setup()
1798 spin_lock_init(&txq->axq_lock); in ath_txq_setup()
1799 txq->axq_depth = 0; in ath_txq_setup()
1800 txq->axq_ampdu_depth = 0; in ath_txq_setup()
1801 txq->axq_tx_inprogress = false; in ath_txq_setup()
1802 sc->tx.txqsetup |= 1<<axq_qnum; in ath_txq_setup()
1804 txq->txq_headidx = txq->txq_tailidx = 0; in ath_txq_setup()
1806 INIT_LIST_HEAD(&txq->txq_fifo[i]); in ath_txq_setup()
1808 return &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1814 struct ath_hw *ah = sc->sc_ah; in ath_txq_update()
1818 BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum); in ath_txq_update()
1821 qi.tqi_aifs = qinfo->tqi_aifs; in ath_txq_update()
1822 qi.tqi_cwmin = qinfo->tqi_cwmin; in ath_txq_update()
1823 qi.tqi_cwmax = qinfo->tqi_cwmax; in ath_txq_update()
1824 qi.tqi_burstTime = qinfo->tqi_burstTime; in ath_txq_update()
1825 qi.tqi_readyTime = qinfo->tqi_readyTime; in ath_txq_update()
1828 ath_err(ath9k_hw_common(sc->sc_ah), in ath_txq_update()
1830 error = -EIO; in ath_txq_update()
1841 struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon; in ath_cabq_update()
1842 int qnum = sc->beacon.cabq->axq_qnum; in ath_cabq_update()
1844 ath9k_hw_get_txq_props(sc->sc_ah, qnum, &qi); in ath_cabq_update()
1846 qi.tqi_readyTime = (TU_TO_USEC(cur_conf->beacon_interval) * in ath_cabq_update()
1858 struct ath_tx_status ts; in ath_drain_txq_list() local
1860 memset(&ts, 0, sizeof(ts)); in ath_drain_txq_list()
1861 ts.ts_status = ATH9K_TX_FLUSH; in ath_drain_txq_list()
1867 if (bf->bf_state.stale) { in ath_drain_txq_list()
1868 list_del(&bf->list); in ath_drain_txq_list()
1874 lastbf = bf->bf_lastbf; in ath_drain_txq_list()
1875 list_cut_position(&bf_head, list, &lastbf->list); in ath_drain_txq_list()
1876 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_drain_txq_list()
1881 * Drain a given TX queue (could be Beacon or Data)
1891 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_draintxq()
1892 int idx = txq->txq_tailidx; in ath_draintxq()
1894 while (!list_empty(&txq->txq_fifo[idx])) { in ath_draintxq()
1895 ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]); in ath_draintxq()
1899 txq->txq_tailidx = idx; in ath_draintxq()
1902 txq->axq_link = NULL; in ath_draintxq()
1903 txq->axq_tx_inprogress = false; in ath_draintxq()
1904 ath_drain_txq_list(sc, txq, &txq->axq_q); in ath_draintxq()
1912 struct ath_hw *ah = sc->sc_ah; in ath_drain_all_txq()
1913 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_drain_all_txq()
1918 if (test_bit(ATH_OP_INVALID, &common->op_flags)) in ath_drain_all_txq()
1928 if (!sc->tx.txq[i].axq_depth) in ath_drain_all_txq()
1931 if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum)) in ath_drain_all_txq()
1938 "Failed to stop TX DMA, queues=0x%03x!\n", npend); in ath_drain_all_txq()
1945 txq = &sc->tx.txq[i]; in ath_drain_all_txq()
1954 ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum); in ath_tx_cleanupq()
1955 sc->tx.txqsetup &= ~(1<<txq->axq_qnum); in ath_tx_cleanupq()
1963 struct ieee80211_hw *hw = sc->hw; in ath_txq_schedule()
1964 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_txq_schedule()
1969 if (txq->mac80211_qnum < 0) in ath_txq_schedule()
1972 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_txq_schedule()
1975 ieee80211_txq_schedule_start(hw, txq->mac80211_qnum); in ath_txq_schedule()
1976 spin_lock_bh(&sc->chan_lock); in ath_txq_schedule()
1979 if (sc->cur_chan->stopped) in ath_txq_schedule()
1982 while ((queue = ieee80211_next_txq(hw, txq->mac80211_qnum))) { in ath_txq_schedule()
1985 tid = (struct ath_atx_tid *)queue->drv_priv; in ath_txq_schedule()
1990 force = !skb_queue_empty(&tid->retry_q); in ath_txq_schedule()
1996 spin_unlock_bh(&sc->chan_lock); in ath_txq_schedule()
1997 ieee80211_txq_schedule_end(hw, txq->mac80211_qnum); in ath_txq_schedule()
2006 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
2008 spin_lock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2010 spin_unlock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2015 /* TX, DMA */
2025 struct ath_hw *ah = sc->sc_ah; in ath_tx_txqaddbuf()
2039 edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA); in ath_tx_txqaddbuf()
2041 bf_last = list_entry(head->prev, struct ath_buf, list); in ath_tx_txqaddbuf()
2044 txq->axq_qnum, txq->axq_depth); in ath_tx_txqaddbuf()
2046 if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) { in ath_tx_txqaddbuf()
2047 list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]); in ath_tx_txqaddbuf()
2048 INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH); in ath_tx_txqaddbuf()
2051 list_splice_tail_init(head, &txq->axq_q); in ath_tx_txqaddbuf()
2053 if (txq->axq_link) { in ath_tx_txqaddbuf()
2054 ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr); in ath_tx_txqaddbuf()
2056 txq->axq_qnum, txq->axq_link, in ath_tx_txqaddbuf()
2057 ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2061 txq->axq_link = bf_last->bf_desc; in ath_tx_txqaddbuf()
2065 TX_STAT_INC(sc, txq->axq_qnum, puttxbuf); in ath_tx_txqaddbuf()
2066 ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr); in ath_tx_txqaddbuf()
2068 txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2071 if (!edma || sc->tx99_state) { in ath_tx_txqaddbuf()
2072 TX_STAT_INC(sc, txq->axq_qnum, txstart); in ath_tx_txqaddbuf()
2073 ath9k_hw_txstart(ah, txq->axq_qnum); in ath_tx_txqaddbuf()
2078 txq->axq_depth++; in ath_tx_txqaddbuf()
2080 txq->axq_ampdu_depth++; in ath_tx_txqaddbuf()
2082 bf_last = bf->bf_lastbf; in ath_tx_txqaddbuf()
2083 bf = bf_last->bf_next; in ath_tx_txqaddbuf()
2084 bf_last->bf_next = NULL; in ath_tx_txqaddbuf()
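ath_tx_txqaddbuf() shows both queueing models side by side: EDMA chips push frame lists into one of the txq_fifo slots, while legacy chips maintain a single DMA descriptor chain whose tail link pointer (axq_link) is patched to point at each newly queued buffer before the queue is started. A sketch of the legacy chaining step with simplified types (puttxbuf stands in for ath9k_hw_puttxbuf):

```c
/* Sketch of legacy (non-EDMA) descriptor chaining: either patch the
 * previous tail's link field or hand the first buffer to the hardware,
 * then remember the new tail for the next enqueue. */
struct desc { unsigned int link; /* DMA address of the next desc */ };

struct txq {
    struct desc *axq_link;       /* tail of the current chain, or NULL */
};

static void txq_addbuf(struct txq *q, struct desc *d, unsigned int d_daddr,
                       void (*puttxbuf)(unsigned int daddr))
{
    if (q->axq_link)
        q->axq_link->link = d_daddr;   /* append to the running chain  */
    else
        puttxbuf(d_daddr);             /* idle queue: set head pointer */

    q->axq_link = d;                   /* new tail for the next frame  */
}
```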
2095 struct ath_buf *bf = fi->bf; in ath_tx_send_normal()
2098 list_add_tail(&bf->list, &bf_head); in ath_tx_send_normal()
2099 bf->bf_state.bf_type = 0; in ath_tx_send_normal()
2100 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2101 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_send_normal()
2105 bf->bf_next = NULL; in ath_tx_send_normal()
2106 bf->bf_lastbf = bf; in ath_tx_send_normal()
2107 ath_tx_fill_desc(sc, bf, txq, fi->framelen); in ath_tx_send_normal()
2109 TX_STAT_INC(sc, txq->axq_qnum, queued); in ath_tx_send_normal()
2118 struct ieee80211_key_conf *hw_key = tx_info->control.hw_key; in setup_frame_info()
2119 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in setup_frame_info()
2132 if (tx_info->control.vif && in setup_frame_info()
2133 tx_info->control.vif->bss_conf.use_short_preamble) in setup_frame_info()
2140 an = (struct ath_node *) sta->drv_priv; in setup_frame_info()
2142 if (tx_info->control.vif) { in setup_frame_info()
2143 struct ieee80211_vif *vif = tx_info->control.vif; in setup_frame_info()
2144 if (vif->bss_conf.txpower == INT_MIN) in setup_frame_info()
2146 txpower = 2 * vif->bss_conf.txpower; in setup_frame_info()
2150 sc = hw->priv; in setup_frame_info()
2152 txpower = sc->cur_chan->cur_txpower; in setup_frame_info()
2156 fi->txq = -1; in setup_frame_info()
2158 fi->keyix = hw_key->hw_key_idx; in setup_frame_info()
2159 else if (an && ieee80211_is_data(hdr->frame_control) && an->ps_key > 0) in setup_frame_info()
2160 fi->keyix = an->ps_key; in setup_frame_info()
2162 fi->keyix = ATH9K_TXKEYIX_INVALID; in setup_frame_info()
2163 fi->dyn_smps = sta && sta->deflink.smps_mode == IEEE80211_SMPS_DYNAMIC; in setup_frame_info()
2164 fi->keytype = keytype; in setup_frame_info()
2165 fi->framelen = framelen; in setup_frame_info()
2166 fi->tx_power = txpower; in setup_frame_info()
2170 fi->rtscts_rate = rate->hw_value; in setup_frame_info()
2172 fi->rtscts_rate |= rate->hw_value_short; in setup_frame_info()
2177 struct ath_hw *ah = sc->sc_ah; in ath_txchainmask_reduction()
2178 struct ath9k_channel *curchan = ah->curchan; in ath_txchainmask_reduction()
2180 if ((ah->caps.hw_caps & ATH9K_HW_CAP_APM) && IS_CHAN_5GHZ(curchan) && in ath_txchainmask_reduction()
2199 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_setup_buffer()
2201 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_setup_buffer()
2208 ath_dbg(common, XMIT, "TX buffers are full\n"); in ath_tx_setup_buffer()
2214 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2215 fragno = le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_FRAG; in ath_tx_setup_buffer()
2216 seqno = tid->seq_next; in ath_tx_setup_buffer()
2217 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2220 hdr->seq_ctrl |= cpu_to_le16(fragno); in ath_tx_setup_buffer()
2222 if (!ieee80211_has_morefrags(hdr->frame_control)) in ath_tx_setup_buffer()
2223 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
2225 bf->bf_state.seqno = seqno; in ath_tx_setup_buffer()
2228 bf->bf_mpdu = skb; in ath_tx_setup_buffer()
2230 bf->bf_buf_addr = dma_map_single(sc->dev, skb->data, in ath_tx_setup_buffer()
2231 skb->len, DMA_TO_DEVICE); in ath_tx_setup_buffer()
2232 if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) { in ath_tx_setup_buffer()
2233 bf->bf_mpdu = NULL; in ath_tx_setup_buffer()
2234 bf->bf_buf_addr = 0; in ath_tx_setup_buffer()
2235 ath_err(ath9k_hw_common(sc->sc_ah), in ath_tx_setup_buffer()
2236 "dma_mapping_error() on TX\n"); in ath_tx_setup_buffer()
2241 fi->bf = bf; in ath_tx_setup_buffer()
2248 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_assign_seq()
2250 struct ieee80211_vif *vif = info->control.vif; in ath_assign_seq()
2253 if (!(info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)) in ath_assign_seq()
2259 avp = (struct ath_vif *)vif->drv_priv; in ath_assign_seq()
2261 if (info->flags & IEEE80211_TX_CTL_FIRST_FRAGMENT) in ath_assign_seq()
2262 avp->seq_no += 0x10; in ath_assign_seq()
2264 hdr->seq_ctrl &= cpu_to_le16(IEEE80211_SCTL_FRAG); in ath_assign_seq()
2265 hdr->seq_ctrl |= cpu_to_le16(avp->seq_no); in ath_assign_seq()
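The Sequence Control field packs a 4-bit fragment number in bits 0-3 and a 12-bit sequence number in bits 4-15, which is why avp->seq_no advances by 0x10 per MSDU and why the code masks with IEEE80211_SCTL_FRAG before OR-ing the counter in. A worked stand-alone example:

```c
#include <stdio.h>

#define SCTL_FRAG 0x000f   /* fragment number: bits 0-3  */
#define SCTL_SEQ  0xfff0   /* sequence number: bits 4-15 */

int main(void)
{
    unsigned int seq_no = 0;         /* per-vif counter, seq in high bits */
    unsigned int seq_ctrl = 0x0003;  /* existing field, fragment 3        */

    seq_no += 0x10;                  /* advance sequence number by 1 */
    seq_ctrl = (seq_ctrl & SCTL_FRAG) | (seq_no & SCTL_SEQ);

    printf("seq=%u frag=%u\n", seq_ctrl >> 4, seq_ctrl & SCTL_FRAG);
    return 0;
}
```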
2271 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_tx_prepare()
2273 struct ieee80211_sta *sta = txctl->sta; in ath_tx_prepare()
2274 struct ieee80211_vif *vif = info->control.vif; in ath_tx_prepare()
2276 struct ath_softc *sc = hw->priv; in ath_tx_prepare()
2277 int frmlen = skb->len + FCS_LEN; in ath_tx_prepare()
2282 txctl->an = (struct ath_node *)sta->drv_priv; in ath_tx_prepare()
2283 else if (vif && ieee80211_is_data(hdr->frame_control)) { in ath_tx_prepare()
2284 avp = (void *)vif->drv_priv; in ath_tx_prepare()
2285 txctl->an = &avp->mcast_node; in ath_tx_prepare()
2288 if (info->control.hw_key) in ath_tx_prepare()
2289 frmlen += info->control.hw_key->icv_len; in ath_tx_prepare()
2291 ath_assign_seq(ath9k_hw_common(sc->sc_ah), skb); in ath_tx_prepare()
2293 if ((vif && vif->type != NL80211_IFTYPE_AP && in ath_tx_prepare()
2294 vif->type != NL80211_IFTYPE_AP_VLAN) || in ath_tx_prepare()
2295 !ieee80211_is_data(hdr->frame_control)) in ath_tx_prepare()
2296 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_prepare()
2299 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_prepare()
2301 if (padsize && skb->len > padpos) { in ath_tx_prepare()
2303 return -ENOMEM; in ath_tx_prepare()
2306 memmove(skb->data, skb->data + padsize, padpos); in ath_tx_prepare()
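The hardware expects the frame body to start on a 4-byte boundary, so when the 802.11 header length is not a multiple of 4 (a 26-byte QoS data header, for instance) the driver grows the skb by padsize = padpos & 3 bytes and shifts the header up; ath_tx_complete() further down removes the same pad before returning the skb to mac80211. A tiny demonstration of the alignment math:

```c
#include <stdio.h>

/* Sketch: the pad inserted between the 802.11 header and the body so the
 * body lands on a 4-byte boundary (QoS data header = 26 bytes -> pad 2). */
int main(void)
{
    unsigned int hdrlens[] = { 24, 26, 30 }; /* data, QoS data, 4-addr */

    for (int i = 0; i < 3; i++) {
        unsigned int padpos  = hdrlens[i];
        unsigned int padsize = padpos & 3;

        printf("hdrlen %u -> padsize %u\n", padpos, padsize);
    }
    return 0;
}
```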
2319 struct ieee80211_sta *sta = txctl->sta; in ath_tx_start()
2320 struct ieee80211_vif *vif = info->control.vif; in ath_tx_start()
2322 struct ath_softc *sc = hw->priv; in ath_tx_start()
2323 struct ath_txq *txq = txctl->txq; in ath_tx_start()
2330 ps_resp = !!(info->control.flags & IEEE80211_TX_CTRL_PS_RESPONSE); in ath_tx_start()
2337 * At this point, the vif, hw_key and sta pointers in the tx control in ath_tx_start()
2344 txq = sc->tx.uapsdq; in ath_tx_start()
2346 if (txctl->sta) { in ath_tx_start()
2347 an = (struct ath_node *) sta->drv_priv; in ath_tx_start()
2352 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2353 fi->txq = q; in ath_tx_start()
2354 ++txq->pending_frames; in ath_tx_start()
2360 if (txctl->paprd) in ath_tx_start()
2363 ieee80211_free_txskb(sc->hw, skb); in ath_tx_start()
2367 bf->bf_state.bfs_paprd = txctl->paprd; in ath_tx_start()
2369 if (txctl->paprd) in ath_tx_start()
2370 bf->bf_state.bfs_paprd_timestamp = jiffies; in ath_tx_start()
2384 struct ath_softc *sc = hw->priv; in ath_tx_cabq()
2386 .txq = sc->beacon.cabq in ath_tx_cabq()
2396 sc->cur_chan->beacon.beacon_interval * 1000 * in ath_tx_cabq()
2397 sc->cur_chan->beacon.dtim_period / ATH_BCBUF; in ath_tx_cabq()
2409 bf->bf_lastbf = bf; in ath_tx_cabq()
2411 ath_buf_set_rate(sc, bf, &info, fi->framelen, false); in ath_tx_cabq()
2414 bf_tail->bf_next = bf; in ath_tx_cabq()
2416 list_add_tail(&bf->list, &bf_q); in ath_tx_cabq()
2439 TX_STAT_INC(sc, txctl.txq->axq_qnum, queued); in ath_tx_cabq()
2444 /* TX Completion */
2452 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_complete()
2453 struct ieee80211_hdr * hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_complete()
2457 ath_dbg(common, XMIT, "TX complete: skb: %p\n", skb); in ath_tx_complete()
2459 if (sc->sc_ah->caldata) in ath_tx_complete()
2460 set_bit(PAPRD_PACKET_SENT, &sc->sc_ah->caldata->cal_flags); in ath_tx_complete()
2463 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_complete()
2464 tx_info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath_tx_complete()
2466 tx_info->flags |= IEEE80211_TX_STAT_ACK; in ath_tx_complete()
2469 if (tx_info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) { in ath_tx_complete()
2470 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_complete()
2472 if (padsize && skb->len > padpos + padsize) { in ath_tx_complete()
2477 memmove(skb->data + padsize, skb->data, padpos); in ath_tx_complete()
2482 spin_lock_irqsave(&sc->sc_pm_lock, flags); in ath_tx_complete()
2483 if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) { in ath_tx_complete()
2484 sc->ps_flags &= ~PS_WAIT_FOR_TX_ACK; in ath_tx_complete()
2486 "Going back to sleep after having received TX status (0x%lx)\n", in ath_tx_complete()
2487 sc->ps_flags & (PS_WAIT_FOR_BEACON | in ath_tx_complete()
2492 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); in ath_tx_complete()
2495 tx_info->status.status_driver_data[0] = sta; in ath_tx_complete()
2496 __skb_queue_tail(&txq->complete_q, skb); in ath_tx_complete()
2502 struct ath_tx_status *ts, int txok) in ath_tx_complete_buf() argument
2504 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_complete_buf()
2512 if (ts->ts_status & ATH9K_TXERR_FILT) in ath_tx_complete_buf()
2513 tx_info->flags |= IEEE80211_TX_STAT_TX_FILTERED; in ath_tx_complete_buf()
2515 dma_unmap_single(sc->dev, bf->bf_buf_addr, skb->len, DMA_TO_DEVICE); in ath_tx_complete_buf()
2516 bf->bf_buf_addr = 0; in ath_tx_complete_buf()
2517 if (sc->tx99_state) in ath_tx_complete_buf()
2520 if (bf->bf_state.bfs_paprd) { in ath_tx_complete_buf()
2522 bf->bf_state.bfs_paprd_timestamp + in ath_tx_complete_buf()
2526 complete(&sc->paprd_complete); in ath_tx_complete_buf()
2528 ath_debug_stat_tx(sc, bf, ts, txq, tx_flags); in ath_tx_complete_buf()
2532 /* At this point, skb (bf->bf_mpdu) is consumed...make sure we don't in ath_tx_complete_buf()
2535 bf->bf_mpdu = NULL; in ath_tx_complete_buf()
2540 spin_lock_irqsave(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2541 list_splice_tail_init(bf_q, &sc->tx.txbuf); in ath_tx_complete_buf()
2542 spin_unlock_irqrestore(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2547 void *ptr = &tx_info->status; in ath_clear_tx_status()
2549 memset(ptr + sizeof(tx_info->status.rates), 0, in ath_clear_tx_status()
2550 sizeof(tx_info->status) - in ath_clear_tx_status()
2551 sizeof(tx_info->status.rates) - in ath_clear_tx_status()
2552 sizeof(tx_info->status.status_driver_data)); in ath_clear_tx_status()
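ath_clear_tx_status() zeroes only the middle of tx_info->status, leaving the leading rates array and the trailing status_driver_data (where ath9k keeps its own state) intact. A stand-alone sketch of the same partial memset, using a hypothetical layout in place of the real ieee80211_tx_info:

```c
#include <stdio.h>
#include <string.h>

/* Hypothetical layout standing in for ieee80211_tx_info.status: rates at
 * the front, driver scratch data at the back, volatile fields between. */
struct status {
    int   rates[4];
    int   ack_signal;
    int   ampdu_len;
    void *status_driver_data[4];
};

/* Sketch of ath_clear_tx_status(): wipe only the middle of the struct,
 * preserving the rate table and the driver's private pointers. */
static void clear_status(struct status *st)
{
    char *ptr = (char *)st;

    memset(ptr + sizeof(st->rates), 0,
           sizeof(*st) - sizeof(st->rates) -
           sizeof(st->status_driver_data));
}

int main(void)
{
    struct status st = { .rates = { 7 }, .ack_signal = -40,
                         .status_driver_data = { &st } };

    clear_status(&st);
    printf("rates[0]=%d ack=%d drv=%p\n",
           st.rates[0], st.ack_signal, st.status_driver_data[0]);
    return 0;
}
```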
2556 struct ath_tx_status *ts, int nframes, int nbad, in ath_tx_rc_status() argument
2559 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_rc_status()
2560 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_rc_status()
2562 struct ieee80211_hw *hw = sc->hw; in ath_tx_rc_status()
2563 struct ath_hw *ah = sc->sc_ah; in ath_tx_rc_status()
2569 tx_info->status.ack_signal = ts->ts_rssi; in ath_tx_rc_status()
2571 tx_rateindex = ts->ts_rateindex; in ath_tx_rc_status()
2572 WARN_ON(tx_rateindex >= hw->max_rates); in ath_tx_rc_status()
2574 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_rc_status()
2575 tx_info->flags |= IEEE80211_TX_STAT_AMPDU; in ath_tx_rc_status()
2579 tx_info->status.ampdu_len = nframes; in ath_tx_rc_status()
2580 tx_info->status.ampdu_ack_len = nframes - nbad; in ath_tx_rc_status()
2582 tx_info->status.rates[tx_rateindex].count = ts->ts_longretry + 1; in ath_tx_rc_status()
2584 for (i = tx_rateindex + 1; i < hw->max_rates; i++) { in ath_tx_rc_status()
2585 tx_info->status.rates[i].count = 0; in ath_tx_rc_status()
2586 tx_info->status.rates[i].idx = -1; in ath_tx_rc_status()
2589 if ((ts->ts_status & ATH9K_TXERR_FILT) == 0 && in ath_tx_rc_status()
2590 (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) == 0) { in ath_tx_rc_status()
2593 * retry only if max frame trigger level has been reached in ath_tx_rc_status()
2596 * hw->max_rate_tries times to affect how rate control updates in ath_tx_rc_status()
2603 if (unlikely(ts->ts_flags & (ATH9K_TX_DATA_UNDERRUN | in ath_tx_rc_status()
2605 ieee80211_is_data(hdr->frame_control) && in ath_tx_rc_status()
2606 ah->tx_trig_level >= sc->sc_ah->config.max_txtrig_level) in ath_tx_rc_status()
2607 tx_info->status.rates[tx_rateindex].count = in ath_tx_rc_status()
2608 hw->max_rate_tries; in ath_tx_rc_status()
2614 struct ath_hw *ah = sc->sc_ah; in ath_tx_processq()
2619 struct ath_tx_status ts; in ath_tx_processq() local
2622 ath_dbg(common, QUEUE, "tx queue %d (%x), link %p\n", in ath_tx_processq()
2623 txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum), in ath_tx_processq()
2624 txq->axq_link); in ath_tx_processq()
2628 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_processq()
2631 if (list_empty(&txq->axq_q)) { in ath_tx_processq()
2632 txq->axq_link = NULL; in ath_tx_processq()
2636 bf = list_first_entry(&txq->axq_q, struct ath_buf, list); in ath_tx_processq()
2640 * after sw writes TxE and before hw re-load the last in ath_tx_processq()
2643 * holding descriptor - software does so by marking in ath_tx_processq()
2647 if (bf->bf_state.stale) { in ath_tx_processq()
2649 if (list_is_last(&bf_held->list, &txq->axq_q)) in ath_tx_processq()
2652 bf = list_entry(bf_held->list.next, struct ath_buf, in ath_tx_processq()
2656 lastbf = bf->bf_lastbf; in ath_tx_processq()
2657 ds = lastbf->bf_desc; in ath_tx_processq()
2659 memset(&ts, 0, sizeof(ts)); in ath_tx_processq()
2660 status = ath9k_hw_txprocdesc(ah, ds, &ts); in ath_tx_processq()
2661 if (status == -EINPROGRESS) in ath_tx_processq()
2664 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_processq()
2671 lastbf->bf_state.stale = true; in ath_tx_processq()
2673 if (!list_is_singular(&lastbf->list)) in ath_tx_processq()
2675 &txq->axq_q, lastbf->list.prev); in ath_tx_processq()
2678 list_del(&bf_held->list); in ath_tx_processq()
2682 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_processq()
2689 struct ath_hw *ah = sc->sc_ah; in ath_tx_tasklet()
2690 u32 qcumask = ((1 << ATH9K_NUM_TX_QUEUES) - 1) & ah->intr_txqs; in ath_tx_tasklet()
2696 ath_tx_processq(sc, &sc->tx.txq[i]); in ath_tx_tasklet()
2703 struct ath_tx_status ts; in ath_tx_edma_tasklet() local
2704 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_edma_tasklet()
2705 struct ath_hw *ah = sc->sc_ah; in ath_tx_edma_tasklet()
2714 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_edma_tasklet()
2717 status = ath9k_hw_txprocdesc(ah, NULL, (void *)&ts); in ath_tx_edma_tasklet()
2718 if (status == -EINPROGRESS) in ath_tx_edma_tasklet()
2720 if (status == -EIO) { in ath_tx_edma_tasklet()
2721 ath_dbg(common, XMIT, "Error processing tx status\n"); in ath_tx_edma_tasklet()
2726 if (ts.qid == sc->beacon.beaconq) { in ath_tx_edma_tasklet()
2727 sc->beacon.tx_processed = true; in ath_tx_edma_tasklet()
2728 sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK); in ath_tx_edma_tasklet()
2739 txq = &sc->tx.txq[ts.qid]; in ath_tx_edma_tasklet()
2743 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_edma_tasklet()
2745 fifo_list = &txq->txq_fifo[txq->txq_tailidx]; in ath_tx_edma_tasklet()
2752 if (bf->bf_state.stale) { in ath_tx_edma_tasklet()
2753 list_del(&bf->list); in ath_tx_edma_tasklet()
2758 lastbf = bf->bf_lastbf; in ath_tx_edma_tasklet()
2761 if (list_is_last(&lastbf->list, fifo_list)) { in ath_tx_edma_tasklet()
2763 INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH); in ath_tx_edma_tasklet()
2765 if (!list_empty(&txq->axq_q)) { in ath_tx_edma_tasklet()
2769 txq->axq_link = NULL; in ath_tx_edma_tasklet()
2770 list_splice_tail_init(&txq->axq_q, &bf_q); in ath_tx_edma_tasklet()
2774 lastbf->bf_state.stale = true; in ath_tx_edma_tasklet()
2777 lastbf->list.prev); in ath_tx_edma_tasklet()
2780 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_edma_tasklet()
2792 struct ath_descdma *dd = &sc->txsdma; in ath_txstatus_setup()
2793 u8 txs_len = sc->sc_ah->caps.txs_len; in ath_txstatus_setup()
2795 dd->dd_desc_len = size * txs_len; in ath_txstatus_setup()
2796 dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len, in ath_txstatus_setup()
2797 &dd->dd_desc_paddr, GFP_KERNEL); in ath_txstatus_setup()
2798 if (!dd->dd_desc) in ath_txstatus_setup()
2799 return -ENOMEM; in ath_txstatus_setup()
2810 ath9k_hw_setup_statusring(sc->sc_ah, sc->txsdma.dd_desc, in ath_tx_edma_init()
2811 sc->txsdma.dd_desc_paddr, in ath_tx_edma_init()
2819 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_init()
2822 spin_lock_init(&sc->tx.txbuflock); in ath_tx_init()
2824 error = ath_descdma_setup(sc, &sc->tx.txdma, &sc->tx.txbuf, in ath_tx_init()
2825 "tx", nbufs, 1, 1); in ath_tx_init()
2828 "Failed to allocate tx descriptors: %d\n", error); in ath_tx_init()
2832 error = ath_descdma_setup(sc, &sc->beacon.bdma, &sc->beacon.bbuf, in ath_tx_init()
2840 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) in ath_tx_init()
2853 tid->an = an; in ath_tx_node_init()
2854 tid->tidno = tidno; in ath_tx_node_init()
2855 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2856 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2857 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2858 tid->active = false; in ath_tx_node_init()
2859 tid->clear_ps_filter = true; in ath_tx_node_init()
2860 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2861 INIT_LIST_HEAD(&tid->list); in ath_tx_node_init()
2863 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2865 if (!an->sta) in ath_tx_node_init()
2880 txq = tid->txq; in ath_tx_node_cleanup()
2884 if (!list_empty(&tid->list)) in ath_tx_node_cleanup()
2885 list_del_init(&tid->list); in ath_tx_node_cleanup()
2888 tid->active = false; in ath_tx_node_cleanup()
2892 if (!an->sta) in ath_tx_node_cleanup()
2904 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath9k_tx99_send()
2906 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_tx99_send()
2910 padpos = ieee80211_hdrlen(hdr->frame_control); in ath9k_tx99_send()
2913 if (padsize && skb->len > padpos) { in ath9k_tx99_send()
2917 return -EINVAL; in ath9k_tx99_send()
2921 memmove(skb->data, skb->data + padsize, padpos); in ath9k_tx99_send()
2924 fi->keyix = ATH9K_TXKEYIX_INVALID; in ath9k_tx99_send()
2925 fi->framelen = skb->len + FCS_LEN; in ath9k_tx99_send()
2926 fi->keytype = ATH9K_KEY_TYPE_CLEAR; in ath9k_tx99_send()
2928 bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
2931 return -EINVAL; in ath9k_tx99_send()
2934 ath_set_rates(sc->tx99_vif, NULL, bf); in ath9k_tx99_send()
2936 ath9k_hw_set_desc_link(sc->sc_ah, bf->bf_desc, bf->bf_daddr); in ath9k_tx99_send()
2937 ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum); in ath9k_tx99_send()
2939 ath_tx_send_normal(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()