Lines Matching full:ab

23 static u8 *ath11k_dp_rx_h_80211_hdr(struct ath11k_base *ab, struct hal_rx_desc *desc)  in ath11k_dp_rx_h_80211_hdr()  argument
25 return ab->hw_params.hw_ops->rx_desc_get_hdr_status(desc); in ath11k_dp_rx_h_80211_hdr()
28 static enum hal_encrypt_type ath11k_dp_rx_h_mpdu_start_enctype(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_enctype() argument
31 if (!ab->hw_params.hw_ops->rx_desc_encrypt_valid(desc)) in ath11k_dp_rx_h_mpdu_start_enctype()
34 return ab->hw_params.hw_ops->rx_desc_get_encrypt_type(desc); in ath11k_dp_rx_h_mpdu_start_enctype()
37 static u8 ath11k_dp_rx_h_msdu_start_decap_type(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_decap_type() argument
40 return ab->hw_params.hw_ops->rx_desc_get_decap_type(desc); in ath11k_dp_rx_h_msdu_start_decap_type()
43 static u8 ath11k_dp_rx_h_msdu_start_mesh_ctl_present(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_mesh_ctl_present() argument
46 return ab->hw_params.hw_ops->rx_desc_get_mesh_ctl(desc); in ath11k_dp_rx_h_msdu_start_mesh_ctl_present()
49 static bool ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid() argument
52 return ab->hw_params.hw_ops->rx_desc_get_mpdu_seq_ctl_vld(desc); in ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid()
55 static bool ath11k_dp_rx_h_mpdu_start_fc_valid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_fc_valid() argument
58 return ab->hw_params.hw_ops->rx_desc_get_mpdu_fc_valid(desc); in ath11k_dp_rx_h_mpdu_start_fc_valid()
61 static bool ath11k_dp_rx_h_mpdu_start_more_frags(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_more_frags() argument
66 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params.hal_desc_sz); in ath11k_dp_rx_h_mpdu_start_more_frags()
70 static u16 ath11k_dp_rx_h_mpdu_start_frag_no(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_frag_no() argument
75 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params.hal_desc_sz); in ath11k_dp_rx_h_mpdu_start_frag_no()
79 static u16 ath11k_dp_rx_h_mpdu_start_seq_no(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_seq_no() argument
82 return ab->hw_params.hw_ops->rx_desc_get_mpdu_start_seq_no(desc); in ath11k_dp_rx_h_mpdu_start_seq_no()
85 static void *ath11k_dp_rx_get_attention(struct ath11k_base *ab, in ath11k_dp_rx_get_attention() argument
88 return ab->hw_params.hw_ops->rx_desc_get_attention(desc); in ath11k_dp_rx_get_attention()
145 static u16 ath11k_dp_rx_h_msdu_start_msdu_len(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_msdu_len() argument
148 return ab->hw_params.hw_ops->rx_desc_get_msdu_len(desc); in ath11k_dp_rx_h_msdu_start_msdu_len()
151 static u8 ath11k_dp_rx_h_msdu_start_sgi(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_sgi() argument
154 return ab->hw_params.hw_ops->rx_desc_get_msdu_sgi(desc); in ath11k_dp_rx_h_msdu_start_sgi()
157 static u8 ath11k_dp_rx_h_msdu_start_rate_mcs(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_rate_mcs() argument
160 return ab->hw_params.hw_ops->rx_desc_get_msdu_rate_mcs(desc); in ath11k_dp_rx_h_msdu_start_rate_mcs()
163 static u8 ath11k_dp_rx_h_msdu_start_rx_bw(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_rx_bw() argument
166 return ab->hw_params.hw_ops->rx_desc_get_msdu_rx_bw(desc); in ath11k_dp_rx_h_msdu_start_rx_bw()
169 static u32 ath11k_dp_rx_h_msdu_start_freq(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_freq() argument
172 return ab->hw_params.hw_ops->rx_desc_get_msdu_freq(desc); in ath11k_dp_rx_h_msdu_start_freq()
175 static u8 ath11k_dp_rx_h_msdu_start_pkt_type(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_pkt_type() argument
178 return ab->hw_params.hw_ops->rx_desc_get_msdu_pkt_type(desc); in ath11k_dp_rx_h_msdu_start_pkt_type()
181 static u8 ath11k_dp_rx_h_msdu_start_nss(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_nss() argument
184 return hweight8(ab->hw_params.hw_ops->rx_desc_get_msdu_nss(desc)); in ath11k_dp_rx_h_msdu_start_nss()
187 static u8 ath11k_dp_rx_h_mpdu_start_tid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_tid() argument
190 return ab->hw_params.hw_ops->rx_desc_get_mpdu_tid(desc); in ath11k_dp_rx_h_mpdu_start_tid()
193 static u16 ath11k_dp_rx_h_mpdu_start_peer_id(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_peer_id() argument
196 return ab->hw_params.hw_ops->rx_desc_get_mpdu_peer_id(desc); in ath11k_dp_rx_h_mpdu_start_peer_id()
199 static u8 ath11k_dp_rx_h_msdu_end_l3pad(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_l3pad() argument
202 return ab->hw_params.hw_ops->rx_desc_get_l3_pad_bytes(desc); in ath11k_dp_rx_h_msdu_end_l3pad()
205 static bool ath11k_dp_rx_h_msdu_end_first_msdu(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_first_msdu() argument
208 return ab->hw_params.hw_ops->rx_desc_get_first_msdu(desc); in ath11k_dp_rx_h_msdu_end_first_msdu()
211 static bool ath11k_dp_rx_h_msdu_end_last_msdu(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_last_msdu() argument
214 return ab->hw_params.hw_ops->rx_desc_get_last_msdu(desc); in ath11k_dp_rx_h_msdu_end_last_msdu()
217 static void ath11k_dp_rx_desc_end_tlv_copy(struct ath11k_base *ab, in ath11k_dp_rx_desc_end_tlv_copy() argument
221 ab->hw_params.hw_ops->rx_desc_copy_attn_end_tlv(fdesc, ldesc); in ath11k_dp_rx_desc_end_tlv_copy()
230 static u8 *ath11k_dp_rxdesc_get_80211hdr(struct ath11k_base *ab, in ath11k_dp_rxdesc_get_80211hdr() argument
235 rx_pkt_hdr = ab->hw_params.hw_ops->rx_desc_get_msdu_payload(rx_desc); in ath11k_dp_rxdesc_get_80211hdr()
240 static bool ath11k_dp_rxdesc_mpdu_valid(struct ath11k_base *ab, in ath11k_dp_rxdesc_mpdu_valid() argument
245 tlv_tag = ab->hw_params.hw_ops->rx_desc_get_mpdu_start_tag(rx_desc); in ath11k_dp_rxdesc_mpdu_valid()
250 static u32 ath11k_dp_rxdesc_get_ppduid(struct ath11k_base *ab, in ath11k_dp_rxdesc_get_ppduid() argument
253 return ab->hw_params.hw_ops->rx_desc_get_mpdu_ppdu_id(rx_desc); in ath11k_dp_rxdesc_get_ppduid()
256 static void ath11k_dp_rxdesc_set_msdu_len(struct ath11k_base *ab, in ath11k_dp_rxdesc_set_msdu_len() argument
260 ab->hw_params.hw_ops->rx_desc_set_msdu_len(desc, len); in ath11k_dp_rxdesc_set_msdu_len()
263 static bool ath11k_dp_rx_h_attn_is_mcbc(struct ath11k_base *ab, in ath11k_dp_rx_h_attn_is_mcbc() argument
266 struct rx_attention *attn = ath11k_dp_rx_get_attention(ab, desc); in ath11k_dp_rx_h_attn_is_mcbc()
268 return ath11k_dp_rx_h_msdu_end_first_msdu(ab, desc) && in ath11k_dp_rx_h_attn_is_mcbc()
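
The descriptor helpers listed above all share one shape; the sketch below restates that dispatch pattern in isolation. It is illustrative only: the wrapper name is hypothetical, and the hw_params/hw_ops field layout is taken on trust from the calls shown in the listing.

static inline u16 example_rx_msdu_len(struct ath11k_base *ab,
                                      struct hal_rx_desc *desc)
{
        /* hw_ops is the per-chip descriptor-accessor table; routing every
         * field read through it keeps the generic RX path independent of
         * the hardware rx descriptor layout
         */
        return ab->hw_params.hw_ops->rx_desc_get_msdu_len(desc);
}
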
275 struct ath11k_base *ab = from_timer(ab, t, mon_reap_timer); in ath11k_dp_service_mon_ring() local
278 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) in ath11k_dp_service_mon_ring()
279 ath11k_dp_rx_process_mon_rings(ab, i, NULL, DP_MON_SERVICE_BUDGET); in ath11k_dp_service_mon_ring()
281 mod_timer(&ab->mon_reap_timer, jiffies + in ath11k_dp_service_mon_ring()
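
ath11k_dp_service_mon_ring() above is a self-rearming timer callback; a minimal sketch of that shape follows. The re-arm interval is elided in the listing, so the value below is a placeholder, and the function name is hypothetical.

#define EXAMPLE_MON_REAP_INTERVAL_MS 100 /* placeholder; real interval elided above */

static void example_service_mon_ring(struct timer_list *t)
{
        struct ath11k_base *ab = from_timer(ab, t, mon_reap_timer);
        int i;

        /* reap every rxdma ring with a bounded budget, then re-arm */
        for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++)
                ath11k_dp_rx_process_mon_rings(ab, i, NULL, DP_MON_SERVICE_BUDGET);

        mod_timer(&ab->mon_reap_timer,
                  jiffies + msecs_to_jiffies(EXAMPLE_MON_REAP_INTERVAL_MS));
}
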
285 static int ath11k_dp_purge_mon_ring(struct ath11k_base *ab) in ath11k_dp_purge_mon_ring() argument
291 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) in ath11k_dp_purge_mon_ring()
292 reaped += ath11k_dp_rx_process_mon_rings(ab, i, in ath11k_dp_purge_mon_ring()
302 ath11k_warn(ab, "dp mon ring purge timeout"); in ath11k_dp_purge_mon_ring()
308 int ath11k_dp_rxbufs_replenish(struct ath11k_base *ab, int mac_id, in ath11k_dp_rxbufs_replenish() argument
324 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rxbufs_replenish()
328 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rxbufs_replenish()
330 num_free = ath11k_hal_srng_src_num_free(ab, srng, true); in ath11k_dp_rxbufs_replenish()
350 paddr = dma_map_single(ab->dev, skb->data, in ath11k_dp_rxbufs_replenish()
353 if (dma_mapping_error(ab->dev, paddr)) in ath11k_dp_rxbufs_replenish()
363 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rxbufs_replenish()
377 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rxbufs_replenish()
388 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rxbufs_replenish()
393 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rxbufs_replenish()
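
ath11k_dp_rxbufs_replenish() is the canonical SRNG refill loop; the skeleton below is reconstructed from the calls visible above. The signature is trimmed, the buffer-cookie handling and descriptor write are elided, and the srng->lock usage is an assumption about the driver's usual ring locking rather than something shown in the listing.

static int example_rxbufs_replenish(struct ath11k_base *ab,
                                    struct dp_rxdma_ring *rx_ring,
                                    int req_entries)
{
        struct hal_srng *srng;
        struct sk_buff *skb;
        dma_addr_t paddr;
        u32 *desc;
        int num_free;

        srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id];

        spin_lock_bh(&srng->lock);
        ath11k_hal_srng_access_begin(ab, srng);

        num_free = ath11k_hal_srng_src_num_free(ab, srng, true);

        while (req_entries > 0 && num_free > 0) {
                skb = dev_alloc_skb(DP_RX_BUFFER_SIZE); /* alignment headroom elided */
                if (!skb)
                        break;

                paddr = dma_map_single(ab->dev, skb->data,
                                       skb->len + skb_tailroom(skb),
                                       DMA_FROM_DEVICE);
                if (dma_mapping_error(ab->dev, paddr))
                        goto fail_free_skb;

                desc = ath11k_hal_srng_src_get_next_entry(ab, srng);
                if (!desc)
                        goto fail_dma_unmap;

                /* write paddr + buffer cookie into *desc (details elided) */
                req_entries--;
                num_free--;
        }

        ath11k_hal_srng_access_end(ab, srng);
        spin_unlock_bh(&srng->lock);
        return 0;

fail_dma_unmap:
        dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb),
                         DMA_FROM_DEVICE);
fail_free_skb:
        dev_kfree_skb_any(skb);
        ath11k_hal_srng_access_end(ab, srng);
        spin_unlock_bh(&srng->lock);
        return -ENOMEM;
}
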
413 dma_unmap_single(ar->ab->dev, ATH11K_SKB_RXCB(skb)->paddr, in ath11k_dp_rxdma_buf_ring_free()
424 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rxdma_buf_ring_free()
435 dma_unmap_single(ar->ab->dev, ATH11K_SKB_RXCB(skb)->paddr, in ath11k_dp_rxdma_buf_ring_free()
449 struct ath11k_base *ab = ar->ab; in ath11k_dp_rxdma_pdev_buf_free() local
458 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rxdma_pdev_buf_free()
474 ath11k_hal_srng_get_entrysize(ar->ab, ringtype); in ath11k_dp_rxdma_ring_buf_setup()
477 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, rx_ring, num_entries, in ath11k_dp_rxdma_ring_buf_setup()
485 struct ath11k_base *ab = ar->ab; in ath11k_dp_rxdma_pdev_buf_setup() local
491 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rxdma_pdev_buf_setup()
496 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rxdma_pdev_buf_setup()
507 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_pdev_srng_free() local
510 ath11k_dp_srng_cleanup(ab, &dp->rx_refill_buf_ring.refill_buf_ring); in ath11k_dp_rx_pdev_srng_free()
512 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_free()
513 if (ab->hw_params.rx_mac_buf_ring) in ath11k_dp_rx_pdev_srng_free()
514 ath11k_dp_srng_cleanup(ab, &dp->rx_mac_buf_ring[i]); in ath11k_dp_rx_pdev_srng_free()
516 ath11k_dp_srng_cleanup(ab, &dp->rxdma_err_dst_ring[i]); in ath11k_dp_rx_pdev_srng_free()
517 ath11k_dp_srng_cleanup(ab, in ath11k_dp_rx_pdev_srng_free()
521 ath11k_dp_srng_cleanup(ab, &dp->rxdma_mon_buf_ring.refill_buf_ring); in ath11k_dp_rx_pdev_srng_free()
524 void ath11k_dp_pdev_reo_cleanup(struct ath11k_base *ab) in ath11k_dp_pdev_reo_cleanup() argument
526 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_pdev_reo_cleanup()
530 ath11k_dp_srng_cleanup(ab, &dp->reo_dst_ring[i]); in ath11k_dp_pdev_reo_cleanup()
533 int ath11k_dp_pdev_reo_setup(struct ath11k_base *ab) in ath11k_dp_pdev_reo_setup() argument
535 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_pdev_reo_setup()
540 ret = ath11k_dp_srng_setup(ab, &dp->reo_dst_ring[i], in ath11k_dp_pdev_reo_setup()
544 ath11k_warn(ab, "failed to setup reo_dst_ring\n"); in ath11k_dp_pdev_reo_setup()
552 ath11k_dp_pdev_reo_cleanup(ab); in ath11k_dp_pdev_reo_setup()
560 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_pdev_srng_alloc() local
565 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
570 ath11k_warn(ar->ab, "failed to setup rx_refill_buf_ring\n"); in ath11k_dp_rx_pdev_srng_alloc()
574 if (ar->ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_rx_pdev_srng_alloc()
575 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
576 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
581 ath11k_warn(ar->ab, "failed to setup rx_mac_buf_ring %d\n", in ath11k_dp_rx_pdev_srng_alloc()
588 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
589 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_err_dst_ring[i], in ath11k_dp_rx_pdev_srng_alloc()
593 ath11k_warn(ar->ab, "failed to setup rxdma_err_dst_ring %d\n", i); in ath11k_dp_rx_pdev_srng_alloc()
598 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
600 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
605 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
616 if (!ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_pdev_srng_alloc()
618 timer_setup(&ar->ab->mon_reap_timer, in ath11k_dp_rx_pdev_srng_alloc()
623 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
628 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
633 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_mon_dst_ring, in ath11k_dp_rx_pdev_srng_alloc()
637 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
642 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_mon_desc_ring, in ath11k_dp_rx_pdev_srng_alloc()
646 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
654 void ath11k_dp_reo_cmd_list_cleanup(struct ath11k_base *ab) in ath11k_dp_reo_cmd_list_cleanup() argument
656 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_reo_cmd_list_cleanup()
663 dma_unmap_single(ab->dev, cmd->data.paddr, in ath11k_dp_reo_cmd_list_cleanup()
673 dma_unmap_single(ab->dev, cmd_cache->data.paddr, in ath11k_dp_reo_cmd_list_cleanup()
687 ath11k_warn(dp->ab, "failed to flush rx tid hw desc, tid %d status %d\n", in ath11k_dp_reo_cmd_free()
690 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cmd_free()
695 static void ath11k_dp_reo_cache_flush(struct ath11k_base *ab, in ath11k_dp_reo_cache_flush() argument
709 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
713 ath11k_warn(ab, in ath11k_dp_reo_cache_flush()
722 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
726 ath11k_err(ab, "failed to send HAL_REO_CMD_FLUSH_CACHE cmd, tid %d (%d)\n", in ath11k_dp_reo_cache_flush()
728 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cache_flush()
737 struct ath11k_base *ab = dp->ab; in ath11k_dp_rx_tid_del_func() local
745 ath11k_warn(ab, "failed to delete rx tid %d hw descriptor %d\n", in ath11k_dp_rx_tid_del_func()
771 ath11k_dp_reo_cache_flush(ab, &elem->data); in ath11k_dp_rx_tid_del_func()
780 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_del_func()
799 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_delete()
803 ath11k_err(ar->ab, "failed to send HAL_REO_CMD_UPDATE_RX_QUEUE cmd, tid %d (%d)\n", in ath11k_peer_rx_tid_delete()
805 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_peer_rx_tid_delete()
813 static int ath11k_dp_rx_link_desc_return(struct ath11k_base *ab, in ath11k_dp_rx_link_desc_return() argument
817 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_rx_link_desc_return()
822 srng = &ab->hal.srng_list[dp->wbm_desc_rel_ring.ring_id]; in ath11k_dp_rx_link_desc_return()
826 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_link_desc_return()
828 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_link_desc_return()
834 ath11k_hal_rx_msdu_link_desc_set(ab, (void *)desc, (void *)link_desc, in ath11k_dp_rx_link_desc_return()
838 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_link_desc_return()
847 struct ath11k_base *ab = rx_tid->ab; in ath11k_dp_rx_frags_cleanup() local
849 lockdep_assert_held(&ab->base_lock); in ath11k_dp_rx_frags_cleanup()
853 ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc, in ath11k_dp_rx_frags_cleanup()
870 lockdep_assert_held(&ar->ab->base_lock); in ath11k_peer_frags_flush()
875 spin_unlock_bh(&ar->ab->base_lock); in ath11k_peer_frags_flush()
877 spin_lock_bh(&ar->ab->base_lock); in ath11k_peer_frags_flush()
888 lockdep_assert_held(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
896 spin_unlock_bh(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
898 spin_lock_bh(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
922 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_reo_update()
926 ath11k_warn(ar->ab, "failed to update rx tid queue, tid %d (%d)\n", in ath11k_peer_rx_tid_reo_update()
936 static void ath11k_dp_rx_tid_mem_free(struct ath11k_base *ab, in ath11k_dp_rx_tid_mem_free() argument
942 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_tid_mem_free()
944 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_dp_rx_tid_mem_free()
946 ath11k_warn(ab, "failed to find the peer to free up rx tid mem\n"); in ath11k_dp_rx_tid_mem_free()
954 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_mem_free()
961 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_tid_mem_free()
968 struct ath11k_base *ab = ar->ab; in ath11k_peer_rx_tid_setup() local
977 spin_lock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
979 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_peer_rx_tid_setup()
981 ath11k_warn(ab, "failed to find the peer to set up rx tid\n"); in ath11k_peer_rx_tid_setup()
982 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
992 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
994 ath11k_warn(ab, "failed to update reo for rx tid %d\n", tid); in ath11k_peer_rx_tid_setup()
1002 ath11k_warn(ab, "failed to send wmi command to update rx reorder queue, tid :%d (%d)\n", in ath11k_peer_rx_tid_setup()
1021 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1030 paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz, in ath11k_peer_rx_tid_setup()
1033 ret = dma_mapping_error(ab->dev, paddr); in ath11k_peer_rx_tid_setup()
1035 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1044 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1049 ath11k_warn(ar->ab, "failed to setup rx reorder queue, tid :%d (%d)\n", in ath11k_peer_rx_tid_setup()
1051 ath11k_dp_rx_tid_mem_free(ab, peer_mac, vdev_id, tid); in ath11k_peer_rx_tid_setup()
1065 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_ampdu_start() local
1074 ath11k_warn(ab, "failed to setup rx tid %d\n", ret); in ath11k_dp_rx_ampdu_start()
1082 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_ampdu_stop() local
1090 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1092 peer = ath11k_peer_find(ab, vdev_id, params->sta->addr); in ath11k_dp_rx_ampdu_stop()
1094 ath11k_warn(ab, "failed to find the peer to stop rx aggregation\n"); in ath11k_dp_rx_ampdu_stop()
1095 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1103 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1108 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1110 ath11k_warn(ab, "failed to update reo for rx tid %d: %d\n", in ath11k_dp_rx_ampdu_stop()
1119 ath11k_warn(ab, "failed to send wmi to delete rx tid %d\n", in ath11k_dp_rx_ampdu_stop()
1131 struct ath11k_base *ab = ar->ab; in ath11k_dp_peer_rx_pn_replay_config() local
1167 spin_lock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
1169 peer = ath11k_peer_find(ab, arvif->vdev_id, peer_addr); in ath11k_dp_peer_rx_pn_replay_config()
1171 ath11k_warn(ab, "failed to find the peer to configure pn replay detection\n"); in ath11k_dp_peer_rx_pn_replay_config()
1172 spin_unlock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
1182 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_peer_rx_pn_replay_config()
1186 ath11k_warn(ab, "failed to configure rx tid %d queue for pn replay detection %d\n", in ath11k_dp_peer_rx_pn_replay_config()
1192 spin_unlock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
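
The spin_lock_bh(&ab->base_lock) / ath11k_peer_find() sequence above recurs throughout this file; the sketch below isolates it. The function name and error code are illustrative, and the per-tid work (here, the HAL_REO_CMD_UPDATE_RX_QUEUE command sent by the PN-replay config) is only indicated in a comment.

static int example_peer_rx_op(struct ath11k_base *ab, int vdev_id,
                              const u8 *peer_mac, u8 tid)
{
        struct ath11k_peer *peer;

        spin_lock_bh(&ab->base_lock);

        peer = ath11k_peer_find(ab, vdev_id, peer_mac);
        if (!peer) {
                ath11k_warn(ab, "failed to find peer for rx tid %d op\n", tid);
                spin_unlock_bh(&ab->base_lock);
                return -ENOENT;
        }

        /* operate on the peer's per-tid rx state (e.g. send a
         * HAL_REO_CMD_UPDATE_RX_QUEUE command) while the lock pins the peer
         */

        spin_unlock_bh(&ab->base_lock);
        return 0;
}
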
1214 static int ath11k_htt_tlv_ppdu_stats_parse(struct ath11k_base *ab, in ath11k_htt_tlv_ppdu_stats_parse() argument
1228 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1237 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1256 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1276 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1298 int ath11k_dp_htt_tlv_iter(struct ath11k_base *ab, const void *ptr, size_t len, in ath11k_dp_htt_tlv_iter() argument
1310 ath11k_err(ab, "htt tlv parse failure at byte %zd (%zu bytes left, %zu expected)\n", in ath11k_dp_htt_tlv_iter()
1321 ath11k_err(ab, "htt tlv parse failure of tag %u at byte %zd (%zu bytes left, %u expected)\n", in ath11k_dp_htt_tlv_iter()
1325 ret = iter(ab, tlv_tag, tlv_len, ptr, data); in ath11k_dp_htt_tlv_iter()
1358 struct ath11k_base *ab = ar->ab; in ath11k_update_per_peer_tx_stats() local
1410 ath11k_warn(ab, "Invalid HE mcs %d peer stats", mcs); in ath11k_update_per_peer_tx_stats()
1415 ath11k_warn(ab, "Invalid VHT mcs %d peer stats", mcs); in ath11k_update_per_peer_tx_stats()
1420 ath11k_warn(ab, "Invalid HT mcs %d nss %d peer stats", in ath11k_update_per_peer_tx_stats()
1435 spin_lock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1436 peer = ath11k_peer_find_by_id(ab, usr_stats->peer_id); in ath11k_update_per_peer_tx_stats()
1439 spin_unlock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1501 spin_unlock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1552 static int ath11k_htt_pull_ppdu_stats(struct ath11k_base *ab, in ath11k_htt_pull_ppdu_stats() argument
1568 ar = ath11k_mac_get_ar_by_pdev_id(ab, pdev_id); in ath11k_htt_pull_ppdu_stats()
1584 ret = ath11k_dp_htt_tlv_iter(ab, msg->data, len, in ath11k_htt_pull_ppdu_stats()
1588 ath11k_warn(ab, "Failed to parse tlv %d\n", ret); in ath11k_htt_pull_ppdu_stats()
1598 static void ath11k_htt_pktlog(struct ath11k_base *ab, struct sk_buff *skb) in ath11k_htt_pktlog() argument
1606 ar = ath11k_mac_get_ar_by_pdev_id(ab, pdev_id); in ath11k_htt_pktlog()
1608 ath11k_warn(ab, "invalid pdev id %d on htt pktlog\n", pdev_id); in ath11k_htt_pktlog()
1613 ar->ab->pktlog_defs_checksum); in ath11k_htt_pktlog()
1616 static void ath11k_htt_backpressure_event_handler(struct ath11k_base *ab, in ath11k_htt_backpressure_event_handler() argument
1636 …ath11k_dbg(ab, ATH11K_DBG_DP_HTT, "htt backpressure event, pdev %d, ring type %d,ring id %d, hp %d… in ath11k_htt_backpressure_event_handler()
1643 bp_stats = &ab->soc_stats.bp_stats.umac_ring_bp_stats[ring_id]; in ath11k_htt_backpressure_event_handler()
1650 bp_stats = &ab->soc_stats.bp_stats.lmac_ring_bp_stats[ring_id][pdev_idx]; in ath11k_htt_backpressure_event_handler()
1652 ath11k_warn(ab, "unknown ring type received in htt bp event %d\n", in ath11k_htt_backpressure_event_handler()
1657 spin_lock_bh(&ab->base_lock); in ath11k_htt_backpressure_event_handler()
1662 spin_unlock_bh(&ab->base_lock); in ath11k_htt_backpressure_event_handler()
1665 void ath11k_dp_htt_htc_t2h_msg_handler(struct ath11k_base *ab, in ath11k_dp_htt_htc_t2h_msg_handler() argument
1668 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_htt_htc_t2h_msg_handler()
1678 ath11k_dbg(ab, ATH11K_DBG_DP_HTT, "dp_htt rx msg type :0x%0x\n", type); in ath11k_dp_htt_htc_t2h_msg_handler()
1697 ath11k_peer_map_event(ab, vdev_id, peer_id, mac_addr, 0, 0); in ath11k_dp_htt_htc_t2h_msg_handler()
1712 ath11k_peer_map_event(ab, vdev_id, peer_id, mac_addr, ast_hash, in ath11k_dp_htt_htc_t2h_msg_handler()
1719 ath11k_peer_unmap_event(ab, peer_id); in ath11k_dp_htt_htc_t2h_msg_handler()
1722 ath11k_htt_pull_ppdu_stats(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1725 ath11k_debugfs_htt_ext_stats_handler(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1728 ath11k_htt_pktlog(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1731 ath11k_htt_backpressure_event_handler(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1734 ath11k_warn(ab, "htt event %d not handled\n", type); in ath11k_dp_htt_htc_t2h_msg_handler()
1746 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_msdu_coalesce() local
1752 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_msdu_coalesce()
1768 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ab, ldesc); in ath11k_dp_rx_msdu_coalesce()
1769 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ab, ldesc); in ath11k_dp_rx_msdu_coalesce()
1781 ath11k_dp_rx_desc_end_tlv_copy(ab, rxcb->rx_desc, ldesc); in ath11k_dp_rx_msdu_coalesce()
1850 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rxcb->rx_desc); in ath11k_dp_rx_h_csum_offload()
1881 ath11k_warn(ar->ab, "unsupported encryption type %d for mic len\n", enctype); in ath11k_dp_rx_crypto_mic_len()
1909 ath11k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath11k_dp_rx_crypto_param_len()
1934 ath11k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath11k_dp_rx_crypto_icv_len()
1983 if (ath11k_dp_rx_h_msdu_start_mesh_ctl_present(ar->ab, rxcb->rx_desc)) in ath11k_dp_rx_h_undecap_nwifi()
2087 hdr = (struct ieee80211_hdr *)ath11k_dp_rx_h_80211_hdr(ar->ab, rxcb->rx_desc); in ath11k_dp_rx_h_find_rfc1042()
2160 first_hdr = ath11k_dp_rx_h_80211_hdr(ar->ab, rx_desc); in ath11k_dp_rx_h_undecap()
2161 decap = ath11k_dp_rx_h_msdu_start_decap_type(ar->ab, rx_desc); in ath11k_dp_rx_h_undecap()
2203 spin_lock_bh(&ar->ab->base_lock); in ath11k_dp_rx_h_mpdu()
2204 peer = ath11k_peer_find_by_addr(ar->ab, hdr->addr2); in ath11k_dp_rx_h_mpdu()
2213 spin_unlock_bh(&ar->ab->base_lock); in ath11k_dp_rx_h_mpdu()
2215 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2264 pkt_type = ath11k_dp_rx_h_msdu_start_pkt_type(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2265 bw = ath11k_dp_rx_h_msdu_start_rx_bw(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2266 rate_mcs = ath11k_dp_rx_h_msdu_start_rate_mcs(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2267 nss = ath11k_dp_rx_h_msdu_start_nss(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2268 sgi = ath11k_dp_rx_h_msdu_start_sgi(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2281 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2295 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2308 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2336 meta_data = ath11k_dp_rx_h_msdu_start_freq(ar->ab, rx_desc); in ath11k_dp_rx_h_ppdu()
2355 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "rx_desc: ", in ath11k_dp_rx_h_ppdu()
2401 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_deliver_msdu()
2426 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DP_RX, NULL, "dp rx msdu: ", in ath11k_dp_rx_deliver_msdu()
2438 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_process_msdu() local
2450 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_process_msdu()
2454 ath11k_warn(ab, in ath11k_dp_rx_process_msdu()
2462 rx_attention = ath11k_dp_rx_get_attention(ab, lrx_desc); in ath11k_dp_rx_process_msdu()
2464 ath11k_warn(ab, "msdu_done bit in attention is not set\n"); in ath11k_dp_rx_process_msdu()
2471 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ab, rx_desc); in ath11k_dp_rx_process_msdu()
2472 l3_pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ab, lrx_desc); in ath11k_dp_rx_process_msdu()
2478 hdr_status = ath11k_dp_rx_h_80211_hdr(ab, rx_desc); in ath11k_dp_rx_process_msdu()
2480 ath11k_warn(ab, "invalid msdu len %u\n", msdu_len); in ath11k_dp_rx_process_msdu()
2481 ath11k_dbg_dump(ab, ATH11K_DBG_DATA, NULL, "", hdr_status, in ath11k_dp_rx_process_msdu()
2483 ath11k_dbg_dump(ab, ATH11K_DBG_DATA, NULL, "", rx_desc, in ath11k_dp_rx_process_msdu()
2494 ath11k_warn(ab, in ath11k_dp_rx_process_msdu()
2519 static void ath11k_dp_rx_process_received_packets(struct ath11k_base *ab, in ath11k_dp_rx_process_received_packets() argument
2538 ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_process_received_packets()
2539 if (!rcu_dereference(ab->pdevs_active[mac_id])) { in ath11k_dp_rx_process_received_packets()
2551 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_process_received_packets()
2564 int ath11k_dp_process_rx(struct ath11k_base *ab, int ring_id, in ath11k_dp_process_rx() argument
2567 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_process_rx()
2584 srng = &ab->hal.srng_list[dp->reo_dst_ring[ring_id].ring_id]; in ath11k_dp_process_rx()
2588 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rx()
2591 while ((rx_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_rx()
2602 ar = ab->pdevs[mac_id].ar; in ath11k_dp_process_rx()
2607 ath11k_warn(ab, "frame rx with invalid buf_id %d\n", in ath11k_dp_process_rx()
2617 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_process_rx()
2629 ab->soc_stats.hal_reo_error[dp->reo_dst_ring[ring_id].ring_id]++; in ath11k_dp_process_rx()
2657 if (!done && ath11k_hal_srng_dst_num_free(ab, srng, true)) { in ath11k_dp_process_rx()
2658 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx()
2662 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx()
2669 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_process_rx()
2673 ar = ab->pdevs[i].ar; in ath11k_dp_process_rx()
2676 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, num_buffs_reaped[i], in ath11k_dp_process_rx()
2680 ath11k_dp_rx_process_received_packets(ab, napi, &msdu_list, in ath11k_dp_process_rx()
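
ath11k_dp_process_rx() above follows a reap / replenish / deliver structure; the outline below is reconstructed from the listed calls. Cookie decoding, the msdu-list bookkeeping and the retry-when-more-entries-arrive path are elided, and the MAX_RADIOS array sizing is an assumption.

static int example_process_rx(struct ath11k_base *ab, int ring_id,
                              struct napi_struct *napi, int budget)
{
        struct ath11k_dp *dp = &ab->dp;
        int num_buffs_reaped[MAX_RADIOS] = {};
        struct hal_srng *srng;
        u32 *rx_desc;
        int i;

        srng = &ab->hal.srng_list[dp->reo_dst_ring[ring_id].ring_id];

        spin_lock_bh(&srng->lock);
        ath11k_hal_srng_access_begin(ab, srng);

        while (budget-- && (rx_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) {
                /* decode mac_id/buf_id from the cookie, look the skb up in the
                 * per-pdev rx ring, dma_unmap it and queue it on a local msdu
                 * list; REO errors bump ab->soc_stats.hal_reo_error[] instead
                 */
        }

        ath11k_hal_srng_access_end(ab, srng);
        spin_unlock_bh(&srng->lock);

        /* refill each radio's rx ring by however many buffers it donated */
        for (i = 0; i < ab->num_radios; i++) {
                if (!num_buffs_reaped[i])
                        continue;
                /* ath11k_dp_rxbufs_replenish(ab, i, rx_ring, num_buffs_reaped[i], ...) */
        }

        /* finally hand the collected MSDUs to the per-pdev RX handler via napi */
        return budget;
}
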
2757 static struct sk_buff *ath11k_dp_rx_alloc_mon_status_buf(struct ath11k_base *ab, in ath11k_dp_rx_alloc_mon_status_buf() argument
2776 paddr = dma_map_single(ab->dev, skb->data, in ath11k_dp_rx_alloc_mon_status_buf()
2779 if (unlikely(dma_mapping_error(ab->dev, paddr))) in ath11k_dp_rx_alloc_mon_status_buf()
2793 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rx_alloc_mon_status_buf()
2801 int ath11k_dp_rx_mon_status_bufs_replenish(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_mon_status_bufs_replenish() argument
2817 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rx_mon_status_bufs_replenish()
2821 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2823 num_free = ath11k_hal_srng_src_num_free(ab, srng, true); in ath11k_dp_rx_mon_status_bufs_replenish()
2829 skb = ath11k_dp_rx_alloc_mon_status_buf(ab, rx_ring, in ath11k_dp_rx_mon_status_bufs_replenish()
2835 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2847 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2857 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rx_mon_status_bufs_replenish()
2860 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2866 static int ath11k_dp_rx_reap_mon_status_ring(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_reap_mon_status_ring() argument
2883 ar = ab->pdevs[ath11k_hw_mac_id_to_pdev_id(&ab->hw_params, mac_id)].ar; in ath11k_dp_rx_reap_mon_status_ring()
2885 srng_id = ath11k_hw_mac_id_to_srng_id(&ab->hw_params, mac_id); in ath11k_dp_rx_reap_mon_status_ring()
2888 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rx_reap_mon_status_ring()
2892 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
2896 ath11k_hal_srng_src_peek(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
2908 ath11k_warn(ab, "rx monitor status with invalid buf_id %d\n", in ath11k_dp_rx_reap_mon_status_ring()
2919 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_rx_reap_mon_status_ring()
2926 ath11k_warn(ab, "mon status DONE not set %lx\n", in ath11k_dp_rx_reap_mon_status_ring()
2936 skb = ath11k_dp_rx_alloc_mon_status_buf(ab, rx_ring, in ath11k_dp_rx_reap_mon_status_ring()
2952 ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
2955 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
2961 int ath11k_dp_rx_process_mon_status(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_process_mon_status() argument
2964 struct ath11k *ar = ath11k_ab_to_ar(ab, mac_id); in ath11k_dp_rx_process_mon_status()
2975 num_buffs_reaped = ath11k_dp_rx_reap_mon_status_ring(ab, mac_id, &budget, in ath11k_dp_rx_process_mon_status()
2987 hal_status = ath11k_hal_rx_parse_mon_status(ab, &ppdu_info, skb); in ath11k_dp_rx_process_mon_status()
2996 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_process_mon_status()
2997 peer = ath11k_peer_find_by_id(ab, ppdu_info.peer_id); in ath11k_dp_rx_process_mon_status()
3000 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_process_mon_status()
3003 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_process_mon_status()
3015 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_process_mon_status()
3028 spin_lock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3031 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3035 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3040 struct ath11k_base *ab = ar->ab; in ath11k_peer_rx_frag_setup() local
3050 spin_lock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
3052 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_peer_rx_frag_setup()
3054 ath11k_warn(ab, "failed to find the peer to set up fragment info\n"); in ath11k_peer_rx_frag_setup()
3055 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
3061 rx_tid->ab = ab; in ath11k_peer_rx_frag_setup()
3067 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
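
ath11k_peer_rx_frag_setup() above initialises per-tid fragment state under base_lock; a compact sketch follows. The rx_tid field names (frag_timer, rx_frags) are assumptions not shown in the listing, the frag timer callback name is taken from the lines above, and any additional setup the real function performs is omitted.

static int example_peer_rx_frag_setup(struct ath11k *ar, const u8 *peer_mac,
                                      int vdev_id)
{
        struct ath11k_base *ab = ar->ab;
        struct ath11k_peer *peer;
        struct dp_rx_tid *rx_tid;
        int i;

        spin_lock_bh(&ab->base_lock);

        peer = ath11k_peer_find(ab, vdev_id, peer_mac);
        if (!peer) {
                ath11k_warn(ab, "failed to find the peer to set up fragment info\n");
                spin_unlock_bh(&ab->base_lock);
                return -ENOENT;
        }

        for (i = 0; i <= IEEE80211_NUM_TIDS; i++) {
                rx_tid = &peer->rx_tid[i];
                rx_tid->ab = ab;
                /* frag_timer / rx_frags field names are assumptions */
                timer_setup(&rx_tid->frag_timer, ath11k_dp_rx_frag_timer, 0);
                skb_queue_head_init(&rx_tid->rx_frags);
        }

        spin_unlock_bh(&ab->base_lock);
        return 0;
}
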
3123 u32 hdr_len, hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_verify_tkip_mic()
3127 if (ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, rx_desc) != in ath11k_dp_rx_h_verify_tkip_mic()
3174 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_undecap_frag()
3212 u32 flags, hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_defrag()
3222 enctype = ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, rx_desc); in ath11k_dp_rx_h_defrag()
3224 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rx_desc); in ath11k_dp_rx_h_defrag()
3272 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_h_defrag_reo_reinject() local
3287 hal_rx_desc_sz = ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_defrag_reo_reinject()
3288 link_desc_banks = ab->dp.link_desc_banks; in ath11k_dp_rx_h_defrag_reo_reinject()
3291 ath11k_hal_rx_reo_ent_paddr_get(ab, reo_dest_ring, &paddr, &desc_bank); in ath11k_dp_rx_h_defrag_reo_reinject()
3309 ath11k_dp_rxdesc_set_msdu_len(ab, rx_desc, defrag_skb->len - hal_rx_desc_sz); in ath11k_dp_rx_h_defrag_reo_reinject()
3311 paddr = dma_map_single(ab->dev, defrag_skb->data, in ath11k_dp_rx_h_defrag_reo_reinject()
3314 if (dma_mapping_error(ab->dev, paddr)) in ath11k_dp_rx_h_defrag_reo_reinject()
3333 srng = &ab->hal.srng_list[ab->dp.reo_reinject_ring.ring_id]; in ath11k_dp_rx_h_defrag_reo_reinject()
3336 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3339 ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3341 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3348 ath11k_hal_rx_reo_ent_paddr_get(ab, reo_dest_ring, &paddr, &desc_bank); in ath11k_dp_rx_h_defrag_reo_reinject()
3367 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3377 dma_unmap_single(ab->dev, paddr, defrag_skb->len + skb_tailroom(defrag_skb), in ath11k_dp_rx_h_defrag_reo_reinject()
3387 frag1 = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, a); in ath11k_dp_rx_h_cmp_frags()
3388 frag2 = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, b); in ath11k_dp_rx_h_cmp_frags()
3415 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_get_pn()
3442 encrypt_type = ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, desc); in ath11k_dp_rx_h_defrag_validate_incr_pn()
3466 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_frag_h_mpdu() local
3479 peer_id = ath11k_dp_rx_h_mpdu_start_peer_id(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3480 tid = ath11k_dp_rx_h_mpdu_start_tid(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3481 seqno = ath11k_dp_rx_h_mpdu_start_seq_no(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3482 frag_no = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, msdu); in ath11k_dp_rx_frag_h_mpdu()
3483 more_frags = ath11k_dp_rx_h_mpdu_start_more_frags(ar->ab, msdu); in ath11k_dp_rx_frag_h_mpdu()
3484 is_mcbc = ath11k_dp_rx_h_attn_is_mcbc(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3490 if (!ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid(ar->ab, rx_desc) || in ath11k_dp_rx_frag_h_mpdu()
3491 !ath11k_dp_rx_h_mpdu_start_fc_valid(ar->ab, rx_desc) || in ath11k_dp_rx_frag_h_mpdu()
3503 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3504 peer = ath11k_peer_find_by_id(ab, peer_id); in ath11k_dp_rx_frag_h_mpdu()
3506 ath11k_warn(ab, "failed to find the peer to de-fragment received fragment peer_id %d\n", in ath11k_dp_rx_frag_h_mpdu()
3544 ath11k_dp_rx_link_desc_return(ab, ring_desc, in ath11k_dp_rx_frag_h_mpdu()
3555 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3557 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3559 peer = ath11k_peer_find_by_id(ab, peer_id); in ath11k_dp_rx_frag_h_mpdu()
3582 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3596 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_process_rx_err_buf()
3601 ath11k_warn(ar->ab, "rx err buf with invalid buf_id %d\n", in ath11k_dp_process_rx_err_buf()
3611 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath11k_dp_process_rx_err_buf()
3621 if (!rcu_dereference(ar->ab->pdevs_active[ar->pdev_idx])) { in ath11k_dp_process_rx_err_buf()
3632 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, rx_desc); in ath11k_dp_process_rx_err_buf()
3634 hdr_status = ath11k_dp_rx_h_80211_hdr(ar->ab, rx_desc); in ath11k_dp_process_rx_err_buf()
3635 ath11k_warn(ar->ab, "invalid msdu leng %u", msdu_len); in ath11k_dp_process_rx_err_buf()
3636 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "", hdr_status, in ath11k_dp_process_rx_err_buf()
3638 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "", rx_desc, in ath11k_dp_process_rx_err_buf()
3648 ath11k_dp_rx_link_desc_return(ar->ab, ring_desc, in ath11k_dp_process_rx_err_buf()
3656 int ath11k_dp_process_rx_err(struct ath11k_base *ab, struct napi_struct *napi, in ath11k_dp_process_rx_err() argument
3680 dp = &ab->dp; in ath11k_dp_process_rx_err()
3684 srng = &ab->hal.srng_list[reo_except->ring_id]; in ath11k_dp_process_rx_err()
3688 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rx_err()
3691 (desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_rx_err()
3694 ab->soc_stats.err_ring_pkts++; in ath11k_dp_process_rx_err()
3695 ret = ath11k_hal_desc_reo_parse_err(ab, desc, &paddr, in ath11k_dp_process_rx_err()
3698 ath11k_warn(ab, "failed to parse error reo desc %d\n", in ath11k_dp_process_rx_err()
3708 ab->soc_stats.invalid_rbm++; in ath11k_dp_process_rx_err()
3709 ath11k_warn(ab, "invalid return buffer manager %d\n", rbm); in ath11k_dp_process_rx_err()
3710 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rx_err()
3723 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rx_err()
3734 ar = ab->pdevs[mac_id].ar; in ath11k_dp_process_rx_err()
3751 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx_err()
3755 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_process_rx_err()
3759 ar = ab->pdevs[i].ar; in ath11k_dp_process_rx_err()
3762 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, n_bufs_reaped[i], in ath11k_dp_process_rx_err()
3778 (DP_RX_BUFFER_SIZE - ar->ab->hw_params.hal_desc_sz)); in ath11k_dp_rx_null_q_desc_sg_drop()
3802 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_null_q_desc()
3804 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3813 rx_attention = ath11k_dp_rx_get_attention(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3815 ath11k_warn(ar->ab, in ath11k_dp_rx_h_null_q_desc()
3830 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3831 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3836 l3pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3848 rxcb->tid = ath11k_dp_rx_h_mpdu_start_tid(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3864 ar->ab->soc_stats.reo_error[rxcb->err_code]++; in ath11k_dp_rx_h_reo_err()
3895 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_tkip_mic_err()
3897 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
3898 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
3900 l3pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
3901 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
3920 ar->ab->soc_stats.rxdma_error[rxcb->err_code]++; in ath11k_dp_rx_h_rxdma_err()
3970 int ath11k_dp_rx_process_wbm_err(struct ath11k_base *ab, in ath11k_dp_rx_process_wbm_err() argument
3974 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_rx_process_wbm_err()
3987 for (i = 0; i < ab->num_radios; i++) in ath11k_dp_rx_process_wbm_err()
3990 srng = &ab->hal.srng_list[dp->rx_rel_ring.ring_id]; in ath11k_dp_rx_process_wbm_err()
3994 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_process_wbm_err()
3997 rx_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng); in ath11k_dp_rx_process_wbm_err()
4001 ret = ath11k_hal_wbm_desc_parse_err(ab, rx_desc, &err_info); in ath11k_dp_rx_process_wbm_err()
4003 ath11k_warn(ab, in ath11k_dp_rx_process_wbm_err()
4012 ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_process_wbm_err()
4018 ath11k_warn(ab, "frame rx with invalid buf_id %d pdev %d\n", in ath11k_dp_rx_process_wbm_err()
4028 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_rx_process_wbm_err()
4048 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_process_wbm_err()
4055 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_rx_process_wbm_err()
4059 ar = ab->pdevs[i].ar; in ath11k_dp_rx_process_wbm_err()
4062 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, num_buffs_reaped[i], in ath11k_dp_rx_process_wbm_err()
4067 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_rx_process_wbm_err()
4068 if (!rcu_dereference(ab->pdevs_active[i])) { in ath11k_dp_rx_process_wbm_err()
4073 ar = ab->pdevs[i].ar; in ath11k_dp_rx_process_wbm_err()
4088 int ath11k_dp_process_rxdma_err(struct ath11k_base *ab, int mac_id, int budget) in ath11k_dp_process_rxdma_err() argument
4093 struct dp_link_desc_bank *link_desc_banks = ab->dp.link_desc_banks; in ath11k_dp_process_rxdma_err()
4111 ar = ab->pdevs[ath11k_hw_mac_id_to_pdev_id(&ab->hw_params, mac_id)].ar; in ath11k_dp_process_rxdma_err()
4112 err_ring = &ar->dp.rxdma_err_dst_ring[ath11k_hw_mac_id_to_srng_id(&ab->hw_params, in ath11k_dp_process_rxdma_err()
4116 srng = &ab->hal.srng_list[err_ring->ring_id]; in ath11k_dp_process_rxdma_err()
4120 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rxdma_err()
4123 (desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_rxdma_err()
4124 ath11k_hal_rx_reo_ent_paddr_get(ab, desc, &paddr, &desc_bank); in ath11k_dp_process_rxdma_err()
4130 ab->soc_stats.rxdma_error[rxdma_err_code]++; in ath11k_dp_process_rxdma_err()
4144 ath11k_warn(ab, "rxdma error with invalid buf_id %d\n", in ath11k_dp_process_rxdma_err()
4154 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_process_rxdma_err()
4162 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rxdma_err()
4166 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rxdma_err()
4171 ath11k_dp_rxbufs_replenish(ab, mac_id, rx_ring, num_buf_freed, in ath11k_dp_process_rxdma_err()
4177 void ath11k_dp_process_reo_status(struct ath11k_base *ab) in ath11k_dp_process_reo_status() argument
4179 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_process_reo_status()
4187 srng = &ab->hal.srng_list[dp->reo_status_ring.ring_id]; in ath11k_dp_process_reo_status()
4193 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_reo_status()
4195 while ((reo_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_reo_status()
4200 ath11k_hal_reo_status_queue_stats(ab, reo_desc, in ath11k_dp_process_reo_status()
4204 ath11k_hal_reo_flush_queue_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4208 ath11k_hal_reo_flush_cache_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4212 ath11k_hal_reo_unblk_cache_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4216 ath11k_hal_reo_flush_timeout_list_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4220 ath11k_hal_reo_desc_thresh_reached_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4224 ath11k_hal_reo_update_rx_reo_queue_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4228 ath11k_warn(ab, "Unknown reo status type %d\n", tag); in ath11k_dp_process_reo_status()
4251 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_reo_status()
4256 void ath11k_dp_rx_pdev_free(struct ath11k_base *ab, int mac_id) in ath11k_dp_rx_pdev_free() argument
4258 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_pdev_free()
4264 int ath11k_dp_rx_pdev_alloc(struct ath11k_base *ab, int mac_id) in ath11k_dp_rx_pdev_alloc() argument
4266 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_pdev_alloc()
4274 ath11k_warn(ab, "failed to setup rx srngs\n"); in ath11k_dp_rx_pdev_alloc()
4280 ath11k_warn(ab, "failed to setup rxdma ring\n"); in ath11k_dp_rx_pdev_alloc()
4285 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, mac_id, HAL_RXDMA_BUF); in ath11k_dp_rx_pdev_alloc()
4287 ath11k_warn(ab, "failed to configure rx_refill_buf_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4292 if (ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_rx_pdev_alloc()
4293 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4295 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4298 ath11k_warn(ab, "failed to configure rx_mac_buf_ring%d %d\n", in ath11k_dp_rx_pdev_alloc()
4305 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4307 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4310 ath11k_warn(ab, "failed to configure rxdma_err_dest_ring%d %d\n", in ath11k_dp_rx_pdev_alloc()
4316 if (!ab->hw_params.rxdma1_enable) in ath11k_dp_rx_pdev_alloc()
4320 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4323 ath11k_warn(ab, "failed to configure rxdma_mon_buf_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4327 ret = ath11k_dp_tx_htt_srng_setup(ab, in ath11k_dp_rx_pdev_alloc()
4331 ath11k_warn(ab, "failed to configure rxdma_mon_dst_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4335 ret = ath11k_dp_tx_htt_srng_setup(ab, in ath11k_dp_rx_pdev_alloc()
4339 ath11k_warn(ab, "failed to configure rxdma_mon_dst_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4345 for (i = 0; i < ab->hw_params.num_rxmda_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4347 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, mac_id + i, in ath11k_dp_rx_pdev_alloc()
4350 ath11k_warn(ab, in ath11k_dp_rx_pdev_alloc()
4382 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_monitor_link_desc_return()
4384 hal_srng = &ar->ab->hal.srng_list[dp_srng->ring_id]; in ath11k_dp_rx_monitor_link_desc_return()
4386 dp_srng = &ar->ab->dp.wbm_desc_rel_ring; in ath11k_dp_rx_monitor_link_desc_return()
4387 hal_srng = &ar->ab->hal.srng_list[dp_srng->ring_id]; in ath11k_dp_rx_monitor_link_desc_return()
4390 ath11k_hal_srng_access_begin(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4392 src_srng_desc = ath11k_hal_srng_src_get_next_entry(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4400 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_monitor_link_desc_return()
4405 ath11k_hal_srng_access_end(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4560 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_mpdu_pop()
4591 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_mpdu_pop()
4597 (void *)ar->ab->dp.link_desc_banks[sw_cookie].vaddr + in ath11k_dp_rx_mon_mpdu_pop()
4598 (paddr - ar->ab->dp.link_desc_banks[sw_cookie].paddr); in ath11k_dp_rx_mon_mpdu_pop()
4607 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4621 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4627 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath11k_dp_rx_mon_mpdu_pop()
4634 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4645 l2_hdr_offset = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, rx_desc); in ath11k_dp_rx_mon_mpdu_pop()
4648 if (!ath11k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) { in ath11k_dp_rx_mon_mpdu_pop()
4657 ath11k_dp_rxdesc_get_ppduid(ar->ab, rx_desc); in ath11k_dp_rx_mon_mpdu_pop()
4700 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_mon_mpdu_pop()
4704 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4707 ath11k_dp_rx_link_desc_return(ar->ab, rx_link_buf_info, in ath11k_dp_rx_mon_mpdu_pop()
4730 rx_pkt_offset = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_msdus_set_payload()
4731 l2_hdr_offset = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, in ath11k_dp_rx_msdus_set_payload()
4742 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_mon_merg_msdus() local
4757 rx_attention = ath11k_dp_rx_get_attention(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4762 decap_format = ath11k_dp_rx_h_msdu_start_decap_type(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4787 hdr_desc = ath11k_dp_rxdesc_get_80211hdr(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4804 hdr_desc = ath11k_dp_rxdesc_get_80211hdr(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4822 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_merg_msdus()
4826 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_merg_msdus()
4836 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_merg_msdus()
4910 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_dest_process()
4915 mon_dst_srng = &ar->ab->hal.srng_list[ring_id]; in ath11k_dp_rx_mon_dest_process()
4918 ath11k_warn(ar->ab, in ath11k_dp_rx_mon_dest_process()
4926 ath11k_hal_srng_access_begin(ar->ab, mon_dst_srng); in ath11k_dp_rx_mon_dest_process()
4932 while ((ring_entry = ath11k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) { in ath11k_dp_rx_mon_dest_process()
4945 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_dest_process()
4956 ring_entry = ath11k_hal_srng_dst_get_next_entry(ar->ab, in ath11k_dp_rx_mon_dest_process()
4959 ath11k_hal_srng_access_end(ar->ab, mon_dst_srng); in ath11k_dp_rx_mon_dest_process()
4965 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_dest_process()
4966 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, in ath11k_dp_rx_mon_dest_process()
4971 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, in ath11k_dp_rx_mon_dest_process()
4998 tlv_status = ath11k_hal_rx_parse_mon_status(ar->ab, ppdu_info, in ath11k_dp_rx_mon_status_process_tlv()
5010 static int ath11k_dp_mon_process_rx(struct ath11k_base *ab, int mac_id, in ath11k_dp_mon_process_rx() argument
5013 struct ath11k *ar = ath11k_ab_to_ar(ab, mac_id); in ath11k_dp_mon_process_rx()
5018 num_buffs_reaped = ath11k_dp_rx_reap_mon_status_ring(ar->ab, mac_id, &budget, in ath11k_dp_mon_process_rx()
5026 int ath11k_dp_rx_process_mon_rings(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_process_mon_rings() argument
5029 struct ath11k *ar = ath11k_ab_to_ar(ab, mac_id); in ath11k_dp_rx_process_mon_rings()
5033 ret = ath11k_dp_mon_process_rx(ab, mac_id, napi, budget); in ath11k_dp_rx_process_mon_rings()
5035 ret = ath11k_dp_rx_process_mon_status(ab, mac_id, napi, budget); in ath11k_dp_rx_process_mon_rings()
5064 ath11k_warn(ar->ab, "pdev_mon_status_attach() failed"); in ath11k_dp_rx_pdev_mon_attach()
5071 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_pdev_mon_attach()
5076 ath11k_hal_srng_get_entrysize(ar->ab, HAL_RXDMA_MONITOR_DESC); in ath11k_dp_rx_pdev_mon_attach()
5078 &ar->ab->hal.srng_list[dp->rxdma_mon_desc_ring.ring_id]; in ath11k_dp_rx_pdev_mon_attach()
5080 ret = ath11k_dp_link_desc_setup(ar->ab, pmon->link_desc_banks, in ath11k_dp_rx_pdev_mon_attach()
5084 ath11k_warn(ar->ab, "mon_link_desc_pool_setup() failed"); in ath11k_dp_rx_pdev_mon_attach()
5099 ath11k_dp_link_desc_cleanup(ar->ab, pmon->link_desc_banks, in ath11k_dp_mon_link_free()
5111 int ath11k_dp_rx_pktlog_start(struct ath11k_base *ab) in ath11k_dp_rx_pktlog_start() argument
5114 mod_timer(&ab->mon_reap_timer, in ath11k_dp_rx_pktlog_start()
5120 int ath11k_dp_rx_pktlog_stop(struct ath11k_base *ab, bool stop_timer) in ath11k_dp_rx_pktlog_stop() argument
5125 del_timer_sync(&ab->mon_reap_timer); in ath11k_dp_rx_pktlog_stop()
5128 ret = ath11k_dp_purge_mon_ring(ab); in ath11k_dp_rx_pktlog_stop()
5130 ath11k_warn(ab, "failed to purge dp mon ring: %d\n", ret); in ath11k_dp_rx_pktlog_stop()
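
The pktlog stop path above pairs timer teardown with a final drain of the monitor rings; a compact sketch, with a hypothetical function name and illustrative return handling:

static int example_rx_pktlog_stop(struct ath11k_base *ab, bool stop_timer)
{
        int ret;

        if (stop_timer)
                del_timer_sync(&ab->mon_reap_timer);

        /* reap whatever the (now stopped) timer would otherwise have serviced */
        ret = ath11k_dp_purge_mon_ring(ab);
        if (ret)
                ath11k_warn(ab, "failed to purge dp mon ring: %d\n", ret);

        return ret;
}
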