Lines Matching "pn", "retry", "params"

1 // SPDX-License-Identifier: BSD-3-Clause-Clear
3 * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4 * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
26 if (!ab->hw_params->hal_ops->rx_desc_encrypt_valid(desc)) in ath12k_dp_rx_h_enctype()
29 return ab->hw_params->hal_ops->rx_desc_get_encrypt_type(desc); in ath12k_dp_rx_h_enctype()
35 return ab->hw_params->hal_ops->rx_desc_get_decap_type(desc); in ath12k_dp_rx_h_decap_type()
41 return ab->hw_params->hal_ops->rx_desc_get_mesh_ctl(desc); in ath12k_dp_rx_h_mesh_ctl_present()
47 return ab->hw_params->hal_ops->rx_desc_get_mpdu_seq_ctl_vld(desc); in ath12k_dp_rx_h_seq_ctrl_valid()
53 return ab->hw_params->hal_ops->rx_desc_get_mpdu_fc_valid(desc); in ath12k_dp_rx_h_fc_valid()
61 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params->hal_desc_sz); in ath12k_dp_rx_h_more_frags()
62 return ieee80211_has_morefrags(hdr->frame_control); in ath12k_dp_rx_h_more_frags()
70 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params->hal_desc_sz); in ath12k_dp_rx_h_frag_no()
71 return le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_FRAG; in ath12k_dp_rx_h_frag_no()
77 return ab->hw_params->hal_ops->rx_desc_get_mpdu_start_seq_no(desc); in ath12k_dp_rx_h_seq_no()
83 return ab->hw_params->hal_ops->dp_rx_h_msdu_done(desc); in ath12k_dp_rx_h_msdu_done()
89 return ab->hw_params->hal_ops->dp_rx_h_l4_cksum_fail(desc); in ath12k_dp_rx_h_l4_cksum_fail()
95 return ab->hw_params->hal_ops->dp_rx_h_ip_cksum_fail(desc); in ath12k_dp_rx_h_ip_cksum_fail()
101 return ab->hw_params->hal_ops->dp_rx_h_is_decrypted(desc); in ath12k_dp_rx_h_is_decrypted()
107 return ab->hw_params->hal_ops->dp_rx_h_mpdu_err(desc); in ath12k_dp_rx_h_mpdu_err()
113 return ab->hw_params->hal_ops->rx_desc_get_msdu_len(desc); in ath12k_dp_rx_h_msdu_len()
119 return ab->hw_params->hal_ops->rx_desc_get_msdu_sgi(desc); in ath12k_dp_rx_h_sgi()
125 return ab->hw_params->hal_ops->rx_desc_get_msdu_rate_mcs(desc); in ath12k_dp_rx_h_rate_mcs()
131 return ab->hw_params->hal_ops->rx_desc_get_msdu_rx_bw(desc); in ath12k_dp_rx_h_rx_bw()
137 return ab->hw_params->hal_ops->rx_desc_get_msdu_freq(desc); in ath12k_dp_rx_h_freq()
143 return ab->hw_params->hal_ops->rx_desc_get_msdu_pkt_type(desc); in ath12k_dp_rx_h_pkt_type()
149 return hweight8(ab->hw_params->hal_ops->rx_desc_get_msdu_nss(desc)); in ath12k_dp_rx_h_nss()
155 return ab->hw_params->hal_ops->rx_desc_get_mpdu_tid(desc); in ath12k_dp_rx_h_tid()
161 return ab->hw_params->hal_ops->rx_desc_get_mpdu_peer_id(desc); in ath12k_dp_rx_h_peer_id()
167 return ab->hw_params->hal_ops->rx_desc_get_l3_pad_bytes(desc); in ath12k_dp_rx_h_l3pad()
173 return ab->hw_params->hal_ops->rx_desc_get_first_msdu(desc); in ath12k_dp_rx_h_first_msdu()
179 return ab->hw_params->hal_ops->rx_desc_get_last_msdu(desc); in ath12k_dp_rx_h_last_msdu()
186 ab->hw_params->hal_ops->rx_desc_copy_end_tlv(fdesc, ldesc); in ath12k_dp_rx_desc_end_tlv_copy()
193 ab->hw_params->hal_ops->rx_desc_set_msdu_len(desc, len); in ath12k_dp_rxdesc_set_msdu_len()
200 ab->hw_params->hal_ops->rx_desc_is_da_mcbc(desc)); in ath12k_dp_rx_h_is_da_mcbc()
206 return ab->hw_params->hal_ops->rx_desc_mac_addr2_valid(desc); in ath12k_dp_rxdesc_mac_addr2_valid()
212 return ab->hw_params->hal_ops->rx_desc_mpdu_start_addr2(desc); in ath12k_dp_rxdesc_get_mpdu_start_addr2()
219 ab->hw_params->hal_ops->rx_desc_get_dot11_hdr(desc, hdr); in ath12k_dp_rx_desc_get_dot11_hdr()
227 ab->hw_params->hal_ops->rx_desc_get_crypto_header(desc, crypto_hdr, enctype); in ath12k_dp_rx_desc_get_crypto_header()
233 return ab->hw_params->hal_ops->rx_desc_get_mpdu_frame_ctl(desc); in ath12k_dp_rxdesc_get_mpdu_frame_ctrl()
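
The one-line wrappers above (file lines 26-233) route every rx-descriptor field read through ab->hw_params->hal_ops, so the datapath never parses a chip-specific hal_rx_desc layout directly. A minimal sketch of that ops-table pattern, with all names and the descriptor layout hypothetical:

#include <linux/types.h>
#include <linux/kernel.h>

/* Hypothetical per-chip descriptor layout and parsers. */
struct chip_a_rx_desc {
	__le16 msdu_len;
	u8 decap_type;
} __packed;

struct rx_desc_ops {
	u8  (*get_decap_type)(const void *desc);
	u16 (*get_msdu_len)(const void *desc);
};

static u8 chip_a_decap_type(const void *desc)
{
	return ((const struct chip_a_rx_desc *)desc)->decap_type;
}

static u16 chip_a_msdu_len(const void *desc)
{
	return le16_to_cpu(((const struct chip_a_rx_desc *)desc)->msdu_len);
}

/* One ops instance per hardware generation, selected at probe time. */
static const struct rx_desc_ops chip_a_ops = {
	.get_decap_type = chip_a_decap_type,
	.get_msdu_len   = chip_a_msdu_len,
};

/* Callers never touch the layout directly, mirroring hal_ops above. */
static u16 rx_msdu_len(const struct rx_desc_ops *ops, const void *desc)
{
	return ops->get_msdu_len(desc);
}
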
242 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) in ath12k_dp_purge_mon_ring()
255 return -ETIMEDOUT; in ath12k_dp_purge_mon_ring()
273 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_bufs_replenish()
276 req_entries = min(req_entries, rx_ring->bufs_max); in ath12k_dp_rx_bufs_replenish()
278 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath12k_dp_rx_bufs_replenish()
280 spin_lock_bh(&srng->lock); in ath12k_dp_rx_bufs_replenish()
285 if (!req_entries && (num_free > (rx_ring->bufs_max * 3) / 4)) in ath12k_dp_rx_bufs_replenish()
297 if (!IS_ALIGNED((unsigned long)skb->data, in ath12k_dp_rx_bufs_replenish()
300 PTR_ALIGN(skb->data, DP_RX_BUFFER_ALIGN_SIZE) - in ath12k_dp_rx_bufs_replenish()
301 skb->data); in ath12k_dp_rx_bufs_replenish()
304 paddr = dma_map_single(ab->dev, skb->data, in ath12k_dp_rx_bufs_replenish()
305 skb->len + skb_tailroom(skb), in ath12k_dp_rx_bufs_replenish()
307 if (dma_mapping_error(ab->dev, paddr)) in ath12k_dp_rx_bufs_replenish()
311 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
319 rx_desc = list_first_entry_or_null(&dp->rx_desc_free_list, in ath12k_dp_rx_bufs_replenish()
323 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
327 rx_desc->skb = skb; in ath12k_dp_rx_bufs_replenish()
328 cookie = rx_desc->cookie; in ath12k_dp_rx_bufs_replenish()
329 list_del(&rx_desc->list); in ath12k_dp_rx_bufs_replenish()
330 list_add_tail(&rx_desc->list, &dp->rx_desc_used_list); in ath12k_dp_rx_bufs_replenish()
332 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
334 spin_lock_bh(&rx_ring->idr_lock); in ath12k_dp_rx_bufs_replenish()
335 buf_id = idr_alloc(&rx_ring->bufs_idr, skb, 0, in ath12k_dp_rx_bufs_replenish()
336 rx_ring->bufs_max * 3, GFP_ATOMIC); in ath12k_dp_rx_bufs_replenish()
337 spin_unlock_bh(&rx_ring->idr_lock); in ath12k_dp_rx_bufs_replenish()
350 ATH12K_SKB_RXCB(skb)->paddr = paddr; in ath12k_dp_rx_bufs_replenish()
352 num_remain--; in ath12k_dp_rx_bufs_replenish()
359 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_bufs_replenish()
361 return req_entries - num_remain; in ath12k_dp_rx_bufs_replenish()
365 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
366 list_del(&rx_desc->list); in ath12k_dp_rx_bufs_replenish()
367 list_add_tail(&rx_desc->list, &dp->rx_desc_free_list); in ath12k_dp_rx_bufs_replenish()
368 rx_desc->skb = NULL; in ath12k_dp_rx_bufs_replenish()
369 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_bufs_replenish()
371 spin_lock_bh(&rx_ring->idr_lock); in ath12k_dp_rx_bufs_replenish()
372 idr_remove(&rx_ring->bufs_idr, buf_id); in ath12k_dp_rx_bufs_replenish()
373 spin_unlock_bh(&rx_ring->idr_lock); in ath12k_dp_rx_bufs_replenish()
376 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath12k_dp_rx_bufs_replenish()
383 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_bufs_replenish()
385 return req_entries - num_remain; in ath12k_dp_rx_bufs_replenish()
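
The replenish loop above allocates an skb, aligns its data pointer, DMA-maps it, takes a free software descriptor under rx_desc_lock, and publishes the cookie to the refill ring; the failure labels unwind in exactly the reverse order (idr_remove, then dma_unmap_single, then free). A minimal sketch of the alignment step, assuming a 128-byte requirement; since the skb is still empty at this point, skb_reserve is the safe way to advance its data pointer:

#include <linux/skbuff.h>

#define RX_BUF_ALIGN 128	/* illustrative, not the driver's constant */

static void align_rx_buf(struct sk_buff *skb)
{
	/* Mirrors the IS_ALIGNED/PTR_ALIGN step at file lines 297-301. */
	if (!IS_ALIGNED((unsigned long)skb->data, RX_BUF_ALIGN))
		skb_reserve(skb, PTR_ALIGN(skb->data, RX_BUF_ALIGN) -
				 skb->data);
}
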
394 spin_lock_bh(&rx_ring->idr_lock); in ath12k_dp_rxdma_buf_ring_free()
395 idr_for_each_entry(&rx_ring->bufs_idr, skb, buf_id) { in ath12k_dp_rxdma_buf_ring_free()
396 idr_remove(&rx_ring->bufs_idr, buf_id); in ath12k_dp_rxdma_buf_ring_free()
400 dma_unmap_single(ab->dev, ATH12K_SKB_RXCB(skb)->paddr, in ath12k_dp_rxdma_buf_ring_free()
401 skb->len + skb_tailroom(skb), DMA_FROM_DEVICE); in ath12k_dp_rxdma_buf_ring_free()
405 idr_destroy(&rx_ring->bufs_idr); in ath12k_dp_rxdma_buf_ring_free()
406 spin_unlock_bh(&rx_ring->idr_lock); in ath12k_dp_rxdma_buf_ring_free()
413 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_buf_free()
414 struct dp_rxdma_ring *rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rxdma_buf_free()
418 rx_ring = &dp->rxdma_mon_buf_ring; in ath12k_dp_rxdma_buf_free()
421 rx_ring = &dp->tx_mon_buf_ring; in ath12k_dp_rxdma_buf_free()
433 num_entries = rx_ring->refill_buf_ring.size / in ath12k_dp_rxdma_ring_buf_setup()
436 rx_ring->bufs_max = num_entries; in ath12k_dp_rxdma_ring_buf_setup()
441 ab->hw_params->hal_params->rx_buf_rbm, in ath12k_dp_rxdma_ring_buf_setup()
448 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_buf_setup()
449 struct dp_rxdma_ring *rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rxdma_buf_setup()
460 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rxdma_buf_setup()
461 rx_ring = &dp->rxdma_mon_buf_ring; in ath12k_dp_rxdma_buf_setup()
470 rx_ring = &dp->tx_mon_buf_ring; in ath12k_dp_rxdma_buf_setup()
485 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_srng_free()
486 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_pdev_srng_free()
489 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_pdev_srng_free()
490 ath12k_dp_srng_cleanup(ab, &dp->rxdma_mon_dst_ring[i]); in ath12k_dp_rx_pdev_srng_free()
491 ath12k_dp_srng_cleanup(ab, &dp->tx_mon_dst_ring[i]); in ath12k_dp_rx_pdev_srng_free()
497 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_pdev_reo_cleanup()
501 ath12k_dp_srng_cleanup(ab, &dp->reo_dst_ring[i]); in ath12k_dp_rx_pdev_reo_cleanup()
506 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_pdev_reo_setup()
511 ret = ath12k_dp_srng_setup(ab, &dp->reo_dst_ring[i], in ath12k_dp_rx_pdev_reo_setup()
530 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_srng_alloc()
531 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_pdev_srng_alloc()
534 u32 mac_id = dp->mac_id; in ath12k_dp_rx_pdev_srng_alloc()
536 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_pdev_srng_alloc()
537 ret = ath12k_dp_srng_setup(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
538 &dp->rxdma_mon_dst_ring[i], in ath12k_dp_rx_pdev_srng_alloc()
543 ath12k_warn(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
548 ret = ath12k_dp_srng_setup(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
549 &dp->tx_mon_dst_ring[i], in ath12k_dp_rx_pdev_srng_alloc()
554 ath12k_warn(ar->ab, in ath12k_dp_rx_pdev_srng_alloc()
565 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_reo_cmd_list_cleanup()
569 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_reo_cmd_list_cleanup()
570 list_for_each_entry_safe(cmd, tmp, &dp->reo_cmd_list, list) { in ath12k_dp_rx_reo_cmd_list_cleanup()
571 list_del(&cmd->list); in ath12k_dp_rx_reo_cmd_list_cleanup()
572 dma_unmap_single(ab->dev, cmd->data.paddr, in ath12k_dp_rx_reo_cmd_list_cleanup()
573 cmd->data.size, DMA_BIDIRECTIONAL); in ath12k_dp_rx_reo_cmd_list_cleanup()
574 kfree(cmd->data.vaddr); in ath12k_dp_rx_reo_cmd_list_cleanup()
579 &dp->reo_cmd_cache_flush_list, list) { in ath12k_dp_rx_reo_cmd_list_cleanup()
580 list_del(&cmd_cache->list); in ath12k_dp_rx_reo_cmd_list_cleanup()
581 dp->reo_cmd_cache_flush_count--; in ath12k_dp_rx_reo_cmd_list_cleanup()
582 dma_unmap_single(ab->dev, cmd_cache->data.paddr, in ath12k_dp_rx_reo_cmd_list_cleanup()
583 cmd_cache->data.size, DMA_BIDIRECTIONAL); in ath12k_dp_rx_reo_cmd_list_cleanup()
584 kfree(cmd_cache->data.vaddr); in ath12k_dp_rx_reo_cmd_list_cleanup()
587 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_reo_cmd_list_cleanup()
596 ath12k_warn(dp->ab, "failed to flush rx tid hw desc, tid %d status %d\n", in ath12k_dp_reo_cmd_free()
597 rx_tid->tid, status); in ath12k_dp_reo_cmd_free()
599 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_reo_cmd_free()
601 kfree(rx_tid->vaddr); in ath12k_dp_reo_cmd_free()
602 rx_tid->vaddr = NULL; in ath12k_dp_reo_cmd_free()
611 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_reo_cmd_send()
616 cmd_ring = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath12k_dp_reo_cmd_send()
625 return -EINVAL; in ath12k_dp_reo_cmd_send()
637 return -ENOMEM; in ath12k_dp_reo_cmd_send()
639 memcpy(&dp_cmd->data, rx_tid, sizeof(*rx_tid)); in ath12k_dp_reo_cmd_send()
640 dp_cmd->cmd_num = cmd_num; in ath12k_dp_reo_cmd_send()
641 dp_cmd->handler = cb; in ath12k_dp_reo_cmd_send()
643 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_reo_cmd_send()
644 list_add_tail(&dp_cmd->list, &dp->reo_cmd_list); in ath12k_dp_reo_cmd_send()
645 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_reo_cmd_send()
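
ath12k_dp_reo_cmd_send() pairs each ring write with a tracking node (cmd_num plus completion handler) appended under reo_cmd_lock, so the status-ring handler can later find and invoke the callback. A generic sketch of that pending-command registry, with all names hypothetical:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct pending_cmd {
	struct list_head list;
	int cmd_num;
	void (*handler)(void *ctx, int status);
	void *ctx;
};

static void complete_pending_cmd(struct list_head *pending, spinlock_t *lock,
				 int cmd_num, int status)
{
	struct pending_cmd *cmd, *tmp;

	spin_lock_bh(lock);
	list_for_each_entry_safe(cmd, tmp, pending, list) {
		if (cmd->cmd_num != cmd_num)
			continue;
		list_del(&cmd->list);
		spin_unlock_bh(lock);
		cmd->handler(cmd->ctx, status);	/* run outside the lock */
		kfree(cmd);
		return;
	}
	spin_unlock_bh(lock);
}
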
657 tot_desc_sz = rx_tid->size; in ath12k_dp_reo_cache_flush()
661 tot_desc_sz -= desc_sz; in ath12k_dp_reo_cache_flush()
662 cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz); in ath12k_dp_reo_cache_flush()
663 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
670 rx_tid->tid, ret); in ath12k_dp_reo_cache_flush()
674 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
675 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_reo_cache_flush()
682 rx_tid->tid, ret); in ath12k_dp_reo_cache_flush()
683 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_reo_cache_flush()
685 kfree(rx_tid->vaddr); in ath12k_dp_reo_cache_flush()
686 rx_tid->vaddr = NULL; in ath12k_dp_reo_cache_flush()
693 struct ath12k_base *ab = dp->ab; in ath12k_dp_rx_tid_del_func()
702 rx_tid->tid, status); in ath12k_dp_rx_tid_del_func()
710 elem->ts = jiffies; in ath12k_dp_rx_tid_del_func()
711 memcpy(&elem->data, rx_tid, sizeof(*rx_tid)); in ath12k_dp_rx_tid_del_func()
713 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
714 list_add_tail(&elem->list, &dp->reo_cmd_cache_flush_list); in ath12k_dp_rx_tid_del_func()
715 dp->reo_cmd_cache_flush_count++; in ath12k_dp_rx_tid_del_func()
718 list_for_each_entry_safe(elem, tmp, &dp->reo_cmd_cache_flush_list, in ath12k_dp_rx_tid_del_func()
720 if (dp->reo_cmd_cache_flush_count > ATH12K_DP_RX_REO_DESC_FREE_THRES || in ath12k_dp_rx_tid_del_func()
721 time_after(jiffies, elem->ts + in ath12k_dp_rx_tid_del_func()
723 list_del(&elem->list); in ath12k_dp_rx_tid_del_func()
724 dp->reo_cmd_cache_flush_count--; in ath12k_dp_rx_tid_del_func()
732 * delete to this list. Hence unlock-lock is safe here. in ath12k_dp_rx_tid_del_func()
734 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
736 ath12k_dp_reo_cache_flush(ab, &elem->data); in ath12k_dp_rx_tid_del_func()
738 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
741 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_tid_del_func()
745 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_rx_tid_del_func()
747 kfree(rx_tid->vaddr); in ath12k_dp_rx_tid_del_func()
748 rx_tid->vaddr = NULL; in ath12k_dp_rx_tid_del_func()
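
The delete path above defers the actual cache flush: each entry is timestamped with jiffies, queued on reo_cmd_cache_flush_list, and flushed once the list is over a count threshold or the entry ages out. A sketch of that aging test; the 64-entry and 1-second figures are illustrative, not the driver's constants:

#include <linux/jiffies.h>

static bool reo_desc_should_flush(unsigned long entry_ts, unsigned int count)
{
	return count > 64 ||
	       time_after(jiffies, entry_ts + msecs_to_jiffies(1000));
}
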
755 struct ath12k_dp *dp = &ab->dp; in ath12k_peer_rx_tid_qref_setup()
757 if (!ab->hw_params->reoq_lut_support) in ath12k_peer_rx_tid_qref_setup()
763 qref = (struct ath12k_reo_queue_ref *)dp->reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_setup()
766 qref->info0 = u32_encode_bits(lower_32_bits(paddr), in ath12k_peer_rx_tid_qref_setup()
768 qref->info1 = u32_encode_bits(upper_32_bits(paddr), in ath12k_peer_rx_tid_qref_setup()
776 struct ath12k_dp *dp = &ab->dp; in ath12k_peer_rx_tid_qref_reset()
778 if (!ab->hw_params->reoq_lut_support) in ath12k_peer_rx_tid_qref_reset()
784 qref = (struct ath12k_reo_queue_ref *)dp->reoq_lut.vaddr + in ath12k_peer_rx_tid_qref_reset()
787 qref->info0 = u32_encode_bits(0, BUFFER_ADDR_INFO0_ADDR); in ath12k_peer_rx_tid_qref_reset()
788 qref->info1 = u32_encode_bits(0, BUFFER_ADDR_INFO1_ADDR) | in ath12k_peer_rx_tid_qref_reset()
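
The qref setup/reset pair above writes a 64-bit queue descriptor address into two 32-bit LUT words. A sketch of that packing; the field masks here are assumptions, not the driver's BUFFER_ADDR_INFO* definitions:

#include <linux/kernel.h>
#include <linux/bitfield.h>

#define QREF_INFO0_ADDR_LO	GENMASK(31, 0)
#define QREF_INFO1_ADDR_HI	GENMASK(7, 0)

static void qref_pack(u32 *info0, u32 *info1, u64 paddr)
{
	*info0 = u32_encode_bits(lower_32_bits(paddr), QREF_INFO0_ADDR_LO);
	*info1 = u32_encode_bits(upper_32_bits(paddr), QREF_INFO1_ADDR_HI);
}
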
796 struct ath12k_dp_rx_tid *rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_tid_delete()
799 if (!rx_tid->active) in ath12k_dp_rx_peer_tid_delete()
803 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_tid_delete()
804 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_tid_delete()
806 ret = ath12k_dp_reo_cmd_send(ar->ab, rx_tid, in ath12k_dp_rx_peer_tid_delete()
810 ath12k_err(ar->ab, "failed to send HAL_REO_CMD_UPDATE_RX_QUEUE cmd, tid %d (%d)\n", in ath12k_dp_rx_peer_tid_delete()
812 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath12k_dp_rx_peer_tid_delete()
814 kfree(rx_tid->vaddr); in ath12k_dp_rx_peer_tid_delete()
815 rx_tid->vaddr = NULL; in ath12k_dp_rx_peer_tid_delete()
818 ath12k_peer_rx_tid_qref_reset(ar->ab, peer->peer_id, tid); in ath12k_dp_rx_peer_tid_delete()
820 rx_tid->active = false; in ath12k_dp_rx_peer_tid_delete()
833 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_link_desc_return()
837 srng = &ab->hal.srng_list[dp->wbm_desc_rel_ring.ring_id]; in ath12k_dp_rx_link_desc_return()
839 spin_lock_bh(&srng->lock); in ath12k_dp_rx_link_desc_return()
845 ret = -ENOBUFS; in ath12k_dp_rx_link_desc_return()
854 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_link_desc_return()
862 struct ath12k_base *ab = rx_tid->ab; in ath12k_dp_rx_frags_cleanup()
864 lockdep_assert_held(&ab->base_lock); in ath12k_dp_rx_frags_cleanup()
866 if (rx_tid->dst_ring_desc) { in ath12k_dp_rx_frags_cleanup()
868 ath12k_dp_rx_link_desc_return(ab, rx_tid->dst_ring_desc, in ath12k_dp_rx_frags_cleanup()
870 kfree(rx_tid->dst_ring_desc); in ath12k_dp_rx_frags_cleanup()
871 rx_tid->dst_ring_desc = NULL; in ath12k_dp_rx_frags_cleanup()
874 rx_tid->cur_sn = 0; in ath12k_dp_rx_frags_cleanup()
875 rx_tid->last_frag_no = 0; in ath12k_dp_rx_frags_cleanup()
876 rx_tid->rx_frag_bitmap = 0; in ath12k_dp_rx_frags_cleanup()
877 __skb_queue_purge(&rx_tid->rx_frags); in ath12k_dp_rx_frags_cleanup()
885 lockdep_assert_held(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
888 rx_tid = &peer->rx_tid[i]; in ath12k_dp_rx_peer_tid_cleanup()
893 spin_unlock_bh(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
894 del_timer_sync(&rx_tid->frag_timer); in ath12k_dp_rx_peer_tid_cleanup()
895 spin_lock_bh(&ar->ab->base_lock); in ath12k_dp_rx_peer_tid_cleanup()
908 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_peer_rx_tid_reo_update()
909 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_peer_rx_tid_reo_update()
919 ret = ath12k_dp_reo_cmd_send(ar->ab, rx_tid, in ath12k_peer_rx_tid_reo_update()
923 ath12k_warn(ar->ab, "failed to update rx tid queue, tid %d (%d)\n", in ath12k_peer_rx_tid_reo_update()
924 rx_tid->tid, ret); in ath12k_peer_rx_tid_reo_update()
928 rx_tid->ba_win_sz = ba_win_sz; in ath12k_peer_rx_tid_reo_update()
937 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_tid_setup()
938 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_peer_tid_setup()
947 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
951 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
953 return -ENOENT; in ath12k_dp_rx_peer_tid_setup()
956 if (ab->hw_params->reoq_lut_support && !dp->reoq_lut.vaddr) { in ath12k_dp_rx_peer_tid_setup()
957 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
959 return -EINVAL; in ath12k_dp_rx_peer_tid_setup()
962 if (peer->peer_id > DP_MAX_PEER_ID || tid > IEEE80211_NUM_TIDS) { in ath12k_dp_rx_peer_tid_setup()
964 peer->peer_id, tid); in ath12k_dp_rx_peer_tid_setup()
965 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
966 return -EINVAL; in ath12k_dp_rx_peer_tid_setup()
969 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_tid_setup()
971 if (rx_tid->active) { in ath12k_dp_rx_peer_tid_setup()
972 paddr = rx_tid->paddr; in ath12k_dp_rx_peer_tid_setup()
975 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
981 if (!ab->hw_params->reoq_lut_support) { in ath12k_dp_rx_peer_tid_setup()
996 rx_tid->tid = tid; in ath12k_dp_rx_peer_tid_setup()
998 rx_tid->ba_win_sz = ba_win_sz; in ath12k_dp_rx_peer_tid_setup()
1008 vaddr = kzalloc(hw_desc_sz + HAL_LINK_DESC_ALIGN - 1, GFP_ATOMIC); in ath12k_dp_rx_peer_tid_setup()
1010 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1011 return -ENOMEM; in ath12k_dp_rx_peer_tid_setup()
1019 paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz, in ath12k_dp_rx_peer_tid_setup()
1022 ret = dma_mapping_error(ab->dev, paddr); in ath12k_dp_rx_peer_tid_setup()
1024 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1028 rx_tid->vaddr = vaddr; in ath12k_dp_rx_peer_tid_setup()
1029 rx_tid->paddr = paddr; in ath12k_dp_rx_peer_tid_setup()
1030 rx_tid->size = hw_desc_sz; in ath12k_dp_rx_peer_tid_setup()
1031 rx_tid->active = true; in ath12k_dp_rx_peer_tid_setup()
1033 if (ab->hw_params->reoq_lut_support) { in ath12k_dp_rx_peer_tid_setup()
1037 ath12k_peer_rx_tid_qref_setup(ab, peer->peer_id, tid, paddr); in ath12k_dp_rx_peer_tid_setup()
1038 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1040 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_tid_setup()
1054 struct ieee80211_ampdu_params *params) in ath12k_dp_rx_ampdu_start() argument
1056 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_ampdu_start()
1057 struct ath12k_sta *arsta = (void *)params->sta->drv_priv; in ath12k_dp_rx_ampdu_start()
1058 int vdev_id = arsta->arvif->vdev_id; in ath12k_dp_rx_ampdu_start()
1061 ret = ath12k_dp_rx_peer_tid_setup(ar, params->sta->addr, vdev_id, in ath12k_dp_rx_ampdu_start()
1062 params->tid, params->buf_size, in ath12k_dp_rx_ampdu_start()
1063 params->ssn, arsta->pn_type); in ath12k_dp_rx_ampdu_start()
1071 struct ieee80211_ampdu_params *params) in ath12k_dp_rx_ampdu_stop() argument
1073 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_ampdu_stop()
1075 struct ath12k_sta *arsta = (void *)params->sta->drv_priv; in ath12k_dp_rx_ampdu_stop()
1076 int vdev_id = arsta->arvif->vdev_id; in ath12k_dp_rx_ampdu_stop()
1080 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1082 peer = ath12k_peer_find(ab, vdev_id, params->sta->addr); in ath12k_dp_rx_ampdu_stop()
1084 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1086 return -ENOENT; in ath12k_dp_rx_ampdu_stop()
1089 active = peer->rx_tid[params->tid].active; in ath12k_dp_rx_ampdu_stop()
1092 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1096 ret = ath12k_peer_rx_tid_reo_update(ar, peer, peer->rx_tid, 1, 0, false); in ath12k_dp_rx_ampdu_stop()
1097 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_ampdu_stop()
1100 params->tid, ret); in ath12k_dp_rx_ampdu_stop()
1112 struct ath12k *ar = arvif->ar; in ath12k_dp_rx_peer_pn_replay_config()
1113 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_pn_replay_config()
1120 /* NOTE: Enable PN/TSC replay check offload only for unicast frames. in ath12k_dp_rx_peer_pn_replay_config()
1121 * We use mac80211 PN/TSC replay check functionality for bcast/mcast in ath12k_dp_rx_peer_pn_replay_config()
1124 if (!(key->flags & IEEE80211_KEY_FLAG_PAIRWISE)) in ath12k_dp_rx_peer_pn_replay_config()
1134 switch (key->cipher) { in ath12k_dp_rx_peer_pn_replay_config()
1149 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
1151 peer = ath12k_peer_find(ab, arvif->vdev_id, peer_addr); in ath12k_dp_rx_peer_pn_replay_config()
1153 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
1154 ath12k_warn(ab, "failed to find the peer %pM to configure pn replay detection\n", in ath12k_dp_rx_peer_pn_replay_config()
1156 return -ENOENT; in ath12k_dp_rx_peer_pn_replay_config()
1160 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_peer_pn_replay_config()
1161 if (!rx_tid->active) in ath12k_dp_rx_peer_pn_replay_config()
1163 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_pn_replay_config()
1164 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath12k_dp_rx_peer_pn_replay_config()
1169 ath12k_warn(ab, "failed to configure rx tid %d queue of peer %pM for pn replay detection %d\n", in ath12k_dp_rx_peer_pn_replay_config()
1175 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_pn_replay_config()
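
The REO UPDATE_RX_QUEUE command issued here enables per-TID PN checking, with the PN width chosen from key->cipher (the switch at file line 1134, whose body is elided in this listing). A hedged sketch of that selection; the function name is hypothetical, and per 802.11 the CCMP/GCMP PNs and the TKIP TSC are all 48 bits:

#include <linux/ieee80211.h>

static u8 pn_size_bits(u32 cipher)
{
	switch (cipher) {
	case WLAN_CIPHER_SUITE_TKIP:
	case WLAN_CIPHER_SUITE_CCMP:
	case WLAN_CIPHER_SUITE_CCMP_256:
	case WLAN_CIPHER_SUITE_GCMP:
	case WLAN_CIPHER_SUITE_GCMP_256:
		return 48;
	default:
		return 0;	/* no PN replay check */
	}
}
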
1185 for (i = 0; i < HTT_PPDU_STATS_MAX_USERS - 1; i++) { in ath12k_get_ppdu_user_index()
1186 if (ppdu_stats->user_stats[i].is_valid_peer_id) { in ath12k_get_ppdu_user_index()
1187 if (peer_id == ppdu_stats->user_stats[i].peer_id) in ath12k_get_ppdu_user_index()
1194 return -EINVAL; in ath12k_get_ppdu_user_index()
1216 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1218 memcpy(&ppdu_info->ppdu_stats.common, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1225 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1228 peer_id = le16_to_cpu(user_rate->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1229 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1232 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1233 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1234 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1235 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1236 memcpy(&user_stats->rate, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1238 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1244 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1248 peer_id = le16_to_cpu(cmplt_cmn->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1249 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1252 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1253 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1254 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1255 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1256 memcpy(&user_stats->cmpltn_cmn, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1258 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1265 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1269 peer_id = le16_to_cpu(ba_status->sw_peer_id); in ath12k_htt_tlv_ppdu_stats_parse()
1270 cur_user = ath12k_get_ppdu_user_index(&ppdu_info->ppdu_stats, in ath12k_htt_tlv_ppdu_stats_parse()
1273 return -EINVAL; in ath12k_htt_tlv_ppdu_stats_parse()
1274 user_stats = &ppdu_info->ppdu_stats.user_stats[cur_user]; in ath12k_htt_tlv_ppdu_stats_parse()
1275 user_stats->peer_id = peer_id; in ath12k_htt_tlv_ppdu_stats_parse()
1276 user_stats->is_valid_peer_id = true; in ath12k_htt_tlv_ppdu_stats_parse()
1277 memcpy(&user_stats->ack_ba, ptr, in ath12k_htt_tlv_ppdu_stats_parse()
1279 user_stats->tlv_flags |= BIT(tag); in ath12k_htt_tlv_ppdu_stats_parse()
1301 int ret = -EINVAL; in ath12k_dp_htt_tlv_iter()
1306 ptr - begin, len, sizeof(*tlv)); in ath12k_dp_htt_tlv_iter()
1307 return -EINVAL; in ath12k_dp_htt_tlv_iter()
1314 tlv_tag = le32_get_bits(tlv->header, HTT_TLV_TAG); in ath12k_dp_htt_tlv_iter()
1315 tlv_len = le32_get_bits(tlv->header, HTT_TLV_LEN); in ath12k_dp_htt_tlv_iter()
1317 len -= sizeof(*tlv); in ath12k_dp_htt_tlv_iter()
1321 tlv_tag, ptr - begin, len, tlv_len); in ath12k_dp_htt_tlv_iter()
1322 return -EINVAL; in ath12k_dp_htt_tlv_iter()
1325 if (ret == -ENOMEM) in ath12k_dp_htt_tlv_iter()
1329 len -= tlv_len; in ath12k_dp_htt_tlv_iter()
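
ath12k_dp_htt_tlv_iter() is a bounds-checked TLV walk: verify the header fits, pull tag and length out of one little-endian word, verify the value fits, dispatch, advance. The same shape in a self-contained sketch; the 16/16-bit header split is an assumption for illustration, not the HTT definition:

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bitfield.h>

struct tlv_hdr {
	__le32 header;		/* tag in bits 15:0, len in bits 31:16 */
};

static int tlv_walk(const u8 *ptr, size_t len,
		    int (*cb)(u16 tag, const u8 *val, u16 vlen))
{
	while (len > 0) {
		const struct tlv_hdr *tlv;
		u16 tag, tlv_len;
		int ret;

		if (len < sizeof(*tlv))
			return -EINVAL;		/* truncated header */

		tlv = (const void *)ptr;
		tag = le32_get_bits(tlv->header, GENMASK(15, 0));
		tlv_len = le32_get_bits(tlv->header, GENMASK(31, 16));
		ptr += sizeof(*tlv);
		len -= sizeof(*tlv);

		if (tlv_len > len)
			return -EINVAL;		/* truncated value */

		ret = cb(tag, ptr, tlv_len);
		if (ret)
			return ret;

		ptr += tlv_len;
		len -= tlv_len;
	}

	return 0;
}
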
1338 struct ath12k_base *ab = ar->ab; in ath12k_update_per_peer_tx_stats()
1343 struct ath12k_per_peer_tx_stats *peer_stats = &ar->peer_tx_stats; in ath12k_update_per_peer_tx_stats()
1344 struct htt_ppdu_user_stats *usr_stats = &ppdu_stats->user_stats[user]; in ath12k_update_per_peer_tx_stats()
1345 struct htt_ppdu_stats_common *common = &ppdu_stats->common; in ath12k_update_per_peer_tx_stats()
1357 if (!(usr_stats->tlv_flags & BIT(HTT_PPDU_STATS_TAG_USR_RATE))) in ath12k_update_per_peer_tx_stats()
1360 if (usr_stats->tlv_flags & BIT(HTT_PPDU_STATS_TAG_USR_COMPLTN_COMMON)) in ath12k_update_per_peer_tx_stats()
1362 HTT_USR_CMPLTN_IS_AMPDU(usr_stats->cmpltn_cmn.flags); in ath12k_update_per_peer_tx_stats()
1364 if (usr_stats->tlv_flags & in ath12k_update_per_peer_tx_stats()
1366 succ_bytes = le32_to_cpu(usr_stats->ack_ba.success_bytes); in ath12k_update_per_peer_tx_stats()
1367 succ_pkts = le32_get_bits(usr_stats->ack_ba.info, in ath12k_update_per_peer_tx_stats()
1369 tid = le32_get_bits(usr_stats->ack_ba.info, in ath12k_update_per_peer_tx_stats()
1373 if (common->fes_duration_us) in ath12k_update_per_peer_tx_stats()
1374 tx_duration = le32_to_cpu(common->fes_duration_us); in ath12k_update_per_peer_tx_stats()
1376 user_rate = &usr_stats->rate; in ath12k_update_per_peer_tx_stats()
1377 flags = HTT_USR_RATE_PREAMBLE(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1378 bw = HTT_USR_RATE_BW(user_rate->rate_flags) - 2; in ath12k_update_per_peer_tx_stats()
1379 nss = HTT_USR_RATE_NSS(user_rate->rate_flags) + 1; in ath12k_update_per_peer_tx_stats()
1380 mcs = HTT_USR_RATE_MCS(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1381 sgi = HTT_USR_RATE_GI(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1382 dcm = HTT_USR_RATE_DCM(user_rate->rate_flags); in ath12k_update_per_peer_tx_stats()
1415 spin_lock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
1416 peer = ath12k_peer_find_by_id(ab, usr_stats->peer_id); in ath12k_update_per_peer_tx_stats()
1418 if (!peer || !peer->sta) { in ath12k_update_per_peer_tx_stats()
1419 spin_unlock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
1424 sta = peer->sta; in ath12k_update_per_peer_tx_stats()
1425 arsta = (struct ath12k_sta *)sta->drv_priv; in ath12k_update_per_peer_tx_stats()
1427 memset(&arsta->txrate, 0, sizeof(arsta->txrate)); in ath12k_update_per_peer_tx_stats()
1431 arsta->txrate.legacy = rate; in ath12k_update_per_peer_tx_stats()
1434 arsta->txrate.legacy = rate; in ath12k_update_per_peer_tx_stats()
1437 arsta->txrate.mcs = mcs + 8 * (nss - 1); in ath12k_update_per_peer_tx_stats()
1438 arsta->txrate.flags = RATE_INFO_FLAGS_MCS; in ath12k_update_per_peer_tx_stats()
1440 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath12k_update_per_peer_tx_stats()
1443 arsta->txrate.mcs = mcs; in ath12k_update_per_peer_tx_stats()
1444 arsta->txrate.flags = RATE_INFO_FLAGS_VHT_MCS; in ath12k_update_per_peer_tx_stats()
1446 arsta->txrate.flags |= RATE_INFO_FLAGS_SHORT_GI; in ath12k_update_per_peer_tx_stats()
1449 arsta->txrate.mcs = mcs; in ath12k_update_per_peer_tx_stats()
1450 arsta->txrate.flags = RATE_INFO_FLAGS_HE_MCS; in ath12k_update_per_peer_tx_stats()
1451 arsta->txrate.he_dcm = dcm; in ath12k_update_per_peer_tx_stats()
1452 arsta->txrate.he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi); in ath12k_update_per_peer_tx_stats()
1453 tones = le16_to_cpu(user_rate->ru_end) - in ath12k_update_per_peer_tx_stats()
1454 le16_to_cpu(user_rate->ru_start) + 1; in ath12k_update_per_peer_tx_stats()
1456 arsta->txrate.he_ru_alloc = v; in ath12k_update_per_peer_tx_stats()
1460 arsta->txrate.nss = nss; in ath12k_update_per_peer_tx_stats()
1461 arsta->txrate.bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_update_per_peer_tx_stats()
1462 arsta->tx_duration += tx_duration; in ath12k_update_per_peer_tx_stats()
1463 memcpy(&arsta->last_txrate, &arsta->txrate, sizeof(struct rate_info)); in ath12k_update_per_peer_tx_stats()
1470 peer_stats->succ_pkts = succ_pkts; in ath12k_update_per_peer_tx_stats()
1471 peer_stats->succ_bytes = succ_bytes; in ath12k_update_per_peer_tx_stats()
1472 peer_stats->is_ampdu = is_ampdu; in ath12k_update_per_peer_tx_stats()
1473 peer_stats->duration = tx_duration; in ath12k_update_per_peer_tx_stats()
1474 peer_stats->ba_fails = in ath12k_update_per_peer_tx_stats()
1475 HTT_USR_CMPLTN_LONG_RETRY(usr_stats->cmpltn_cmn.flags) + in ath12k_update_per_peer_tx_stats()
1476 HTT_USR_CMPLTN_SHORT_RETRY(usr_stats->cmpltn_cmn.flags); in ath12k_update_per_peer_tx_stats()
1479 spin_unlock_bh(&ab->base_lock); in ath12k_update_per_peer_tx_stats()
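
The HT rate index computed at file line 1437 above (and again at line 2315 in the rx path) folds the spatial-stream count into the MCS: each extra HT stream adds a block of eight MCS values, so MCS 5 at nss 3 reports index 5 + 8 * (3 - 1) = 21. As a one-line helper:

#include <linux/types.h>

static u8 ht_rate_idx(u8 mcs, u8 nss)
{
	return mcs + 8 * (nss - 1);
}
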
1488 for (user = 0; user < HTT_PPDU_STATS_MAX_USERS - 1; user++) in ath12k_htt_update_ppdu_stats()
1498 lockdep_assert_held(&ar->data_lock); in ath12k_dp_htt_get_ppdu_desc()
1499 if (!list_empty(&ar->ppdu_stats_info)) { in ath12k_dp_htt_get_ppdu_desc()
1500 list_for_each_entry(ppdu_info, &ar->ppdu_stats_info, list) { in ath12k_dp_htt_get_ppdu_desc()
1501 if (ppdu_info->ppdu_id == ppdu_id) in ath12k_dp_htt_get_ppdu_desc()
1505 if (ar->ppdu_stat_list_depth > HTT_PPDU_DESC_MAX_DEPTH) { in ath12k_dp_htt_get_ppdu_desc()
1506 ppdu_info = list_first_entry(&ar->ppdu_stats_info, in ath12k_dp_htt_get_ppdu_desc()
1508 list_del(&ppdu_info->list); in ath12k_dp_htt_get_ppdu_desc()
1509 ar->ppdu_stat_list_depth--; in ath12k_dp_htt_get_ppdu_desc()
1510 ath12k_htt_update_ppdu_stats(ar, &ppdu_info->ppdu_stats); in ath12k_dp_htt_get_ppdu_desc()
1519 list_add_tail(&ppdu_info->list, &ar->ppdu_stats_info); in ath12k_dp_htt_get_ppdu_desc()
1520 ar->ppdu_stat_list_depth++; in ath12k_dp_htt_get_ppdu_desc()
1528 peer->ppdu_stats_delayba.sw_peer_id = le16_to_cpu(usr_stats->rate.sw_peer_id); in ath12k_copy_to_delay_stats()
1529 peer->ppdu_stats_delayba.info0 = le32_to_cpu(usr_stats->rate.info0); in ath12k_copy_to_delay_stats()
1530 peer->ppdu_stats_delayba.ru_end = le16_to_cpu(usr_stats->rate.ru_end); in ath12k_copy_to_delay_stats()
1531 peer->ppdu_stats_delayba.ru_start = le16_to_cpu(usr_stats->rate.ru_start); in ath12k_copy_to_delay_stats()
1532 peer->ppdu_stats_delayba.info1 = le32_to_cpu(usr_stats->rate.info1); in ath12k_copy_to_delay_stats()
1533 peer->ppdu_stats_delayba.rate_flags = le32_to_cpu(usr_stats->rate.rate_flags); in ath12k_copy_to_delay_stats()
1534 peer->ppdu_stats_delayba.resp_rate_flags = in ath12k_copy_to_delay_stats()
1535 le32_to_cpu(usr_stats->rate.resp_rate_flags); in ath12k_copy_to_delay_stats()
1537 peer->delayba_flag = true; in ath12k_copy_to_delay_stats()
1543 usr_stats->rate.sw_peer_id = cpu_to_le16(peer->ppdu_stats_delayba.sw_peer_id); in ath12k_copy_to_bar()
1544 usr_stats->rate.info0 = cpu_to_le32(peer->ppdu_stats_delayba.info0); in ath12k_copy_to_bar()
1545 usr_stats->rate.ru_end = cpu_to_le16(peer->ppdu_stats_delayba.ru_end); in ath12k_copy_to_bar()
1546 usr_stats->rate.ru_start = cpu_to_le16(peer->ppdu_stats_delayba.ru_start); in ath12k_copy_to_bar()
1547 usr_stats->rate.info1 = cpu_to_le32(peer->ppdu_stats_delayba.info1); in ath12k_copy_to_bar()
1548 usr_stats->rate.rate_flags = cpu_to_le32(peer->ppdu_stats_delayba.rate_flags); in ath12k_copy_to_bar()
1549 usr_stats->rate.resp_rate_flags = in ath12k_copy_to_bar()
1550 cpu_to_le32(peer->ppdu_stats_delayba.resp_rate_flags); in ath12k_copy_to_bar()
1552 peer->delayba_flag = false; in ath12k_copy_to_bar()
1568 msg = (struct ath12k_htt_ppdu_stats_msg *)skb->data; in ath12k_htt_pull_ppdu_stats()
1569 len = le32_get_bits(msg->info, HTT_T2H_PPDU_STATS_INFO_PAYLOAD_SIZE); in ath12k_htt_pull_ppdu_stats()
1570 pdev_id = le32_get_bits(msg->info, HTT_T2H_PPDU_STATS_INFO_PDEV_ID); in ath12k_htt_pull_ppdu_stats()
1571 ppdu_id = le32_to_cpu(msg->ppdu_id); in ath12k_htt_pull_ppdu_stats()
1576 ret = -EINVAL; in ath12k_htt_pull_ppdu_stats()
1580 spin_lock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1583 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1584 ret = -EINVAL; in ath12k_htt_pull_ppdu_stats()
1588 ppdu_info->ppdu_id = ppdu_id; in ath12k_htt_pull_ppdu_stats()
1589 ret = ath12k_dp_htt_tlv_iter(ab, msg->data, len, in ath12k_htt_pull_ppdu_stats()
1593 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1599 if (ppdu_info->frame_type == HTT_STATS_PPDU_FTYPE_DATA && in ath12k_htt_pull_ppdu_stats()
1600 (ppdu_info->tlv_bitmap & (1 << HTT_PPDU_STATS_TAG_USR_COMMON)) && in ath12k_htt_pull_ppdu_stats()
1601 ppdu_info->delay_ba) { in ath12k_htt_pull_ppdu_stats()
1602 for (i = 0; i < ppdu_info->ppdu_stats.common.num_users; i++) { in ath12k_htt_pull_ppdu_stats()
1603 peer_id = ppdu_info->ppdu_stats.user_stats[i].peer_id; in ath12k_htt_pull_ppdu_stats()
1604 spin_lock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1607 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1611 usr_stats = &ppdu_info->ppdu_stats.user_stats[i]; in ath12k_htt_pull_ppdu_stats()
1612 if (usr_stats->delay_ba) in ath12k_htt_pull_ppdu_stats()
1614 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1618 /* restore all peers' data rate tlv to mu-bar tlv */ in ath12k_htt_pull_ppdu_stats()
1619 if (ppdu_info->frame_type == HTT_STATS_PPDU_FTYPE_BAR && in ath12k_htt_pull_ppdu_stats()
1620 (ppdu_info->tlv_bitmap & (1 << HTT_PPDU_STATS_TAG_USR_COMMON))) { in ath12k_htt_pull_ppdu_stats()
1621 for (i = 0; i < ppdu_info->bar_num_users; i++) { in ath12k_htt_pull_ppdu_stats()
1622 peer_id = ppdu_info->ppdu_stats.user_stats[i].peer_id; in ath12k_htt_pull_ppdu_stats()
1623 spin_lock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1626 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1630 usr_stats = &ppdu_info->ppdu_stats.user_stats[i]; in ath12k_htt_pull_ppdu_stats()
1631 if (peer->delayba_flag) in ath12k_htt_pull_ppdu_stats()
1633 spin_unlock_bh(&ab->base_lock); in ath12k_htt_pull_ppdu_stats()
1637 spin_unlock_bh(&ar->data_lock); in ath12k_htt_pull_ppdu_stats()
1653 msg = (struct ath12k_htt_mlo_offset_msg *)skb->data; in ath12k_htt_mlo_offset_event_handler()
1654 pdev_id = u32_get_bits(__le32_to_cpu(msg->info), in ath12k_htt_mlo_offset_event_handler()
1663 spin_lock_bh(&ar->data_lock); in ath12k_htt_mlo_offset_event_handler()
1664 pdev = ar->pdev; in ath12k_htt_mlo_offset_event_handler()
1666 pdev->timestamp.info = __le32_to_cpu(msg->info); in ath12k_htt_mlo_offset_event_handler()
1667 pdev->timestamp.sync_timestamp_lo_us = __le32_to_cpu(msg->sync_timestamp_lo_us); in ath12k_htt_mlo_offset_event_handler()
1668 pdev->timestamp.sync_timestamp_hi_us = __le32_to_cpu(msg->sync_timestamp_hi_us); in ath12k_htt_mlo_offset_event_handler()
1669 pdev->timestamp.mlo_offset_lo = __le32_to_cpu(msg->mlo_offset_lo); in ath12k_htt_mlo_offset_event_handler()
1670 pdev->timestamp.mlo_offset_hi = __le32_to_cpu(msg->mlo_offset_hi); in ath12k_htt_mlo_offset_event_handler()
1671 pdev->timestamp.mlo_offset_clks = __le32_to_cpu(msg->mlo_offset_clks); in ath12k_htt_mlo_offset_event_handler()
1672 pdev->timestamp.mlo_comp_clks = __le32_to_cpu(msg->mlo_comp_clks); in ath12k_htt_mlo_offset_event_handler()
1673 pdev->timestamp.mlo_comp_timer = __le32_to_cpu(msg->mlo_comp_timer); in ath12k_htt_mlo_offset_event_handler()
1675 spin_unlock_bh(&ar->data_lock); in ath12k_htt_mlo_offset_event_handler()
1681 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_htt_htc_t2h_msg_handler()
1682 struct htt_resp_msg *resp = (struct htt_resp_msg *)skb->data; in ath12k_dp_htt_htc_t2h_msg_handler()
1691 type = le32_get_bits(resp->version_msg.version, HTT_T2H_MSG_TYPE); in ath12k_dp_htt_htc_t2h_msg_handler()
1697 dp->htt_tgt_ver_major = le32_get_bits(resp->version_msg.version, in ath12k_dp_htt_htc_t2h_msg_handler()
1699 dp->htt_tgt_ver_minor = le32_get_bits(resp->version_msg.version, in ath12k_dp_htt_htc_t2h_msg_handler()
1701 complete(&dp->htt_tgt_version_received); in ath12k_dp_htt_htc_t2h_msg_handler()
1705 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1707 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1709 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1711 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1716 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1718 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1720 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1722 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1724 ast_hash = le32_get_bits(resp->peer_map_ev.info2, in ath12k_dp_htt_htc_t2h_msg_handler()
1726 hw_peer_id = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1732 vdev_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1734 peer_id = le32_get_bits(resp->peer_map_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1736 peer_mac_h16 = le32_get_bits(resp->peer_map_ev.info1, in ath12k_dp_htt_htc_t2h_msg_handler()
1738 ath12k_dp_get_mac_addr(le32_to_cpu(resp->peer_map_ev.mac_addr_l32), in ath12k_dp_htt_htc_t2h_msg_handler()
1745 peer_id = le32_get_bits(resp->peer_unmap_ev.info, in ath12k_dp_htt_htc_t2h_msg_handler()
1771 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_msdu_coalesce()
1777 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_msdu_coalesce()
1784 buf_first_len = DP_RX_BUFFER_SIZE - buf_first_hdr_len; in ath12k_dp_rx_msdu_coalesce()
1792 ldesc = (struct hal_rx_desc *)last->data; in ath12k_dp_rx_msdu_coalesce()
1793 rxcb->is_first_msdu = ath12k_dp_rx_h_first_msdu(ab, ldesc); in ath12k_dp_rx_msdu_coalesce()
1794 rxcb->is_last_msdu = ath12k_dp_rx_h_last_msdu(ab, ldesc); in ath12k_dp_rx_msdu_coalesce()
1797 * exceeds DP_RX_BUFFER_SIZE - HAL_RX_DESC_SIZE. So assume the data in ath12k_dp_rx_msdu_coalesce()
1798 * in the first buf is of length DP_RX_BUFFER_SIZE - HAL_RX_DESC_SIZE. in ath12k_dp_rx_msdu_coalesce()
1806 ath12k_dp_rx_desc_end_tlv_copy(ab, rxcb->rx_desc, ldesc); in ath12k_dp_rx_msdu_coalesce()
1808 space_extra = msdu_len - (buf_first_len + skb_tailroom(first)); in ath12k_dp_rx_msdu_coalesce()
1814 if (!rxcb->is_continuation) { in ath12k_dp_rx_msdu_coalesce()
1820 return -ENOMEM; in ath12k_dp_rx_msdu_coalesce()
1823 rem_len = msdu_len - buf_first_len; in ath12k_dp_rx_msdu_coalesce()
1826 if (rxcb->is_continuation) in ath12k_dp_rx_msdu_coalesce()
1827 buf_len = DP_RX_BUFFER_SIZE - hal_rx_desc_sz; in ath12k_dp_rx_msdu_coalesce()
1831 if (buf_len > (DP_RX_BUFFER_SIZE - hal_rx_desc_sz)) { in ath12k_dp_rx_msdu_coalesce()
1834 return -EINVAL; in ath12k_dp_rx_msdu_coalesce()
1843 rem_len -= buf_len; in ath12k_dp_rx_msdu_coalesce()
1844 if (!rxcb->is_continuation) in ath12k_dp_rx_msdu_coalesce()
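
The coalesce path above works from a simple length budget: every rx buffer loses the HAL descriptor at its head, continuation buffers carry a full payload, and the final buffer carries the remainder. A sketch of that bookkeeping with illustrative sizes:

#define RX_BUF_SIZE	2048u	/* illustrative */
#define HAL_DESC_SZ	128u	/* chip-dependent; assumption here */

static u32 frag_payload_len(u32 rem_len, bool is_continuation)
{
	/* Caller has already verified rem_len fits in one buffer. */
	return is_continuation ? RX_BUF_SIZE - HAL_DESC_SZ : rem_len;
}
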
1857 if (!rxcb->is_continuation) in ath12k_dp_rx_get_msdu_last_buf()
1862 if (!rxcb->is_continuation) in ath12k_dp_rx_get_msdu_last_buf()
1872 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_csum_offload()
1875 ip_csum_fail = ath12k_dp_rx_h_ip_cksum_fail(ab, rxcb->rx_desc); in ath12k_dp_rx_h_csum_offload()
1876 l4_csum_fail = ath12k_dp_rx_h_l4_cksum_fail(ab, rxcb->rx_desc); in ath12k_dp_rx_h_csum_offload()
1878 msdu->ip_summed = (ip_csum_fail || l4_csum_fail) ? in ath12k_dp_rx_h_csum_offload()
1905 ath12k_warn(ar->ab, "unsupported encryption type %d for mic len\n", enctype); in ath12k_dp_rx_crypto_mic_len()
1933 ath12k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath12k_dp_rx_crypto_param_len()
1958 ath12k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath12k_dp_rx_crypto_icv_len()
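
The three helpers above (mic_len, param_len, icv_len) are per-cipher length tables consulted when trimming decrypted frames. A hedged sketch using the lengths from linux/ieee80211.h; the cipher enum here is illustrative, not the HAL's:

#include <linux/ieee80211.h>

enum sketch_cipher { SKETCH_WEP, SKETCH_TKIP, SKETCH_CCMP128 };

static size_t param_len(enum sketch_cipher c)	/* IV/PN header bytes */
{
	switch (c) {
	case SKETCH_WEP:     return IEEE80211_WEP_IV_LEN;	/* 4 */
	case SKETCH_TKIP:    return IEEE80211_TKIP_IV_LEN;	/* 8 */
	case SKETCH_CCMP128: return IEEE80211_CCMP_HDR_LEN;	/* 8 */
	}
	return 0;
}

static size_t icv_len(enum sketch_cipher c)	/* trailing ICV bytes */
{
	switch (c) {
	case SKETCH_WEP:  return IEEE80211_WEP_ICV_LEN;		/* 4 */
	case SKETCH_TKIP: return IEEE80211_TKIP_ICV_LEN;	/* 4 */
	default:          return 0;	/* CCMP has a MIC, not an ICV */
	}
}
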
1967 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_undecap_nwifi()
1976 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_dp_rx_h_undecap_nwifi()
1977 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_nwifi()
1981 hdr->frame_control |= __cpu_to_le16(IEEE80211_STYPE_QOS_DATA); in ath12k_dp_rx_h_undecap_nwifi()
1984 hdr->frame_control &= ~(__cpu_to_le16(IEEE80211_FCTL_ORDER)); in ath12k_dp_rx_h_undecap_nwifi()
1986 qos_ctl = rxcb->tid; in ath12k_dp_rx_h_undecap_nwifi()
1988 if (ath12k_dp_rx_h_mesh_ctl_present(ab, rxcb->rx_desc)) in ath12k_dp_rx_h_undecap_nwifi()
1997 if (!(status->flag & RX_FLAG_IV_STRIPPED)) { in ath12k_dp_rx_h_undecap_nwifi()
1999 ath12k_dp_rx_desc_get_crypto_header(ar->ab, in ath12k_dp_rx_h_undecap_nwifi()
2000 rxcb->rx_desc, crypto_hdr, in ath12k_dp_rx_h_undecap_nwifi()
2020 if (!rxcb->is_first_msdu || in ath12k_dp_rx_h_undecap_raw()
2021 !(rxcb->is_first_msdu && rxcb->is_last_msdu)) { in ath12k_dp_rx_h_undecap_raw()
2026 skb_trim(msdu, msdu->len - FCS_LEN); in ath12k_dp_rx_h_undecap_raw()
2031 hdr = (void *)msdu->data; in ath12k_dp_rx_h_undecap_raw()
2034 if (status->flag & RX_FLAG_IV_STRIPPED) { in ath12k_dp_rx_h_undecap_raw()
2035 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2038 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2042 if (status->flag & RX_FLAG_MIC_STRIPPED) in ath12k_dp_rx_h_undecap_raw()
2043 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2047 if (status->flag & RX_FLAG_ICV_STRIPPED) in ath12k_dp_rx_h_undecap_raw()
2048 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_raw()
2053 if ((status->flag & RX_FLAG_MMIC_STRIPPED) && in ath12k_dp_rx_h_undecap_raw()
2054 !ieee80211_has_morefrags(hdr->frame_control) && in ath12k_dp_rx_h_undecap_raw()
2056 skb_trim(msdu, msdu->len - IEEE80211_CCMP_MIC_LEN); in ath12k_dp_rx_h_undecap_raw()
2059 if (status->flag & RX_FLAG_IV_STRIPPED) { in ath12k_dp_rx_h_undecap_raw()
2060 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_raw()
2063 memmove(msdu->data + crypto_len, msdu->data, hdr_len); in ath12k_dp_rx_h_undecap_raw()
2074 struct hal_rx_desc *rx_desc = rxcb->rx_desc; in ath12k_get_dot11_hdr_from_rx_desc()
2075 struct ath12k_base *ab = ar->ab; in ath12k_get_dot11_hdr_from_rx_desc()
2082 if (!(status->flag & RX_FLAG_IV_STRIPPED)) { in ath12k_get_dot11_hdr_from_rx_desc()
2091 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_get_dot11_hdr_from_rx_desc()
2092 hdr->frame_control = fc; in ath12k_get_dot11_hdr_from_rx_desc()
2097 if (rxcb->is_mcbc) in ath12k_get_dot11_hdr_from_rx_desc()
2098 status->flag &= ~RX_FLAG_PN_VALIDATED; in ath12k_get_dot11_hdr_from_rx_desc()
2101 if (ieee80211_is_data_qos(hdr->frame_control)) { in ath12k_get_dot11_hdr_from_rx_desc()
2102 qos_ctl = rxcb->tid; in ath12k_get_dot11_hdr_from_rx_desc()
2107 memcpy(msdu->data + (hdr_len - IEEE80211_QOS_CTL_LEN), in ath12k_get_dot11_hdr_from_rx_desc()
2124 eth = (struct ethhdr *)msdu->data; in ath12k_dp_rx_h_undecap_eth()
2125 ether_addr_copy(da, eth->h_dest); in ath12k_dp_rx_h_undecap_eth()
2126 ether_addr_copy(sa, eth->h_source); in ath12k_dp_rx_h_undecap_eth()
2127 rfc.snap_type = eth->h_proto; in ath12k_dp_rx_h_undecap_eth()
2136 hdr = (struct ieee80211_hdr *)msdu->data; in ath12k_dp_rx_h_undecap_eth()
2147 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_undecap()
2162 ehdr = (struct ethhdr *)msdu->data; in ath12k_dp_rx_h_undecap()
2165 if (ehdr->h_proto == cpu_to_be16(ETH_P_PAE)) { in ath12k_dp_rx_h_undecap()
2166 ATH12K_SKB_RXCB(msdu)->is_eapol = true; in ath12k_dp_rx_h_undecap()
2171 /* PN for mcast packets will be validated in mac80211; in ath12k_dp_rx_h_undecap()
2174 if (ATH12K_SKB_RXCB(msdu)->is_mcbc && decrypted) in ath12k_dp_rx_h_undecap()
2187 struct hal_rx_desc *rx_desc = rxcb->rx_desc; in ath12k_dp_rx_h_find_peer()
2190 lockdep_assert_held(&ab->base_lock); in ath12k_dp_rx_h_find_peer()
2192 if (rxcb->peer_id) in ath12k_dp_rx_h_find_peer()
2193 peer = ath12k_peer_find_by_id(ab, rxcb->peer_id); in ath12k_dp_rx_h_find_peer()
2213 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_mpdu()
2221 /* PN for multicast packets will be checked in mac80211 */ in ath12k_dp_rx_h_mpdu()
2223 fill_crypto_hdr = ath12k_dp_rx_h_is_da_mcbc(ar->ab, rx_desc); in ath12k_dp_rx_h_mpdu()
2224 rxcb->is_mcbc = fill_crypto_hdr; in ath12k_dp_rx_h_mpdu()
2226 if (rxcb->is_mcbc) in ath12k_dp_rx_h_mpdu()
2227 rxcb->peer_id = ath12k_dp_rx_h_peer_id(ar->ab, rx_desc); in ath12k_dp_rx_h_mpdu()
2229 spin_lock_bh(&ar->ab->base_lock); in ath12k_dp_rx_h_mpdu()
2230 peer = ath12k_dp_rx_h_find_peer(ar->ab, msdu); in ath12k_dp_rx_h_mpdu()
2232 if (rxcb->is_mcbc) in ath12k_dp_rx_h_mpdu()
2233 enctype = peer->sec_type_grp; in ath12k_dp_rx_h_mpdu()
2235 enctype = peer->sec_type; in ath12k_dp_rx_h_mpdu()
2239 spin_unlock_bh(&ar->ab->base_lock); in ath12k_dp_rx_h_mpdu()
2245 /* Clear per-MPDU flags while leaving per-PPDU flags intact */ in ath12k_dp_rx_h_mpdu()
2246 rx_status->flag &= ~(RX_FLAG_FAILED_FCS_CRC | in ath12k_dp_rx_h_mpdu()
2253 rx_status->flag |= RX_FLAG_FAILED_FCS_CRC; in ath12k_dp_rx_h_mpdu()
2255 rx_status->flag |= RX_FLAG_MMIC_ERROR; in ath12k_dp_rx_h_mpdu()
2258 rx_status->flag |= RX_FLAG_DECRYPTED | RX_FLAG_MMIC_STRIPPED; in ath12k_dp_rx_h_mpdu()
2261 rx_status->flag |= RX_FLAG_MIC_STRIPPED | in ath12k_dp_rx_h_mpdu()
2264 rx_status->flag |= RX_FLAG_IV_STRIPPED | in ath12k_dp_rx_h_mpdu()
2275 if (ath12k_dp_rx_h_decap_type(ar->ab, rx_desc) != in ath12k_dp_rx_h_mpdu()
2277 hdr = (void *)msdu->data; in ath12k_dp_rx_h_mpdu()
2278 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_FCTL_PROTECTED); in ath12k_dp_rx_h_mpdu()
2285 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_rate()
2303 sband = &ar->mac.sbands[rx_status->band]; in ath12k_dp_rx_h_rate()
2304 rx_status->rate_idx = ath12k_mac_hw_rate_to_idx(sband, rate_mcs, in ath12k_dp_rx_h_rate()
2308 rx_status->encoding = RX_ENC_HT; in ath12k_dp_rx_h_rate()
2310 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2315 rx_status->rate_idx = rate_mcs + (8 * (nss - 1)); in ath12k_dp_rx_h_rate()
2317 rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI; in ath12k_dp_rx_h_rate()
2318 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2321 rx_status->encoding = RX_ENC_VHT; in ath12k_dp_rx_h_rate()
2322 rx_status->rate_idx = rate_mcs; in ath12k_dp_rx_h_rate()
2324 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2329 rx_status->nss = nss; in ath12k_dp_rx_h_rate()
2331 rx_status->enc_flags |= RX_ENC_FLAG_SHORT_GI; in ath12k_dp_rx_h_rate()
2332 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2335 rx_status->rate_idx = rate_mcs; in ath12k_dp_rx_h_rate()
2337 ath12k_warn(ar->ab, in ath12k_dp_rx_h_rate()
2342 rx_status->encoding = RX_ENC_HE; in ath12k_dp_rx_h_rate()
2343 rx_status->nss = nss; in ath12k_dp_rx_h_rate()
2344 rx_status->he_gi = ath12k_he_gi_to_nl80211_he_gi(sgi); in ath12k_dp_rx_h_rate()
2345 rx_status->bw = ath12k_mac_bw_to_mac80211_bw(bw); in ath12k_dp_rx_h_rate()
2353 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_ppdu()
2358 rx_status->freq = 0; in ath12k_dp_rx_h_ppdu()
2359 rx_status->rate_idx = 0; in ath12k_dp_rx_h_ppdu()
2360 rx_status->nss = 0; in ath12k_dp_rx_h_ppdu()
2361 rx_status->encoding = RX_ENC_LEGACY; in ath12k_dp_rx_h_ppdu()
2362 rx_status->bw = RATE_INFO_BW_20; in ath12k_dp_rx_h_ppdu()
2363 rx_status->enc_flags = 0; in ath12k_dp_rx_h_ppdu()
2365 rx_status->flag |= RX_FLAG_NO_SIGNAL_VAL; in ath12k_dp_rx_h_ppdu()
2372 rx_status->band = NL80211_BAND_6GHZ; in ath12k_dp_rx_h_ppdu()
2374 rx_status->band = NL80211_BAND_2GHZ; in ath12k_dp_rx_h_ppdu()
2376 rx_status->band = NL80211_BAND_5GHZ; in ath12k_dp_rx_h_ppdu()
2378 spin_lock_bh(&ar->data_lock); in ath12k_dp_rx_h_ppdu()
2379 channel = ar->rx_channel; in ath12k_dp_rx_h_ppdu()
2381 rx_status->band = channel->band; in ath12k_dp_rx_h_ppdu()
2383 ieee80211_frequency_to_channel(channel->center_freq); in ath12k_dp_rx_h_ppdu()
2385 spin_unlock_bh(&ar->data_lock); in ath12k_dp_rx_h_ppdu()
2386 ath12k_dbg_dump(ar->ab, ATH12K_DBG_DATA, NULL, "rx_desc: ", in ath12k_dp_rx_h_ppdu()
2390 rx_status->freq = ieee80211_channel_to_frequency(channel_num, in ath12k_dp_rx_h_ppdu()
2391 rx_status->band); in ath12k_dp_rx_h_ppdu()
2400 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_deliver_msdu()
2412 bool is_mcbc = rxcb->is_mcbc; in ath12k_dp_rx_deliver_msdu()
2413 bool is_eapol = rxcb->is_eapol; in ath12k_dp_rx_deliver_msdu()
2415 if (status->encoding == RX_ENC_HE && !(status->flag & RX_FLAG_RADIOTAP_HE) && in ath12k_dp_rx_deliver_msdu()
2416 !(status->flag & RX_FLAG_SKIP_MONITOR)) { in ath12k_dp_rx_deliver_msdu()
2419 status->flag |= RX_FLAG_RADIOTAP_HE; in ath12k_dp_rx_deliver_msdu()
2422 if (!(status->flag & RX_FLAG_ONLY_MONITOR)) in ath12k_dp_rx_deliver_msdu()
2423 decap = ath12k_dp_rx_h_decap_type(ab, rxcb->rx_desc); in ath12k_dp_rx_deliver_msdu()
2425 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_deliver_msdu()
2428 pubsta = peer ? peer->sta : NULL; in ath12k_dp_rx_deliver_msdu()
2430 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_deliver_msdu()
2433 …s%s%s%s%s%s rate_idx %u vht_nss %u freq %u band %u flag 0x%x fcs-err %i mic-err %i amsdu-more %i\n… in ath12k_dp_rx_deliver_msdu()
2435 msdu->len, in ath12k_dp_rx_deliver_msdu()
2436 peer ? peer->addr : NULL, in ath12k_dp_rx_deliver_msdu()
2437 rxcb->tid, in ath12k_dp_rx_deliver_msdu()
2439 ath12k_dp_rx_h_seq_no(ab, rxcb->rx_desc), in ath12k_dp_rx_deliver_msdu()
2440 (status->encoding == RX_ENC_LEGACY) ? "legacy" : "", in ath12k_dp_rx_deliver_msdu()
2441 (status->encoding == RX_ENC_HT) ? "ht" : "", in ath12k_dp_rx_deliver_msdu()
2442 (status->encoding == RX_ENC_VHT) ? "vht" : "", in ath12k_dp_rx_deliver_msdu()
2443 (status->encoding == RX_ENC_HE) ? "he" : "", in ath12k_dp_rx_deliver_msdu()
2444 (status->bw == RATE_INFO_BW_40) ? "40" : "", in ath12k_dp_rx_deliver_msdu()
2445 (status->bw == RATE_INFO_BW_80) ? "80" : "", in ath12k_dp_rx_deliver_msdu()
2446 (status->bw == RATE_INFO_BW_160) ? "160" : "", in ath12k_dp_rx_deliver_msdu()
2447 status->enc_flags & RX_ENC_FLAG_SHORT_GI ? "sgi " : "", in ath12k_dp_rx_deliver_msdu()
2448 status->rate_idx, in ath12k_dp_rx_deliver_msdu()
2449 status->nss, in ath12k_dp_rx_deliver_msdu()
2450 status->freq, in ath12k_dp_rx_deliver_msdu()
2451 status->band, status->flag, in ath12k_dp_rx_deliver_msdu()
2452 !!(status->flag & RX_FLAG_FAILED_FCS_CRC), in ath12k_dp_rx_deliver_msdu()
2453 !!(status->flag & RX_FLAG_MMIC_ERROR), in ath12k_dp_rx_deliver_msdu()
2454 !!(status->flag & RX_FLAG_AMSDU_MORE)); in ath12k_dp_rx_deliver_msdu()
2457 msdu->data, msdu->len); in ath12k_dp_rx_deliver_msdu()
2464 /* PN for multicast packets is not validated in HW, in ath12k_dp_rx_deliver_msdu()
2470 !(is_mcbc && rx_status->flag & RX_FLAG_DECRYPTED)) in ath12k_dp_rx_deliver_msdu()
2471 rx_status->flag |= RX_FLAG_8023; in ath12k_dp_rx_deliver_msdu()
2473 ieee80211_rx_napi(ar->hw, pubsta, msdu, napi); in ath12k_dp_rx_deliver_msdu()
2481 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_process_msdu()
2488 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_process_msdu()
2494 ret = -EIO; in ath12k_dp_rx_process_msdu()
2498 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_process_msdu()
2499 lrx_desc = (struct hal_rx_desc *)last_buf->data; in ath12k_dp_rx_process_msdu()
2502 ret = -EIO; in ath12k_dp_rx_process_msdu()
2507 rxcb->rx_desc = rx_desc; in ath12k_dp_rx_process_msdu()
2511 if (rxcb->is_frag) { in ath12k_dp_rx_process_msdu()
2513 } else if (!rxcb->is_continuation) { in ath12k_dp_rx_process_msdu()
2515 ret = -EINVAL; in ath12k_dp_rx_process_msdu()
2537 rx_status->flag |= RX_FLAG_SKIP_MONITOR | RX_FLAG_DUP_VALIDATED; in ath12k_dp_rx_process_msdu()
2564 mac_id = rxcb->mac_id; in ath12k_dp_rx_process_received_packets()
2565 pdev_id = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id); in ath12k_dp_rx_process_received_packets()
2566 ar = ab->pdevs[pdev_id].ar; in ath12k_dp_rx_process_received_packets()
2567 if (!rcu_dereference(ab->pdevs_active[pdev_id])) { in ath12k_dp_rx_process_received_packets()
2572 if (test_bit(ATH12K_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_rx_process_received_packets()
2595 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process()
2596 struct dp_rxdma_ring *rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rx_process()
2610 srng = &ab->hal.srng_list[dp->reo_dst_ring[ring_id].ring_id]; in ath12k_dp_rx_process()
2612 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process()
2621 cookie = le32_get_bits(desc->buf_addr_info.info1, in ath12k_dp_rx_process()
2624 mac_id = le32_get_bits(desc->info0, in ath12k_dp_rx_process()
2627 desc_va = ((u64)le32_to_cpu(desc->buf_va_hi) << 32 | in ath12k_dp_rx_process()
2628 le32_to_cpu(desc->buf_va_lo)); in ath12k_dp_rx_process()
2631 /* retry manual desc retrieval */ in ath12k_dp_rx_process()
2640 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_rx_process()
2643 msdu = desc_info->skb; in ath12k_dp_rx_process()
2644 desc_info->skb = NULL; in ath12k_dp_rx_process()
2646 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_process()
2647 list_move_tail(&desc_info->list, &dp->rx_desc_free_list); in ath12k_dp_rx_process()
2648 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_process()
2651 dma_unmap_single(ab->dev, rxcb->paddr, in ath12k_dp_rx_process()
2652 msdu->len + skb_tailroom(msdu), in ath12k_dp_rx_process()
2657 push_reason = le32_get_bits(desc->info0, in ath12k_dp_rx_process()
2662 ab->soc_stats.hal_reo_error[dp->reo_dst_ring[ring_id].ring_id]++; in ath12k_dp_rx_process()
2666 rxcb->is_first_msdu = !!(le32_to_cpu(desc->rx_msdu_info.info0) & in ath12k_dp_rx_process()
2668 rxcb->is_last_msdu = !!(le32_to_cpu(desc->rx_msdu_info.info0) & in ath12k_dp_rx_process()
2670 rxcb->is_continuation = !!(le32_to_cpu(desc->rx_msdu_info.info0) & in ath12k_dp_rx_process()
2672 rxcb->mac_id = mac_id; in ath12k_dp_rx_process()
2673 rxcb->peer_id = le32_get_bits(desc->rx_mpdu_info.peer_meta_data, in ath12k_dp_rx_process()
2675 rxcb->tid = le32_get_bits(desc->rx_mpdu_info.info0, in ath12k_dp_rx_process()
2680 if (!rxcb->is_continuation) { in ath12k_dp_rx_process()
2704 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process()
2711 ab->hw_params->hal_params->rx_buf_rbm, true); in ath12k_dp_rx_process()
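
The reap loop above stashes a 64-bit CPU pointer to the software rx descriptor in the ring entry and reassembles it from buf_va_hi/buf_va_lo before validating the magic value. The reassembly as a helper:

#include <linux/types.h>

static void *rx_desc_va(u32 buf_va_hi, u32 buf_va_lo)
{
	u64 va = ((u64)buf_va_hi << 32) | buf_va_lo;

	return (void *)(unsigned long)va;
}
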
2724 spin_lock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
2725 if (rx_tid->last_frag_no && in ath12k_dp_rx_frag_timer()
2726 rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) { in ath12k_dp_rx_frag_timer()
2727 spin_unlock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
2731 spin_unlock_bh(&rx_tid->ab->base_lock); in ath12k_dp_rx_frag_timer()
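
The frag timer's completeness test above relies on fragment n setting bit n of rx_frag_bitmap, so with last_frag_no == 3 a full set is GENMASK(3, 0) == 0xf. As a helper:

#include <linux/bits.h>

static bool defrag_complete(u32 rx_frag_bitmap, u8 last_frag_no)
{
	return last_frag_no &&
	       rx_frag_bitmap == GENMASK(last_frag_no, 0);
}
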
2736 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_peer_frag_setup()
2746 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
2750 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
2752 return -ENOENT; in ath12k_dp_rx_peer_frag_setup()
2756 rx_tid = &peer->rx_tid[i]; in ath12k_dp_rx_peer_frag_setup()
2757 rx_tid->ab = ab; in ath12k_dp_rx_peer_frag_setup()
2758 timer_setup(&rx_tid->frag_timer, ath12k_dp_rx_frag_timer, 0); in ath12k_dp_rx_peer_frag_setup()
2759 skb_queue_head_init(&rx_tid->rx_frags); in ath12k_dp_rx_peer_frag_setup()
2762 peer->tfm_mmic = tfm; in ath12k_dp_rx_peer_frag_setup()
2763 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_peer_frag_setup()
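The elided lines before 2762 allocate the transform stored in peer->tfm_mmic, which ath12k_dp_rx_h_verify_tkip_mic() later uses for the Michael MIC. A hedged sketch of the presumed allocation (kernel crypto shash API, error handling trimmed; not a verbatim quote of the driver):

	/* #include <crypto/hash.h> */
	struct crypto_shash *tfm;

	tfm = crypto_alloc_shash("michael_mic", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* no TKIP MIC verification without it */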
2778 return -EINVAL; in ath12k_dp_rx_h_michael_mic()
2780 desc->tfm = tfm; in ath12k_dp_rx_h_michael_mic()
2793 if (ieee80211_is_data_qos(hdr->frame_control)) in ath12k_dp_rx_h_michael_mic()
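Michael MIC is computed over a 16-byte pseudo-header followed by the payload, and the QoS test above (2793) selects the priority byte for that header. A self-contained model of the pseudo-header layout defined by 802.11 for TKIP (DA, SA, priority, three zero pads; the helper name is illustrative):

	#include <stdint.h>
	#include <string.h>

	/* Build the 16-byte Michael MIC header: destination address,
	 * source address, TID/priority, then 3 reserved zero bytes. */
	static void michael_hdr(uint8_t out[16], const uint8_t da[6],
				const uint8_t sa[6], uint8_t priority)
	{
		memcpy(out, da, 6);
		memcpy(out + 6, sa, 6);
		out[12] = priority;
		memset(out + 13, 0, 3);
	}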
2812 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_verify_tkip_mic()
2813 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_verify_tkip_mic()
2820 u32 hdr_len, hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_verify_tkip_mic()
2827 hdr = (struct ieee80211_hdr *)(msdu->data + hal_rx_desc_sz); in ath12k_dp_rx_h_verify_tkip_mic()
2828 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_verify_tkip_mic()
2832 if (!is_multicast_ether_addr(hdr->addr1)) in ath12k_dp_rx_h_verify_tkip_mic()
2833 key_idx = peer->ucast_keyidx; in ath12k_dp_rx_h_verify_tkip_mic()
2835 key_idx = peer->mcast_keyidx; in ath12k_dp_rx_h_verify_tkip_mic()
2837 key_conf = peer->keys[key_idx]; in ath12k_dp_rx_h_verify_tkip_mic()
2839 data = msdu->data + head_len; in ath12k_dp_rx_h_verify_tkip_mic()
2840 data_len = msdu->len - head_len - tail_len; in ath12k_dp_rx_h_verify_tkip_mic()
2841 key = &key_conf->key[NL80211_TKIP_DATA_OFFSET_RX_MIC_KEY]; in ath12k_dp_rx_h_verify_tkip_mic()
2843 ret = ath12k_dp_rx_h_michael_mic(peer->tfm_mmic, key, hdr, data, data_len, mic); in ath12k_dp_rx_h_verify_tkip_mic()
2850 (ATH12K_SKB_RXCB(msdu))->is_first_msdu = true; in ath12k_dp_rx_h_verify_tkip_mic()
2851 (ATH12K_SKB_RXCB(msdu))->is_last_msdu = true; in ath12k_dp_rx_h_verify_tkip_mic()
2853 rxs->flag |= RX_FLAG_MMIC_ERROR | RX_FLAG_MMIC_STRIPPED | in ath12k_dp_rx_h_verify_tkip_mic()
2860 ieee80211_rx(ar->hw, msdu); in ath12k_dp_rx_h_verify_tkip_mic()
2861 return -EINVAL; in ath12k_dp_rx_h_verify_tkip_mic()
2870 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_undecap_frag()
2875 hdr = (struct ieee80211_hdr *)(msdu->data + hal_rx_desc_sz); in ath12k_dp_rx_h_undecap_frag()
2878 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_frag()
2882 skb_trim(msdu, msdu->len - in ath12k_dp_rx_h_undecap_frag()
2886 hdr_len = ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_undecap_frag()
2889 memmove(msdu->data + hal_rx_desc_sz + crypto_len, in ath12k_dp_rx_h_undecap_frag()
2890 msdu->data + hal_rx_desc_sz, hdr_len); in ath12k_dp_rx_h_undecap_frag()
2900 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag()
2908 u32 flags, hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_defrag()
2910 first_frag = skb_peek(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
2911 last_frag = skb_peek_tail(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
2913 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath12k_dp_rx_h_defrag()
2915 rx_desc = (struct hal_rx_desc *)skb->data; in ath12k_dp_rx_h_defrag()
2916 hdr = (struct ieee80211_hdr *)(skb->data + hal_rx_desc_sz); in ath12k_dp_rx_h_defrag()
2933 skb_trim(skb, skb->len - FCS_LEN); in ath12k_dp_rx_h_defrag()
2938 ieee80211_hdrlen(hdr->frame_control)); in ath12k_dp_rx_h_defrag()
2939 msdu_len += skb->len; in ath12k_dp_rx_h_defrag()
2942 extra_space = msdu_len - (DP_RX_BUFFER_SIZE + skb_tailroom(first_frag)); in ath12k_dp_rx_h_defrag()
2945 return -ENOMEM; in ath12k_dp_rx_h_defrag()
2947 __skb_unlink(first_frag, &rx_tid->rx_frags); in ath12k_dp_rx_h_defrag()
2948 while ((skb = __skb_dequeue(&rx_tid->rx_frags))) { in ath12k_dp_rx_h_defrag()
2949 skb_put_data(first_frag, skb->data, skb->len); in ath12k_dp_rx_h_defrag()
2953 hdr = (struct ieee80211_hdr *)(first_frag->data + hal_rx_desc_sz); in ath12k_dp_rx_h_defrag()
2954 hdr->frame_control &= ~__cpu_to_le16(IEEE80211_FCTL_MOREFRAGS); in ath12k_dp_rx_h_defrag()
2955 ATH12K_SKB_RXCB(first_frag)->is_frag = 1; in ath12k_dp_rx_h_defrag()
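Reassembly above works in place on the first fragment: the last fragment loses its FCS (2933), later fragments lose their 802.11 headers (2938), and the payloads are appended into the first buffer; when the total exceeds what that buffer plus its tailroom can hold, the head is presumably expanded by the extra_space computed at 2942. A simplified model of that sizing step (names hypothetical):

	#include <stdint.h>

	/* Bytes the head buffer must grow by before coalescing: total
	 * MSDU length minus what the first buffer can already hold. */
	static int defrag_extra_space(uint32_t msdu_len, uint32_t buf_size,
				      uint32_t tailroom)
	{
		int extra = (int)msdu_len - (int)(buf_size + tailroom);

		return extra > 0 ? extra : 0;	/* <= 0: already fits */
	}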
2968 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag_reo_reinject()
2969 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_h_defrag_reo_reinject()
2970 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)defrag_skb->data; in ath12k_dp_rx_h_defrag_reo_reinject()
2984 hal_rx_desc_sz = ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_defrag_reo_reinject()
2985 link_desc_banks = dp->link_desc_banks; in ath12k_dp_rx_h_defrag_reo_reinject()
2986 reo_dest_ring = rx_tid->dst_ring_desc; in ath12k_dp_rx_h_defrag_reo_reinject()
2988 ath12k_hal_rx_reo_ent_paddr_get(ab, &reo_dest_ring->buf_addr_info, in ath12k_dp_rx_h_defrag_reo_reinject()
2994 (link_paddr - link_desc_banks[desc_bank].paddr)); in ath12k_dp_rx_h_defrag_reo_reinject()
2997 (link_paddr - link_desc_banks[desc_bank].paddr)); in ath12k_dp_rx_h_defrag_reo_reinject()
2999 msdu0 = &msdu_link->msdu_link[0]; in ath12k_dp_rx_h_defrag_reo_reinject()
3000 msdu_ext_info = le32_to_cpu(msdu0->rx_msdu_ext_info.info0); in ath12k_dp_rx_h_defrag_reo_reinject()
3008 u32_encode_bits(defrag_skb->len - hal_rx_desc_sz, in ath12k_dp_rx_h_defrag_reo_reinject()
3012 msdu0->rx_msdu_info.info0 = cpu_to_le32(msdu_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3013 msdu0->rx_msdu_ext_info.info0 = cpu_to_le32(msdu_ext_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3016 ath12k_dp_rxdesc_set_msdu_len(ab, rx_desc, defrag_skb->len - hal_rx_desc_sz); in ath12k_dp_rx_h_defrag_reo_reinject()
3018 buf_paddr = dma_map_single(ab->dev, defrag_skb->data, in ath12k_dp_rx_h_defrag_reo_reinject()
3019 defrag_skb->len + skb_tailroom(defrag_skb), in ath12k_dp_rx_h_defrag_reo_reinject()
3021 if (dma_mapping_error(ab->dev, buf_paddr)) in ath12k_dp_rx_h_defrag_reo_reinject()
3022 return -ENOMEM; in ath12k_dp_rx_h_defrag_reo_reinject()
3024 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3025 desc_info = list_first_entry_or_null(&dp->rx_desc_free_list, in ath12k_dp_rx_h_defrag_reo_reinject()
3029 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3031 ret = -ENOMEM; in ath12k_dp_rx_h_defrag_reo_reinject()
3035 desc_info->skb = defrag_skb; in ath12k_dp_rx_h_defrag_reo_reinject()
3037 list_del(&desc_info->list); in ath12k_dp_rx_h_defrag_reo_reinject()
3038 list_add_tail(&desc_info->list, &dp->rx_desc_used_list); in ath12k_dp_rx_h_defrag_reo_reinject()
3039 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3041 ATH12K_SKB_RXCB(defrag_skb)->paddr = buf_paddr; in ath12k_dp_rx_h_defrag_reo_reinject()
3043 ath12k_hal_rx_buf_addr_info_set(&msdu0->buf_addr_info, buf_paddr, in ath12k_dp_rx_h_defrag_reo_reinject()
3044 desc_info->cookie, in ath12k_dp_rx_h_defrag_reo_reinject()
3048 srng = &ab->hal.srng_list[dp->reo_reinject_ring.ring_id]; in ath12k_dp_rx_h_defrag_reo_reinject()
3050 spin_lock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3056 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3057 ret = -ENOSPC; in ath12k_dp_rx_h_defrag_reo_reinject()
3062 ath12k_hal_rx_buf_addr_info_set(&reo_ent_ring->buf_addr_info, link_paddr, in ath12k_dp_rx_h_defrag_reo_reinject()
3070 u32_encode_bits(rx_tid->tid, RX_MPDU_DESC_INFO0_TID); in ath12k_dp_rx_h_defrag_reo_reinject()
3072 reo_ent_ring->rx_mpdu_info.info0 = cpu_to_le32(mpdu_info); in ath12k_dp_rx_h_defrag_reo_reinject()
3073 reo_ent_ring->rx_mpdu_info.peer_meta_data = in ath12k_dp_rx_h_defrag_reo_reinject()
3074 reo_dest_ring->rx_mpdu_info.peer_meta_data; in ath12k_dp_rx_h_defrag_reo_reinject()
3081 reo_ent_ring->queue_addr_lo = reo_dest_ring->rx_mpdu_info.peer_meta_data; in ath12k_dp_rx_h_defrag_reo_reinject()
3082 reo_ent_ring->info0 = le32_encode_bits(dst_ind, in ath12k_dp_rx_h_defrag_reo_reinject()
3085 reo_ent_ring->info1 = le32_encode_bits(rx_tid->cur_sn, in ath12k_dp_rx_h_defrag_reo_reinject()
3087 dest_ring_info0 = le32_get_bits(reo_dest_ring->info0, in ath12k_dp_rx_h_defrag_reo_reinject()
3089 reo_ent_ring->info2 = in ath12k_dp_rx_h_defrag_reo_reinject()
3094 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3099 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3100 list_del(&desc_info->list); in ath12k_dp_rx_h_defrag_reo_reinject()
3101 list_add_tail(&desc_info->list, &dp->rx_desc_free_list); in ath12k_dp_rx_h_defrag_reo_reinject()
3102 desc_info->skb = NULL; in ath12k_dp_rx_h_defrag_reo_reinject()
3103 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_h_defrag_reo_reinject()
3105 dma_unmap_single(ab->dev, buf_paddr, defrag_skb->len + skb_tailroom(defrag_skb), in ath12k_dp_rx_h_defrag_reo_reinject()
3118 return frag1 - frag2; in ath12k_dp_rx_h_cmp_frags()
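The comparator above returns the difference of the two fragment numbers, so the sort helper invoked at 3252 keeps the per-TID fragment queue ordered by frag_no as out-of-order fragments arrive. A minimal model of such a sorted insertion over a plain array (illustrative only, not the skb-queue walk the driver uses):

	/* Insert frag_no into an ascending array of fragment numbers;
	 * returns the new element count. */
	static int insert_sorted(int *frags, int count, int frag_no)
	{
		int i = count;

		while (i > 0 && frags[i - 1] > frag_no) {
			frags[i] = frags[i - 1];	/* shift larger entries up */
			i--;
		}
		frags[i] = frag_no;
		return count + 1;
	}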
3141 u64 pn = 0; in ath12k_dp_rx_h_get_pn() local
3143 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_get_pn()
3145 hdr = (struct ieee80211_hdr *)(skb->data + hal_rx_desc_sz); in ath12k_dp_rx_h_get_pn()
3146 ehdr = skb->data + hal_rx_desc_sz + ieee80211_hdrlen(hdr->frame_control); in ath12k_dp_rx_h_get_pn()
3148 pn = ehdr[0]; in ath12k_dp_rx_h_get_pn()
3149 pn |= (u64)ehdr[1] << 8; in ath12k_dp_rx_h_get_pn()
3150 pn |= (u64)ehdr[4] << 16; in ath12k_dp_rx_h_get_pn()
3151 pn |= (u64)ehdr[5] << 24; in ath12k_dp_rx_h_get_pn()
3152 pn |= (u64)ehdr[6] << 32; in ath12k_dp_rx_h_get_pn()
3153 pn |= (u64)ehdr[7] << 40; in ath12k_dp_rx_h_get_pn()
3155 return pn; in ath12k_dp_rx_h_get_pn()
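The byte indices above skip ehdr[2] and ehdr[3] deliberately: in the CCMP/GCMP IV that follows the 802.11 header, byte 2 is reserved and byte 3 carries the key ID, so the 48-bit packet number lives in bytes 0, 1 and 4..7. The same extraction as a standalone function:

	#include <stdint.h>

	/* 48-bit CCMP/GCMP packet number from the 8-byte IV; iv[2] is
	 * reserved and iv[3] holds the key ID, hence the gap. */
	static uint64_t ccmp_pn(const uint8_t iv[8])
	{
		return (uint64_t)iv[0] |
		       (uint64_t)iv[1] << 8 |
		       (uint64_t)iv[4] << 16 |
		       (uint64_t)iv[5] << 24 |
		       (uint64_t)iv[6] << 32 |
		       (uint64_t)iv[7] << 40;
	}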
3161 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_defrag_validate_incr_pn()
3168 first_frag = skb_peek(&rx_tid->rx_frags); in ath12k_dp_rx_h_defrag_validate_incr_pn()
3169 desc = (struct hal_rx_desc *)first_frag->data; in ath12k_dp_rx_h_defrag_validate_incr_pn()
3179 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath12k_dp_rx_h_defrag_validate_incr_pn()
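The walk starting at 3179 appears to enforce PN monotonicity across the queued fragments: each fragment's packet number must be exactly one greater than its predecessor's, otherwise the MPDU is rejected as a potential replay. A compact model of that invariant, assuming the PNs have already been extracted per fragment:

	#include <stdbool.h>
	#include <stdint.h>

	/* Fragments of one MPDU must carry strictly consecutive PNs. */
	static bool pns_consecutive(const uint64_t *pn, int nfrags)
	{
		for (int i = 1; i < nfrags; i++)
			if (pn[i] != pn[i - 1] + 1)
				return false;
		return true;
	}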
3195 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_frag_h_mpdu()
3206 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_frag_h_mpdu()
3216 return -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3224 return -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3226 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3229 ath12k_warn(ab, "failed to find the peer to de-fragment the received fragment, peer_id %d\n", in ath12k_dp_rx_frag_h_mpdu()
3231 ret = -ENOENT; in ath12k_dp_rx_frag_h_mpdu()
3234 rx_tid = &peer->rx_tid[tid]; in ath12k_dp_rx_frag_h_mpdu()
3236 if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) || in ath12k_dp_rx_frag_h_mpdu()
3237 skb_queue_empty(&rx_tid->rx_frags)) { in ath12k_dp_rx_frag_h_mpdu()
3240 rx_tid->cur_sn = seqno; in ath12k_dp_rx_frag_h_mpdu()
3243 if (rx_tid->rx_frag_bitmap & BIT(frag_no)) { in ath12k_dp_rx_frag_h_mpdu()
3245 ret = -EINVAL; in ath12k_dp_rx_frag_h_mpdu()
3249 if (frag_no > __fls(rx_tid->rx_frag_bitmap)) in ath12k_dp_rx_frag_h_mpdu()
3250 __skb_queue_tail(&rx_tid->rx_frags, msdu); in ath12k_dp_rx_frag_h_mpdu()
3252 ath12k_dp_rx_h_sort_frags(ab, &rx_tid->rx_frags, msdu); in ath12k_dp_rx_frag_h_mpdu()
3254 rx_tid->rx_frag_bitmap |= BIT(frag_no); in ath12k_dp_rx_frag_h_mpdu()
3256 rx_tid->last_frag_no = frag_no; in ath12k_dp_rx_frag_h_mpdu()
3259 rx_tid->dst_ring_desc = kmemdup(ring_desc, in ath12k_dp_rx_frag_h_mpdu()
3260 sizeof(*rx_tid->dst_ring_desc), in ath12k_dp_rx_frag_h_mpdu()
3262 if (!rx_tid->dst_ring_desc) { in ath12k_dp_rx_frag_h_mpdu()
3263 ret = -ENOMEM; in ath12k_dp_rx_frag_h_mpdu()
3271 if (!rx_tid->last_frag_no || in ath12k_dp_rx_frag_h_mpdu()
3272 rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) { in ath12k_dp_rx_frag_h_mpdu()
3273 mod_timer(&rx_tid->frag_timer, jiffies + in ath12k_dp_rx_frag_h_mpdu()
3278 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3279 del_timer_sync(&rx_tid->frag_timer); in ath12k_dp_rx_frag_h_mpdu()
3280 spin_lock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3305 spin_unlock_bh(&ab->base_lock); in ath12k_dp_rx_frag_h_mpdu()
3313 struct ath12k_base *ab = ar->ab; in ath12k_dp_process_rx_err_buf()
3318 u32 hal_rx_desc_sz = ab->hw_params->hal_desc_sz; in ath12k_dp_process_rx_err_buf()
3322 desc_va = ((u64)le32_to_cpu(desc->buf_va_hi) << 32 | in ath12k_dp_process_rx_err_buf()
3323 le32_to_cpu(desc->buf_va_lo)); in ath12k_dp_process_rx_err_buf()
3326 /* retry manual descriptor retrieval if HW cookie conversion is not done */ in ath12k_dp_process_rx_err_buf()
3331 return -EINVAL; in ath12k_dp_process_rx_err_buf()
3335 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_process_rx_err_buf()
3338 msdu = desc_info->skb; in ath12k_dp_process_rx_err_buf()
3339 desc_info->skb = NULL; in ath12k_dp_process_rx_err_buf()
3340 spin_lock_bh(&ab->dp.rx_desc_lock); in ath12k_dp_process_rx_err_buf()
3341 list_move_tail(&desc_info->list, &ab->dp.rx_desc_free_list); in ath12k_dp_process_rx_err_buf()
3342 spin_unlock_bh(&ab->dp.rx_desc_lock); in ath12k_dp_process_rx_err_buf()
3345 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath12k_dp_process_rx_err_buf()
3346 msdu->len + skb_tailroom(msdu), in ath12k_dp_process_rx_err_buf()
3355 if (!rcu_dereference(ar->ab->pdevs_active[ar->pdev_idx])) { in ath12k_dp_process_rx_err_buf()
3360 if (test_bit(ATH12K_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_process_rx_err_buf()
3365 rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_process_rx_err_buf()
3366 msdu_len = ath12k_dp_rx_h_msdu_len(ar->ab, rx_desc); in ath12k_dp_process_rx_err_buf()
3368 ath12k_warn(ar->ab, "invalid msdu length %u\n", msdu_len); in ath12k_dp_process_rx_err_buf()
3369 ath12k_dbg_dump(ar->ab, ATH12K_DBG_DATA, NULL, "", rx_desc, in ath12k_dp_process_rx_err_buf()
3379 ath12k_dp_rx_link_desc_return(ar->ab, desc, in ath12k_dp_process_rx_err_buf()
3411 dp = &ab->dp; in ath12k_dp_rx_process_err()
3412 reo_except = &dp->reo_except_ring; in ath12k_dp_rx_process_err()
3413 link_desc_banks = dp->link_desc_banks; in ath12k_dp_rx_process_err()
3415 srng = &ab->hal.srng_list[reo_except->ring_id]; in ath12k_dp_rx_process_err()
3417 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_err()
3423 ab->soc_stats.err_ring_pkts++; in ath12k_dp_rx_process_err()
3433 (paddr - link_desc_banks[desc_bank].paddr); in ath12k_dp_rx_process_err()
3436 (paddr - link_desc_banks[desc_bank].paddr)); in ath12k_dp_rx_process_err()
3442 rbm != ab->hw_params->hal_params->rx_buf_rbm) { in ath12k_dp_rx_process_err()
3443 ab->soc_stats.invalid_rbm++; in ath12k_dp_rx_process_err()
3450 is_frag = !!(le32_to_cpu(reo_desc->rx_mpdu_info.info0) & in ath12k_dp_rx_process_err()
3464 mac_id = le32_get_bits(reo_desc->info0, in ath12k_dp_rx_process_err()
3467 pdev_id = ath12k_hw_mac_id_to_pdev_id(ab->hw_params, mac_id); in ath12k_dp_rx_process_err()
3468 ar = ab->pdevs[pdev_id].ar; in ath12k_dp_rx_process_err()
3480 budget = quota - tot_n_bufs_reaped; in ath12k_dp_rx_process_err()
3486 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_err()
3488 rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rx_process_err()
3491 ab->hw_params->hal_params->rx_buf_rbm, true); in ath12k_dp_rx_process_err()
3505 (DP_RX_BUFFER_SIZE - ar->ab->hw_params->hal_desc_sz)); in ath12k_dp_rx_null_q_desc_sg_drop()
3509 if (rxcb->err_rel_src == HAL_WBM_REL_SRC_MODULE_REO && in ath12k_dp_rx_null_q_desc_sg_drop()
3510 rxcb->err_code == HAL_REO_DEST_RING_ERROR_CODE_DESC_ADDR_ZERO) { in ath12k_dp_rx_null_q_desc_sg_drop()
3515 n_buffs--; in ath12k_dp_rx_null_q_desc_sg_drop()
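n_buffs at 3505 is the number of RX buffers one oversized MSDU was scattered across: each buffer contributes DP_RX_BUFFER_SIZE minus the HAL descriptor prefix of payload, and the loop above drains that many buffers (decrementing at 3515). The count as a worked standalone function (names hypothetical):

	/* Buffers needed for msdu_len payload bytes when each buffer
	 * loses desc_sz bytes to the HAL descriptor prefix. */
	static unsigned int sg_buf_count(unsigned int msdu_len,
					 unsigned int buf_size,
					 unsigned int desc_sz)
	{
		unsigned int payload = buf_size - desc_sz;

		return (msdu_len + payload - 1) / payload;	/* DIV_ROUND_UP */
	}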
3524 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_null_q_desc()
3526 struct hal_rx_desc *desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_null_q_desc()
3529 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_null_q_desc()
3534 spin_lock(&ab->base_lock); in ath12k_dp_rx_h_null_q_desc()
3536 spin_unlock(&ab->base_lock); in ath12k_dp_rx_h_null_q_desc()
3539 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3541 spin_unlock(&ab->base_lock); in ath12k_dp_rx_h_null_q_desc()
3543 if (!rxcb->is_frag && ((msdu_len + hal_rx_desc_sz) > DP_RX_BUFFER_SIZE)) { in ath12k_dp_rx_h_null_q_desc()
3545 msdu_len = msdu_len - (DP_RX_BUFFER_SIZE - hal_rx_desc_sz); in ath12k_dp_rx_h_null_q_desc()
3547 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3554 if (rxcb->is_continuation) in ath12k_dp_rx_h_null_q_desc()
3555 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3558 ath12k_warn(ar->ab, in ath12k_dp_rx_h_null_q_desc()
3561 return -EIO; in ath12k_dp_rx_h_null_q_desc()
3569 * non-QoS TID queue, in the absence of any other default TID queue. in ath12k_dp_rx_h_null_q_desc()
3573 if (rxcb->is_frag) { in ath12k_dp_rx_h_null_q_desc()
3579 return -EINVAL; in ath12k_dp_rx_h_null_q_desc()
3588 rxcb->tid = ath12k_dp_rx_h_tid(ab, desc); in ath12k_dp_rx_h_null_q_desc()
3604 ar->ab->soc_stats.reo_error[rxcb->err_code]++; in ath12k_dp_rx_h_reo_err()
3606 switch (rxcb->err_code) { in ath12k_dp_rx_h_reo_err()
3612 /* TODO: Do not drop packets that fail the PN check in the driver; in ath12k_dp_rx_h_reo_err()
3631 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_tkip_mic_err()
3633 struct hal_rx_desc *desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_tkip_mic_err()
3636 u32 hal_rx_desc_sz = ar->ab->hw_params->hal_desc_sz; in ath12k_dp_rx_h_tkip_mic_err()
3638 rxcb->is_first_msdu = ath12k_dp_rx_h_first_msdu(ab, desc); in ath12k_dp_rx_h_tkip_mic_err()
3639 rxcb->is_last_msdu = ath12k_dp_rx_h_last_msdu(ab, desc); in ath12k_dp_rx_h_tkip_mic_err()
3648 status->flag |= (RX_FLAG_MMIC_STRIPPED | RX_FLAG_MMIC_ERROR | in ath12k_dp_rx_h_tkip_mic_err()
3658 struct ath12k_base *ab = ar->ab; in ath12k_dp_rx_h_rxdma_err()
3660 struct hal_rx_desc *rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_h_rxdma_err()
3664 ar->ab->soc_stats.rxdma_error[rxcb->err_code]++; in ath12k_dp_rx_h_rxdma_err()
3666 switch (rxcb->err_code) { in ath12k_dp_rx_h_rxdma_err()
3695 switch (rxcb->err_rel_src) { in ath12k_dp_rx_wbm_err()
3719 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process_wbm_err()
3732 for (i = 0; i < ab->num_radios; i++) in ath12k_dp_rx_process_wbm_err()
3735 srng = &ab->hal.srng_list[dp->rx_rel_ring.ring_id]; in ath12k_dp_rx_process_wbm_err()
3736 rx_ring = &dp->rx_refill_buf_ring; in ath12k_dp_rx_process_wbm_err()
3738 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_wbm_err()
3757 /* retry manual descriptor retrieval if HW cookie conversion is not done */ in ath12k_dp_rx_process_wbm_err()
3771 if (desc_info->magic != ATH12K_DP_RX_DESC_MAGIC) in ath12k_dp_rx_process_wbm_err()
3774 msdu = desc_info->skb; in ath12k_dp_rx_process_wbm_err()
3775 desc_info->skb = NULL; in ath12k_dp_rx_process_wbm_err()
3777 spin_lock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_process_wbm_err()
3778 list_move_tail(&desc_info->list, &dp->rx_desc_free_list); in ath12k_dp_rx_process_wbm_err()
3779 spin_unlock_bh(&dp->rx_desc_lock); in ath12k_dp_rx_process_wbm_err()
3782 dma_unmap_single(ab->dev, rxcb->paddr, in ath12k_dp_rx_process_wbm_err()
3783 msdu->len + skb_tailroom(msdu), in ath12k_dp_rx_process_wbm_err()
3789 budget--; in ath12k_dp_rx_process_wbm_err()
3797 rxcb->err_rel_src = err_info.err_rel_src; in ath12k_dp_rx_process_wbm_err()
3798 rxcb->err_code = err_info.err_code; in ath12k_dp_rx_process_wbm_err()
3799 rxcb->rx_desc = (struct hal_rx_desc *)msdu->data; in ath12k_dp_rx_process_wbm_err()
3802 rxcb->is_first_msdu = err_info.first_msdu; in ath12k_dp_rx_process_wbm_err()
3803 rxcb->is_last_msdu = err_info.last_msdu; in ath12k_dp_rx_process_wbm_err()
3804 rxcb->is_continuation = err_info.continuation; in ath12k_dp_rx_process_wbm_err()
3809 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_wbm_err()
3815 ab->hw_params->hal_params->rx_buf_rbm, true); in ath12k_dp_rx_process_wbm_err()
3818 for (i = 0; i < ab->num_radios; i++) { in ath12k_dp_rx_process_wbm_err()
3819 if (!rcu_dereference(ab->pdevs_active[i])) { in ath12k_dp_rx_process_wbm_err()
3824 ar = ab->pdevs[i].ar; in ath12k_dp_rx_process_wbm_err()
3826 if (test_bit(ATH12K_CAC_RUNNING, &ar->dev_flags)) { in ath12k_dp_rx_process_wbm_err()
3841 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_process_reo_status()
3849 srng = &ab->hal.srng_list[dp->reo_status_ring.ring_id]; in ath12k_dp_rx_process_reo_status()
3853 spin_lock_bh(&srng->lock); in ath12k_dp_rx_process_reo_status()
3858 tag = u64_get_bits(hdr->tl, HAL_SRNG_TLV_HDR_TAG); in ath12k_dp_rx_process_reo_status()
3894 spin_lock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_process_reo_status()
3895 list_for_each_entry_safe(cmd, tmp, &dp->reo_cmd_list, list) { in ath12k_dp_rx_process_reo_status()
3896 if (reo_status.uniform_hdr.cmd_num == cmd->cmd_num) { in ath12k_dp_rx_process_reo_status()
3898 list_del(&cmd->list); in ath12k_dp_rx_process_reo_status()
3902 spin_unlock_bh(&dp->reo_cmd_lock); in ath12k_dp_rx_process_reo_status()
3905 cmd->handler(dp, (void *)&cmd->data, in ath12k_dp_rx_process_reo_status()
3915 spin_unlock_bh(&srng->lock); in ath12k_dp_rx_process_reo_status()
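Status processing above pairs each REO status TLV with the pending command that issued it by cmd_num (3896), unlinks it from reo_cmd_list while reo_cmd_lock is held, and only then runs its handler (3905) outside the lock. A stripped-down model of that match-and-claim step over an array (illustrative; the driver uses a kernel linked list, not this structure):

	struct pending_cmd {
		int cmd_num;
		int claimed;
	};

	/* Find and claim the pending command matching cmd_num; return
	 * its index or -1. Claiming while "locked" prevents the same
	 * command from matching a second status. */
	static int claim_cmd(struct pending_cmd *cmds, int n, int cmd_num)
	{
		for (int i = 0; i < n; i++) {
			if (!cmds[i].claimed && cmds[i].cmd_num == cmd_num) {
				cmds[i].claimed = 1;
				return i;
			}
		}
		return -1;
	}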
3920 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_free()
3923 ath12k_dp_srng_cleanup(ab, &dp->rx_refill_buf_ring.refill_buf_ring); in ath12k_dp_rx_free()
3925 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_free()
3926 if (ab->hw_params->rx_mac_buf_ring) in ath12k_dp_rx_free()
3927 ath12k_dp_srng_cleanup(ab, &dp->rx_mac_buf_ring[i]); in ath12k_dp_rx_free()
3930 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) in ath12k_dp_rx_free()
3931 ath12k_dp_srng_cleanup(ab, &dp->rxdma_err_dst_ring[i]); in ath12k_dp_rx_free()
3933 ath12k_dp_srng_cleanup(ab, &dp->rxdma_mon_buf_ring.refill_buf_ring); in ath12k_dp_rx_free()
3934 ath12k_dp_srng_cleanup(ab, &dp->tx_mon_buf_ring.refill_buf_ring); in ath12k_dp_rx_free()
3941 struct ath12k *ar = ab->pdevs[mac_id].ar; in ath12k_dp_rx_pdev_free()
3948 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
3952 u32 hal_rx_desc_sz = ab->hw_params->hal_desc_sz; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
3954 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rxdma_ring_sel_config_qcn9274()
3965 ab->hw_params->hal_ops->rx_desc_get_mpdu_start_offset(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
3967 ab->hw_params->hal_ops->rx_desc_get_msdu_end_offset(); in ath12k_dp_rxdma_ring_sel_config_qcn9274()
3983 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
3987 u32 hal_rx_desc_sz = ab->hw_params->hal_desc_sz; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
3990 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4003 ab->hw_params->hal_ops->rx_desc_get_mpdu_start_offset(); in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4005 ab->hw_params->hal_ops->rx_desc_get_msdu_end_offset(); in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4012 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4013 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath12k_dp_rxdma_ring_sel_config_wcn7850()
4025 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_htt_setup()
4030 ring_id = dp->rx_refill_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rx_htt_setup()
4038 if (ab->hw_params->rx_mac_buf_ring) { in ath12k_dp_rx_htt_setup()
4039 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_htt_setup()
4040 ring_id = dp->rx_mac_buf_ring[i].ring_id; in ath12k_dp_rx_htt_setup()
4051 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) { in ath12k_dp_rx_htt_setup()
4052 ring_id = dp->rxdma_err_dst_ring[i].ring_id; in ath12k_dp_rx_htt_setup()
4062 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rx_htt_setup()
4063 ring_id = dp->rxdma_mon_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rx_htt_setup()
4072 ring_id = dp->tx_mon_buf_ring.refill_buf_ring.ring_id; in ath12k_dp_rx_htt_setup()
4082 ret = ab->hw_params->hw_ops->rxdma_ring_sel_config(ab); in ath12k_dp_rx_htt_setup()
4093 struct ath12k_dp *dp = &ab->dp; in ath12k_dp_rx_alloc()
4096 idr_init(&dp->rx_refill_buf_ring.bufs_idr); in ath12k_dp_rx_alloc()
4097 spin_lock_init(&dp->rx_refill_buf_ring.idr_lock); in ath12k_dp_rx_alloc()
4099 idr_init(&dp->rxdma_mon_buf_ring.bufs_idr); in ath12k_dp_rx_alloc()
4100 spin_lock_init(&dp->rxdma_mon_buf_ring.idr_lock); in ath12k_dp_rx_alloc()
4102 idr_init(&dp->tx_mon_buf_ring.bufs_idr); in ath12k_dp_rx_alloc()
4103 spin_lock_init(&dp->tx_mon_buf_ring.idr_lock); in ath12k_dp_rx_alloc()
4106 &dp->rx_refill_buf_ring.refill_buf_ring, in ath12k_dp_rx_alloc()
4114 if (ab->hw_params->rx_mac_buf_ring) { in ath12k_dp_rx_alloc()
4115 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_alloc()
4117 &dp->rx_mac_buf_ring[i], in ath12k_dp_rx_alloc()
4128 for (i = 0; i < ab->hw_params->num_rxdma_dst_ring; i++) { in ath12k_dp_rx_alloc()
4129 ret = ath12k_dp_srng_setup(ab, &dp->rxdma_err_dst_ring[i], in ath12k_dp_rx_alloc()
4138 if (ab->hw_params->rxdma1_enable) { in ath12k_dp_rx_alloc()
4140 &dp->rxdma_mon_buf_ring.refill_buf_ring, in ath12k_dp_rx_alloc()
4149 &dp->tx_mon_buf_ring.refill_buf_ring, in ath12k_dp_rx_alloc()
4169 struct ath12k *ar = ab->pdevs[mac_id].ar; in ath12k_dp_rx_pdev_alloc()
4170 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_alloc()
4175 if (!ab->hw_params->rxdma1_enable) in ath12k_dp_rx_pdev_alloc()
4184 for (i = 0; i < ab->hw_params->num_rxmda_per_pdev; i++) { in ath12k_dp_rx_pdev_alloc()
4185 ring_id = dp->rxdma_mon_dst_ring[i].ring_id; in ath12k_dp_rx_pdev_alloc()
4196 ring_id = dp->tx_mon_dst_ring[i].ring_id; in ath12k_dp_rx_pdev_alloc()
4213 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_mon_status_attach()
4214 struct ath12k_mon_data *pmon = (struct ath12k_mon_data *)&dp->mon_data; in ath12k_dp_rx_pdev_mon_status_attach()
4216 skb_queue_head_init(&pmon->rx_status_q); in ath12k_dp_rx_pdev_mon_status_attach()
4218 pmon->mon_ppdu_status = DP_PPDU_STATUS_START; in ath12k_dp_rx_pdev_mon_status_attach()
4220 memset(&pmon->rx_mon_stats, 0, in ath12k_dp_rx_pdev_mon_status_attach()
4221 sizeof(pmon->rx_mon_stats)); in ath12k_dp_rx_pdev_mon_status_attach()
4227 struct ath12k_pdev_dp *dp = &ar->dp; in ath12k_dp_rx_pdev_mon_attach()
4228 struct ath12k_mon_data *pmon = &dp->mon_data; in ath12k_dp_rx_pdev_mon_attach()
4233 ath12k_warn(ar->ab, "pdev_mon_status_attach() failed"); in ath12k_dp_rx_pdev_mon_attach()
4240 if (!ar->ab->hw_params->rxdma1_enable) in ath12k_dp_rx_pdev_mon_attach()
4243 pmon->mon_last_linkdesc_paddr = 0; in ath12k_dp_rx_pdev_mon_attach()
4244 pmon->mon_last_buf_cookie = DP_RX_DESC_COOKIE_MAX + 1; in ath12k_dp_rx_pdev_mon_attach()
4245 spin_lock_init(&pmon->mon_lock); in ath12k_dp_rx_pdev_mon_attach()
4253 mod_timer(&ab->mon_reap_timer, in ath12k_dp_rx_pktlog_start()
4264 del_timer_sync(&ab->mon_reap_timer); in ath12k_dp_rx_pktlog_stop()