Lines matching refs: rx_tid
684 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_reo_cmd_free() local
688 rx_tid->tid, status); in ath11k_dp_reo_cmd_free()
690 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cmd_free()
692 kfree(rx_tid->vaddr); in ath11k_dp_reo_cmd_free()
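
These hits all come from the ath11k wireless driver; the function names match its rx datapath file (dp_rx.c). The lines above are ath11k_dp_reo_cmd_free(), the completion callback of a REO (reorder) command: once the hardware has acted on the command, the TID's queue descriptor is DMA-unmapped and its CPU buffer freed. A minimal sketch of that ownership pattern, using a struct cut down to the fields visible in this listing (the real struct dp_rx_tid has more members, and the DMA direction here is an assumption):

    #include <linux/types.h>
    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    /* Cut-down stand-in for struct dp_rx_tid; only fields seen in the hits. */
    struct dp_rx_tid_sketch {
            u8 tid;
            void *vaddr;            /* CPU address of the HW queue descriptor */
            dma_addr_t paddr;       /* bus address handed to the REO hardware */
            u32 size;
            u32 ba_win_sz;          /* block-ack window size */
            bool active;
    };

    /* Unmap before kfree(): the device must have given up the buffer
     * before the CPU reclaims it. DMA_BIDIRECTIONAL is assumed. */
    static void rx_tid_release(struct device *dev,
                               struct dp_rx_tid_sketch *rx_tid)
    {
            dma_unmap_single(dev, rx_tid->paddr, rx_tid->size,
                             DMA_BIDIRECTIONAL);
            kfree(rx_tid->vaddr);
            rx_tid->vaddr = NULL;
    }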
696 struct dp_rx_tid *rx_tid) in ath11k_dp_reo_cache_flush() argument
702 tot_desc_sz = rx_tid->size; in ath11k_dp_reo_cache_flush()
707 cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz); in ath11k_dp_reo_cache_flush()
708 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
709 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
715 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
719 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
720 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
722 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
727 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
728 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cache_flush()
730 kfree(rx_tid->vaddr); in ath11k_dp_reo_cache_flush()
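
ath11k_dp_reo_cache_flush() issues flush commands against that descriptor. The 64-bit DMA address is programmed as a lo/hi pair; line 707 flushes at an offset (paddr + tot_desc_sz) before the base is flushed at line 719, so the descriptor is evidently flushed in chunks from the tail down. Note that addr_hi at line 708 is taken from the unoffset base, which is only equivalent if the descriptor never straddles a 4 GiB boundary. The address-split idiom, with an illustrative command struct (not the driver's):

    #include <linux/kernel.h>       /* lower_32_bits() / upper_32_bits() */

    struct reo_cmd_sketch {
            u32 addr_lo;
            u32 addr_hi;
    };

    static void reo_cmd_set_addr(struct reo_cmd_sketch *cmd, dma_addr_t paddr)
    {
            cmd->addr_lo = lower_32_bits(paddr);
            cmd->addr_hi = upper_32_bits(paddr);
    }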
738 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_rx_tid_del_func() local
746 rx_tid->tid, status); in ath11k_dp_rx_tid_del_func()
755 memcpy(&elem->data, rx_tid, sizeof(*rx_tid)); in ath11k_dp_rx_tid_del_func()
780 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_del_func()
782 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_del_func()
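
ath11k_dp_rx_tid_del_func() runs when the delete command completes; line 755 copies the whole rx_tid context by value into a list element, deferring the actual cache flush, while the error path (lines 780-782) tears the descriptor down immediately. A sketch of the snapshot-into-node pattern (names are illustrative, not the driver's):

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    struct reo_flush_elem {
            struct list_head list;
            struct dp_rx_tid_sketch data;   /* by-value snapshot */
    };

    static int queue_deferred_flush(struct list_head *flush_list,
                                    const struct dp_rx_tid_sketch *rx_tid)
    {
            struct reo_flush_elem *elem = kzalloc(sizeof(*elem), GFP_ATOMIC);

            if (!elem)
                    return -ENOMEM;

            /* Copy by value: the peer's rx_tid slot can be reused while
             * this snapshot waits for its cache flush. */
            memcpy(&elem->data, rx_tid, sizeof(*rx_tid));
            list_add_tail(&elem->list, flush_list);
            return 0;
    }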
789 struct dp_rx_tid *rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_delete() local
792 if (!rx_tid->active) in ath11k_peer_rx_tid_delete()
796 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
797 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
799 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_delete()
805 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_peer_rx_tid_delete()
807 kfree(rx_tid->vaddr); in ath11k_peer_rx_tid_delete()
810 rx_tid->active = false; in ath11k_peer_rx_tid_delete()
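
ath11k_peer_rx_tid_delete() initiates that sequence: inactive TIDs are skipped, an update command is pointed at the descriptor, and if the command cannot be queued the descriptor is freed inline; either way the slot ends up inactive (line 810). A sketch reusing rx_tid_release() from above, with send_cmd standing in for ath11k_dp_tx_send_reo_cmd():

    static void rx_tid_delete(struct device *dev,
                              struct dp_rx_tid_sketch *rx_tid,
                              int (*send_cmd)(struct dp_rx_tid_sketch *))
    {
            if (!rx_tid->active)
                    return;

            /* If the REO command ring has no space, tear down
             * synchronously instead of leaking the descriptor. */
            if (send_cmd(rx_tid))
                    rx_tid_release(dev, rx_tid);

            rx_tid->active = false;
    }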
845 static void ath11k_dp_rx_frags_cleanup(struct dp_rx_tid *rx_tid, bool rel_link_desc) in ath11k_dp_rx_frags_cleanup() argument
847 struct ath11k_base *ab = rx_tid->ab; in ath11k_dp_rx_frags_cleanup()
851 if (rx_tid->dst_ring_desc) { in ath11k_dp_rx_frags_cleanup()
853 ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc, in ath11k_dp_rx_frags_cleanup()
855 kfree(rx_tid->dst_ring_desc); in ath11k_dp_rx_frags_cleanup()
856 rx_tid->dst_ring_desc = NULL; in ath11k_dp_rx_frags_cleanup()
859 rx_tid->cur_sn = 0; in ath11k_dp_rx_frags_cleanup()
860 rx_tid->last_frag_no = 0; in ath11k_dp_rx_frags_cleanup()
861 rx_tid->rx_frag_bitmap = 0; in ath11k_dp_rx_frags_cleanup()
862 __skb_queue_purge(&rx_tid->rx_frags); in ath11k_dp_rx_frags_cleanup()
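
ath11k_dp_rx_frags_cleanup() resets the defragmentation state: the cached destination-ring descriptor is optionally returned to the hardware and freed, the sequence and fragment bookkeeping is zeroed, and queued fragments are purged. A cut-down sketch of that state and its reset (locking elided):

    #include <linux/skbuff.h>

    struct frag_state_sketch {
            u16 cur_sn;             /* sequence number being reassembled */
            u16 last_frag_no;       /* fragment number of the final fragment */
            u32 rx_frag_bitmap;     /* one bit per fragment received */
            struct sk_buff_head rx_frags;
    };

    static void frag_state_reset(struct frag_state_sketch *st)
    {
            st->cur_sn = 0;
            st->last_frag_no = 0;
            st->rx_frag_bitmap = 0;
            __skb_queue_purge(&st->rx_frags);   /* caller holds the lock */
    }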
867 struct dp_rx_tid *rx_tid; in ath11k_peer_frags_flush() local
873 rx_tid = &peer->rx_tid[i]; in ath11k_peer_frags_flush()
876 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_frags_flush()
879 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_frags_flush()
885 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_cleanup() local
891 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_tid_cleanup()
894 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_rx_tid_cleanup()
897 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_rx_tid_cleanup()
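
ath11k_peer_frags_flush() and ath11k_peer_rx_tid_cleanup() both walk a peer's TIDs and run the cleanup above, with slightly different ordering (the former cancels the fragment timer before cleaning up, the latter after). A sketch of the loop shape, pairing each TID with its timer and assuming the per-TID array covers IEEE80211_NUM_TIDS + 1 slots:

    #include <linux/ieee80211.h>    /* IEEE80211_NUM_TIDS */
    #include <linux/timer.h>

    struct frag_ctx_sketch {
            struct frag_state_sketch st;
            struct timer_list frag_timer;
    };

    static void peer_frags_flush_sketch(struct frag_ctx_sketch *tids)
    {
            int i;

            for (i = 0; i <= IEEE80211_NUM_TIDS; i++) {
                    /* The driver drops its base lock around the synchronous
                     * timer cancel; that lock juggling is elided here. */
                    del_timer_sync(&tids[i].frag_timer);
                    frag_state_reset(&tids[i].st);
            }
    }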
904 struct dp_rx_tid *rx_tid, in ath11k_peer_rx_tid_reo_update() argument
911 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
912 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
922 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_reo_update()
927 rx_tid->tid, ret); in ath11k_peer_rx_tid_reo_update()
931 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_reo_update()
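
ath11k_peer_rx_tid_reo_update() points an update command at the descriptor (the reo_cmd_set_addr() idiom again) to resize the block-ack window, and only caches the new size once the command was queued:

    /* Sketch: ba_win_sz changes only on success; send_update stands in
     * for ath11k_dp_tx_send_reo_cmd() with an update-rx-queue command. */
    static int rx_tid_update_ba_win(struct dp_rx_tid_sketch *rx_tid,
                                    u32 ba_win_sz,
                                    int (*send_update)(struct dp_rx_tid_sketch *, u32))
    {
            int ret = send_update(rx_tid, ba_win_sz);

            if (ret)
                    return ret;

            rx_tid->ba_win_sz = ba_win_sz;
            return 0;
    }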
940 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_tid_mem_free() local
950 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_tid_mem_free()
951 if (!rx_tid->active) in ath11k_dp_rx_tid_mem_free()
954 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_mem_free()
956 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_mem_free()
958 rx_tid->active = false; in ath11k_dp_rx_tid_mem_free()
970 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_setup() local
986 rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_setup()
988 if (rx_tid->active) { in ath11k_peer_rx_tid_setup()
989 paddr = rx_tid->paddr; in ath11k_peer_rx_tid_setup()
990 ret = ath11k_peer_rx_tid_reo_update(ar, peer, rx_tid, in ath11k_peer_rx_tid_setup()
1007 rx_tid->tid = tid; in ath11k_peer_rx_tid_setup()
1009 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_setup()
1039 rx_tid->vaddr = vaddr; in ath11k_peer_rx_tid_setup()
1040 rx_tid->paddr = paddr; in ath11k_peer_rx_tid_setup()
1041 rx_tid->size = hw_desc_sz; in ath11k_peer_rx_tid_setup()
1042 rx_tid->active = true; in ath11k_peer_rx_tid_setup()
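
ath11k_peer_rx_tid_setup() is the allocation side. An already-active TID only gets its window updated (line 990); otherwise the hardware queue descriptor is allocated and DMA-mapped, and the rx_tid is populated before active is set. A sketch of the new-TID path (the driver's descriptor alignment and hardware-specific initialisation are elided):

    static int rx_tid_setup(struct device *dev,
                            struct dp_rx_tid_sketch *rx_tid,
                            u8 tid, u32 hw_desc_sz)
    {
            /* GFP_ATOMIC because the driver allocates under a spinlock. */
            void *vaddr = kzalloc(hw_desc_sz, GFP_ATOMIC);
            dma_addr_t paddr;

            if (!vaddr)
                    return -ENOMEM;

            paddr = dma_map_single(dev, vaddr, hw_desc_sz,
                                   DMA_BIDIRECTIONAL);
            if (dma_mapping_error(dev, paddr)) {
                    kfree(vaddr);
                    return -ENOMEM;
            }

            rx_tid->tid = tid;
            rx_tid->vaddr = vaddr;
            rx_tid->paddr = paddr;
            rx_tid->size = hw_desc_sz;
            rx_tid->active = true;
            return 0;
    }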
1099 paddr = peer->rx_tid[params->tid].paddr; in ath11k_dp_rx_ampdu_stop()
1100 active = peer->rx_tid[params->tid].active; in ath11k_dp_rx_ampdu_stop()
1107 ret = ath11k_peer_rx_tid_reo_update(ar, peer, peer->rx_tid, 1, 0, false); in ath11k_dp_rx_ampdu_stop()
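
In ath11k_dp_rx_ampdu_stop(), lines 1099-1100 read paddr and active for params->tid, but line 1107 passes peer->rx_tid, i.e. the array base and therefore TID 0's context, to the update helper rather than &peer->rx_tid[params->tid]. Whether that is intentional cannot be judged from this listing alone, but it is worth a second look.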
1134 struct dp_rx_tid *rx_tid; in ath11k_dp_peer_rx_pn_replay_config() local
1177 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_peer_rx_pn_replay_config()
1178 if (!rx_tid->active) in ath11k_dp_peer_rx_pn_replay_config()
1180 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1181 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1182 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_peer_rx_pn_replay_config()
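
ath11k_dp_peer_rx_pn_replay_config() walks every TID after a key change and, for each active one, aims the same kind of update command at its descriptor to program PN-replay checking. The loop skeleton on the sketched types (send_pn_cmd is a stand-in for the REO command send):

    static void pn_replay_config_sketch(struct dp_rx_tid_sketch *tids,
                                        int (*send_pn_cmd)(struct dp_rx_tid_sketch *))
    {
            int tid;

            for (tid = 0; tid <= IEEE80211_NUM_TIDS; tid++) {
                    if (!tids[tid].active)
                            continue;       /* never set up, or torn down */
                    send_pn_cmd(&tids[tid]);
            }
    }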
3038 struct dp_rx_tid *rx_tid = from_timer(rx_tid, timer, frag_timer); in ath11k_dp_rx_frag_timer() local
3040 spin_lock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3041 if (rx_tid->last_frag_no && in ath11k_dp_rx_frag_timer()
3042 rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_timer()
3043 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3046 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_timer()
3047 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
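
ath11k_dp_rx_frag_timer() fires when a fragment set stalls. The context is recovered with from_timer() (line 3038) and discarded unless the bitmap already covers fragments 0..last_frag_no, i.e. equals GENMASK(last_frag_no, 0). That completeness test, as a helper reused below:

    #include <linux/bits.h>         /* GENMASK(), BIT() */

    /* Complete when the final fragment's number is known and every bit
     * 0..last_frag_no is set; mirrors the check at lines 3041-3042. */
    static bool frags_complete(const struct frag_state_sketch *st)
    {
            return st->last_frag_no &&
                   st->rx_frag_bitmap == GENMASK(st->last_frag_no, 0);
    }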
3055 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_frag_setup() local
3073 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_frag_setup()
3074 rx_tid->ab = ab; in ath11k_peer_rx_frag_setup()
3075 timer_setup(&rx_tid->frag_timer, ath11k_dp_rx_frag_timer, 0); in ath11k_peer_rx_frag_setup()
3076 skb_queue_head_init(&rx_tid->rx_frags); in ath11k_peer_rx_frag_setup()
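
ath11k_peer_rx_frag_setup() wires that machinery up per TID: back-pointer, timer callback, fragment queue. Sketch:

    static void frag_timer_cb(struct timer_list *timer)
    {
            /* from_timer() recovers the enclosing context from its
             * timer_list member, as at line 3038. */
            struct frag_ctx_sketch *ctx = from_timer(ctx, timer, frag_timer);

            if (!frags_complete(&ctx->st))
                    frag_state_reset(&ctx->st);     /* locking elided */
    }

    static void frag_ctx_init(struct frag_ctx_sketch *ctx)
    {
            timer_setup(&ctx->frag_timer, frag_timer_cb, 0);
            skb_queue_head_init(&ctx->st.rx_frags);
    }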
3214 struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag() argument
3227 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3228 last_frag = skb_peek_tail(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3230 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag()
3265 __skb_unlink(first_frag, &rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3266 while ((skb = __skb_dequeue(&rx_tid->rx_frags))) { in ath11k_dp_rx_h_defrag()
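
ath11k_dp_rx_h_defrag() stitches the queued fragments into one MSDU: peek at the first and last to size things, walk the queue, then unlink the head and drain the rest into it. A much-simplified coalescing sketch (it assumes the first fragment has tailroom for the rest, and ignores the 802.11 header and crypto trailer handling the driver performs):

    static struct sk_buff *defrag_coalesce(struct sk_buff_head *frags)
    {
            struct sk_buff *first = __skb_dequeue(frags);
            struct sk_buff *skb;

            if (!first)
                    return NULL;

            while ((skb = __skb_dequeue(frags))) {
                    skb_put_data(first, skb->data, skb->len);
                    dev_kfree_skb_any(skb);
            }
            return first;
    }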
3282 static int ath11k_dp_rx_h_defrag_reo_reinject(struct ath11k *ar, struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag_reo_reinject() argument
3302 reo_dest_ring = rx_tid->dst_ring_desc; in ath11k_dp_rx_h_defrag_reo_reinject()
3366 FIELD_PREP(RX_MPDU_DESC_INFO0_SEQ_NUM, rx_tid->cur_sn) | in ath11k_dp_rx_h_defrag_reo_reinject()
3444 ath11k_dp_rx_h_defrag_validate_incr_pn(struct ath11k *ar, struct dp_rx_tid *rx_tid) in ath11k_dp_rx_h_defrag_validate_incr_pn() argument
3452 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag_validate_incr_pn()
3463 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag_validate_incr_pn()
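
ath11k_dp_rx_h_defrag_validate_incr_pn() guards against fragment splicing: the packet number must advance by exactly one from fragment to fragment, otherwise the MPDU is treated as a potential replay. The rule in isolation (pn[] here is a plain array of extracted PNs, not the driver's representation):

    /* True when PNs over the fragment set are strictly consecutive. */
    static bool pn_strictly_consecutive(const u64 *pn, int nfrags)
    {
            int i;

            for (i = 1; i < nfrags; i++)
                    if (pn[i] != pn[i - 1] + 1)
                            return false;
            return true;
    }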
3482 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_frag_h_mpdu() local
3524 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_frag_h_mpdu()
3526 if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) || in ath11k_dp_rx_frag_h_mpdu()
3527 skb_queue_empty(&rx_tid->rx_frags)) { in ath11k_dp_rx_frag_h_mpdu()
3529 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
3530 rx_tid->cur_sn = seqno; in ath11k_dp_rx_frag_h_mpdu()
3533 if (rx_tid->rx_frag_bitmap & BIT(frag_no)) { in ath11k_dp_rx_frag_h_mpdu()
3539 if (frag_no > __fls(rx_tid->rx_frag_bitmap)) in ath11k_dp_rx_frag_h_mpdu()
3540 __skb_queue_tail(&rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3542 ath11k_dp_rx_h_sort_frags(ar, &rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3544 rx_tid->rx_frag_bitmap |= BIT(frag_no); in ath11k_dp_rx_frag_h_mpdu()
3546 rx_tid->last_frag_no = frag_no; in ath11k_dp_rx_frag_h_mpdu()
3549 rx_tid->dst_ring_desc = kmemdup(ring_desc, in ath11k_dp_rx_frag_h_mpdu()
3550 sizeof(*rx_tid->dst_ring_desc), in ath11k_dp_rx_frag_h_mpdu()
3552 if (!rx_tid->dst_ring_desc) { in ath11k_dp_rx_frag_h_mpdu()
3561 if (!rx_tid->last_frag_no || in ath11k_dp_rx_frag_h_mpdu()
3562 rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_h_mpdu()
3563 mod_timer(&rx_tid->frag_timer, jiffies + in ath11k_dp_rx_frag_h_mpdu()
3569 del_timer_sync(&rx_tid->frag_timer); in ath11k_dp_rx_frag_h_mpdu()
3576 if (!ath11k_dp_rx_h_defrag_validate_incr_pn(ar, rx_tid)) in ath11k_dp_rx_frag_h_mpdu()
3579 if (ath11k_dp_rx_h_defrag(ar, peer, rx_tid, &defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3585 if (ath11k_dp_rx_h_defrag_reo_reinject(ar, rx_tid, defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3588 ath11k_dp_rx_frags_cleanup(rx_tid, false); in ath11k_dp_rx_frag_h_mpdu()
3593 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
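
ath11k_dp_rx_frag_h_mpdu() ties it all together: restart the state when a different sequence number arrives (lines 3526-3530), drop duplicates via the bitmap (3533), insert fragments in order (3539-3542), cache the destination-ring descriptor for later reinjection (3549), then either arm the stall timer or, once complete, cancel it and run the PN check, defragmentation, and REO reinjection (3561-3588). The control flow condensed onto the sketched types (in the driver, last_frag_no comes from the fragment carrying no more-fragments flag; this sketch takes that flag as a parameter):

    #include <linux/jiffies.h>

    static int frag_rx_sketch(struct frag_ctx_sketch *ctx, struct sk_buff *msdu,
                              u16 seqno, u8 frag_no, bool more_frags,
                              unsigned long timeout)
    {
            struct frag_state_sketch *st = &ctx->st;

            /* Different sequence number, or nothing pending: start over. */
            if (skb_queue_empty(&st->rx_frags) || seqno != st->cur_sn) {
                    frag_state_reset(st);
                    st->cur_sn = seqno;
            }

            if (st->rx_frag_bitmap & BIT(frag_no))
                    return -EEXIST;                 /* duplicate fragment */

            __skb_queue_tail(&st->rx_frags, msdu);  /* driver inserts sorted */
            st->rx_frag_bitmap |= BIT(frag_no);
            if (!more_frags)
                    st->last_frag_no = frag_no;

            if (!frags_complete(st)) {
                    mod_timer(&ctx->frag_timer, jiffies + timeout);
                    return -EINPROGRESS;
            }

            del_timer_sync(&ctx->frag_timer);
            /* Complete: validate PNs, coalesce, reinject via REO. */
            return 0;
    }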