Lines Matching refs:rx_tid

719 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_reo_cmd_free() local
723 rx_tid->tid, status); in ath11k_dp_reo_cmd_free()
725 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cmd_free()
727 kfree(rx_tid->vaddr); in ath11k_dp_reo_cmd_free()
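Lines 719-727 together form the REO command completion callback that finally releases a TID's hardware reorder-queue descriptor. A minimal sketch of the pattern, reconstructed from the listed lines; the warning text, the success constant, and the DMA direction are assumptions:

static void ath11k_dp_reo_cmd_free(struct ath11k_dp *dp, void *ctx,
				   enum hal_reo_cmd_status status)
{
	struct dp_rx_tid *rx_tid = ctx;

	/* Assumed failure report; the listing only shows that tid and
	 * status are printed (line 723). */
	if (status != HAL_REO_CMD_SUCCESS)
		ath11k_warn(dp->ab, "failed to flush rx tid %d hw desc, status %d\n",
			    rx_tid->tid, status);

	/* Firmware is done with the queue: undo the streaming mapping
	 * and free the CPU-side buffer (lines 725-727). */
	dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size,
			 DMA_BIDIRECTIONAL);
	kfree(rx_tid->vaddr);
}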
731 struct dp_rx_tid *rx_tid) in ath11k_dp_reo_cache_flush() argument
737 tot_desc_sz = rx_tid->size; in ath11k_dp_reo_cache_flush()
742 cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz); in ath11k_dp_reo_cache_flush()
743 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
744 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
750 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
754 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
755 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
757 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
762 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
763 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cache_flush()
765 kfree(rx_tid->vaddr); in ath11k_dp_reo_cache_flush()
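Lines 731-765 flush the queue descriptor out of the REO hardware cache before it can be freed, walking the descriptor in fixed-size units from the tail back to the base address. A sketch of that loop; the unit-size helper is hypothetical, and the command type and error handling are assumptions:

	struct ath11k_hal_reo_cmd cmd = {};
	u32 tot_desc_sz = rx_tid->size;
	u32 desc_sz = reo_qdesc_unit_size();	/* hypothetical helper */
	int ret;

	/* One flush command per unit, highest offset first (line 742
	 * adds the running offset to paddr). */
	while (tot_desc_sz > desc_sz) {
		tot_desc_sz -= desc_sz;
		cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz);
		cmd.addr_hi = upper_32_bits(rx_tid->paddr);
		ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid,
						HAL_REO_CMD_FLUSH_CACHE,
						&cmd, NULL);
		if (ret)
			ath11k_warn(ab, "failed to flush rx tid %d, ret %d\n",
				    rx_tid->tid, ret);
	}

	/* Final flush of the base unit; only if the command cannot be
	 * queued is the descriptor freed synchronously (lines 763-765). */
	memset(&cmd, 0, sizeof(cmd));
	cmd.addr_lo = lower_32_bits(rx_tid->paddr);
	cmd.addr_hi = upper_32_bits(rx_tid->paddr);
	ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, HAL_REO_CMD_FLUSH_CACHE,
					&cmd, ath11k_dp_reo_cmd_free);
	if (ret) {
		dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size,
				 DMA_BIDIRECTIONAL);
		kfree(rx_tid->vaddr);
	}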
773 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_rx_tid_del_func() local
781 rx_tid->tid, status); in ath11k_dp_rx_tid_del_func()
790 memcpy(&elem->data, rx_tid, sizeof(*rx_tid)); in ath11k_dp_rx_tid_del_func()
815 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_del_func()
817 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_del_func()
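Lines 773-817 run when the delete command completes: rather than freeing the descriptor immediately, a copy of the dp_rx_tid is queued on a cache-flush list (the memcpy on line 790) so the REO cache can be flushed later; lines 815-817 are the fallback path. A sketch, assuming the element type and the dp-level list/lock names:

	struct dp_reo_cache_flush_elem *elem;	/* assumed type */

	elem = kzalloc(sizeof(*elem), GFP_ATOMIC);
	if (!elem)
		goto free_desc;

	elem->ts = jiffies;
	/* Snapshot the whole TID state; the peer's rx_tid slot may be
	 * reused before the deferred flush runs. */
	memcpy(&elem->data, rx_tid, sizeof(*rx_tid));

	spin_lock_bh(&dp->reo_cmd_lock);
	list_add_tail(&elem->list, &dp->reo_cmd_cache_flush_list);
	spin_unlock_bh(&dp->reo_cmd_lock);
	return;

free_desc:
	/* Out of memory: free immediately (lines 815-817). */
	dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size,
			 DMA_BIDIRECTIONAL);
	kfree(rx_tid->vaddr);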
824 struct dp_rx_tid *rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_delete() local
827 if (!rx_tid->active) in ath11k_peer_rx_tid_delete()
831 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
832 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
834 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_delete()
841 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_peer_rx_tid_delete()
843 kfree(rx_tid->vaddr); in ath11k_peer_rx_tid_delete()
846 rx_tid->active = false; in ath11k_peer_rx_tid_delete()
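Lines 824-846 start the teardown: an UPDATE_RX_QUEUE command invalidates the queue in firmware with ath11k_dp_rx_tid_del_func as the completion callback; if the command cannot be queued, the descriptor is freed on the spot, and either way the TID is marked inactive. A sketch; the flag and update-field names are assumptions:

	struct ath11k_hal_reo_cmd cmd = {};
	int ret;

	if (!rx_tid->active)
		return;

	cmd.flags = HAL_REO_CMD_FLAG_NEED_STATUS;
	cmd.addr_lo = lower_32_bits(rx_tid->paddr);
	cmd.addr_hi = upper_32_bits(rx_tid->paddr);
	cmd.upd0 = HAL_REO_CMD_UPD0_VLD;	/* clear the "valid" bit */
	ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid,
					HAL_REO_CMD_UPDATE_RX_QUEUE, &cmd,
					ath11k_dp_rx_tid_del_func);
	if (ret) {
		/* Could not queue the command: free synchronously
		 * (lines 841-843). */
		dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size,
				 DMA_BIDIRECTIONAL);
		kfree(rx_tid->vaddr);
	}

	rx_tid->active = false;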
881 static void ath11k_dp_rx_frags_cleanup(struct dp_rx_tid *rx_tid, bool rel_link_desc) in ath11k_dp_rx_frags_cleanup() argument
883 struct ath11k_base *ab = rx_tid->ab; in ath11k_dp_rx_frags_cleanup()
887 if (rx_tid->dst_ring_desc) { in ath11k_dp_rx_frags_cleanup()
889 ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc, in ath11k_dp_rx_frags_cleanup()
891 kfree(rx_tid->dst_ring_desc); in ath11k_dp_rx_frags_cleanup()
892 rx_tid->dst_ring_desc = NULL; in ath11k_dp_rx_frags_cleanup()
895 rx_tid->cur_sn = 0; in ath11k_dp_rx_frags_cleanup()
896 rx_tid->last_frag_no = 0; in ath11k_dp_rx_frags_cleanup()
897 rx_tid->rx_frag_bitmap = 0; in ath11k_dp_rx_frags_cleanup()
898 __skb_queue_purge(&rx_tid->rx_frags); in ath11k_dp_rx_frags_cleanup()
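Lines 881-898 are nearly the whole fragment-cleanup helper. Reconstructed here for context; the WBM release action constant is an assumption:

static void ath11k_dp_rx_frags_cleanup(struct dp_rx_tid *rx_tid,
					bool rel_link_desc)
{
	struct ath11k_base *ab = rx_tid->ab;

	if (rx_tid->dst_ring_desc) {
		if (rel_link_desc)
			/* Hand the saved destination-ring link descriptor
			 * back to the idle list. */
			ath11k_dp_rx_link_desc_return(ab,
					(u32 *)rx_tid->dst_ring_desc,
					HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);
		kfree(rx_tid->dst_ring_desc);
		rx_tid->dst_ring_desc = NULL;
	}

	/* Reset the reassembly window and drop queued fragments. */
	rx_tid->cur_sn = 0;
	rx_tid->last_frag_no = 0;
	rx_tid->rx_frag_bitmap = 0;
	__skb_queue_purge(&rx_tid->rx_frags);
}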
903 struct dp_rx_tid *rx_tid; in ath11k_peer_frags_flush() local
909 rx_tid = &peer->rx_tid[i]; in ath11k_peer_frags_flush()
912 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_frags_flush()
915 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_frags_flush()
921 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_cleanup() local
927 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_tid_cleanup()
930 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_rx_tid_cleanup()
933 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_rx_tid_cleanup()
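Lines 921-933 tear down every TID of a peer. Note the locking around line 933: the fragment timer callback takes base_lock itself, so the lock must be dropped before del_timer_sync() to avoid deadlock. A sketch, assuming base_lock is held on entry and that the rx_tid array spans IEEE80211_NUM_TIDS + 1 slots:

	int i;

	for (i = 0; i <= IEEE80211_NUM_TIDS; i++) {
		rx_tid = &peer->rx_tid[i];

		ath11k_peer_rx_tid_delete(ar, peer, i);
		ath11k_dp_rx_frags_cleanup(rx_tid, true);

		/* del_timer_sync() waits for a running callback, and the
		 * callback takes base_lock: drop it across the call. */
		spin_unlock_bh(&ar->ab->base_lock);
		del_timer_sync(&rx_tid->frag_timer);
		spin_lock_bh(&ar->ab->base_lock);
	}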
940 struct dp_rx_tid *rx_tid, in ath11k_peer_rx_tid_reo_update() argument
947 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
948 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
958 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_reo_update()
963 rx_tid->tid, ret); in ath11k_peer_rx_tid_reo_update()
967 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_reo_update()
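Lines 940-967 update an existing REO queue in place, typically to change the block-ack window. A sketch; the upd0/upd2 encoding and the parameter list (ba_win_sz, ssn, update_ssn) are assumptions:

	struct ath11k_hal_reo_cmd cmd = {};
	int ret;

	cmd.addr_lo = lower_32_bits(rx_tid->paddr);
	cmd.addr_hi = upper_32_bits(rx_tid->paddr);
	cmd.flags = HAL_REO_CMD_FLAG_NEED_STATUS;
	cmd.upd0 = HAL_REO_CMD_UPD0_BA_WINDOW_SIZE;
	cmd.ba_window_size = ba_win_sz;
	if (update_ssn) {
		cmd.upd0 |= HAL_REO_CMD_UPD0_SSN;
		cmd.upd2 = FIELD_PREP(HAL_REO_CMD_UPD2_SSN, ssn);
	}

	ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid,
					HAL_REO_CMD_UPDATE_RX_QUEUE, &cmd,
					NULL);
	if (ret) {
		ath11k_warn(ar->ab, "failed to update rx tid %d, ret %d\n",
			    rx_tid->tid, ret);
		return ret;
	}

	/* Only record the new window once firmware accepted it (967). */
	rx_tid->ba_win_sz = ba_win_sz;
	return 0;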
976 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_tid_mem_free() local
986 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_tid_mem_free()
987 if (!rx_tid->active) in ath11k_dp_rx_tid_mem_free()
990 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_mem_free()
992 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_mem_free()
994 rx_tid->active = false; in ath11k_dp_rx_tid_mem_free()
1006 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_setup() local
1022 rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_setup()
1024 if (rx_tid->active) { in ath11k_peer_rx_tid_setup()
1025 paddr = rx_tid->paddr; in ath11k_peer_rx_tid_setup()
1026 ret = ath11k_peer_rx_tid_reo_update(ar, peer, rx_tid, in ath11k_peer_rx_tid_setup()
1043 rx_tid->tid = tid; in ath11k_peer_rx_tid_setup()
1045 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_setup()
1075 rx_tid->vaddr = vaddr; in ath11k_peer_rx_tid_setup()
1076 rx_tid->paddr = paddr; in ath11k_peer_rx_tid_setup()
1077 rx_tid->size = hw_desc_sz; in ath11k_peer_rx_tid_setup()
1078 rx_tid->active = true; in ath11k_peer_rx_tid_setup()
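Lines 1006-1078 either update an already-active queue (1024-1026) or allocate, initialize, and DMA-map a new hardware queue descriptor, then record it in the per-peer TID state (1075-1078). A sketch of the allocation path; the HAL helper names and alignment constant follow ath11k conventions, but their exact signatures are assumptions:

	/* Size the descriptor for the requested BA window, then allocate
	 * with slack so it can be aligned for the hardware. */
	hw_desc_sz = ath11k_hal_reo_qdesc_size(ba_win_sz, tid);
	vaddr = kzalloc(hw_desc_sz + HAL_LINK_DESC_ALIGN - 1, GFP_ATOMIC);
	if (!vaddr)
		return -ENOMEM;

	addr_aligned = PTR_ALIGN(vaddr, HAL_LINK_DESC_ALIGN);
	ath11k_hal_reo_qdesc_setup(addr_aligned, tid, ba_win_sz, ssn, pn_type);

	paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz,
			       DMA_BIDIRECTIONAL);
	if (dma_mapping_error(ab->dev, paddr)) {
		kfree(vaddr);
		return -ENOBUFS;
	}

	/* Publish the mapping in the per-peer TID state (1075-1078). */
	rx_tid->vaddr = vaddr;
	rx_tid->paddr = paddr;
	rx_tid->size = hw_desc_sz;
	rx_tid->active = true;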
1135 paddr = peer->rx_tid[params->tid].paddr; in ath11k_dp_rx_ampdu_stop()
1136 active = peer->rx_tid[params->tid].active; in ath11k_dp_rx_ampdu_stop()
1143 ret = ath11k_peer_rx_tid_reo_update(ar, peer, peer->rx_tid, 1, 0, false); in ath11k_dp_rx_ampdu_stop()
1170 struct dp_rx_tid *rx_tid; in ath11k_dp_peer_rx_pn_replay_config() local
1213 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_peer_rx_pn_replay_config()
1214 if (!rx_tid->active) in ath11k_dp_peer_rx_pn_replay_config()
1216 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1217 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1218 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_peer_rx_pn_replay_config()
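Lines 1170-1218 program PN (packet number) replay checking into the REO queue of every active TID. A sketch of the loop; the upd0 flag names and the 48-bit PN width (CCMP/GCMP-style ciphers) are assumptions:

	cmd.flags = HAL_REO_CMD_FLAG_NEED_STATUS;
	cmd.upd0 = HAL_REO_CMD_UPD0_PN |
		   HAL_REO_CMD_UPD0_PN_SIZE |
		   HAL_REO_CMD_UPD0_PN_VALID |
		   HAL_REO_CMD_UPD0_PN_CHECK;
	cmd.pn_size = 48;

	for (tid = 0; tid <= IEEE80211_NUM_TIDS; tid++) {
		rx_tid = &peer->rx_tid[tid];
		if (!rx_tid->active)
			continue;
		cmd.addr_lo = lower_32_bits(rx_tid->paddr);
		cmd.addr_hi = upper_32_bits(rx_tid->paddr);
		ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid,
						HAL_REO_CMD_UPDATE_RX_QUEUE,
						&cmd, NULL);
		if (ret)
			break;
	}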
3099 struct dp_rx_tid *rx_tid = from_timer(rx_tid, timer, frag_timer); in ath11k_dp_rx_frag_timer() local
3101 spin_lock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3102 if (rx_tid->last_frag_no && in ath11k_dp_rx_frag_timer()
3103 rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_timer()
3104 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3107 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_timer()
3108 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
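Lines 3099-3108 are the fragment-reassembly watchdog, recovered almost whole from the listing. If every fragment up to last_frag_no has arrived (the GENMASK completeness test), the normal rx path will finish the MPDU; otherwise the window expired with gaps and the partial MPDU is dropped:

static void ath11k_dp_rx_frag_timer(struct timer_list *timer)
{
	struct dp_rx_tid *rx_tid = from_timer(rx_tid, timer, frag_timer);

	spin_lock_bh(&rx_tid->ab->base_lock);
	if (rx_tid->last_frag_no &&
	    rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) {
		/* Complete: leave reassembly to the rx path. */
		spin_unlock_bh(&rx_tid->ab->base_lock);
		return;
	}
	/* Timed out with fragments missing: drop the partial MPDU. */
	ath11k_dp_rx_frags_cleanup(rx_tid, true);
	spin_unlock_bh(&rx_tid->ab->base_lock);
}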
3116 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_frag_setup() local
3133 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_frag_setup()
3134 rx_tid->ab = ab; in ath11k_peer_rx_frag_setup()
3135 timer_setup(&rx_tid->frag_timer, ath11k_dp_rx_frag_timer, 0); in ath11k_peer_rx_frag_setup()
3136 skb_queue_head_init(&rx_tid->rx_frags); in ath11k_peer_rx_frag_setup()
3274 struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag() argument
3287 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3288 last_frag = skb_peek_tail(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3290 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag()
3325 __skb_unlink(first_frag, &rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3326 while ((skb = __skb_dequeue(&rx_tid->rx_frags))) { in ath11k_dp_rx_h_defrag()
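Lines 3274-3326 stitch the queued fragments into one MSDU: peek at the head and tail, walk the queue, then unlink the head fragment and append every remaining fragment to it. A deliberately simplified sketch of the queue mechanics only; the real path also strips per-fragment 802.11 headers/IVs and must first guarantee tailroom in the head skb:

	struct sk_buff *first_frag, *skb;

	first_frag = skb_peek(&rx_tid->rx_frags);

	__skb_unlink(first_frag, &rx_tid->rx_frags);
	while ((skb = __skb_dequeue(&rx_tid->rx_frags))) {
		/* Assumes first_frag was expanded with enough tailroom. */
		skb_put_data(first_frag, skb->data, skb->len);
		dev_kfree_skb_any(skb);
	}

	*defrag_skb = first_frag;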
3342 static int ath11k_dp_rx_h_defrag_reo_reinject(struct ath11k *ar, struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag_reo_reinject() argument
3362 reo_dest_ring = rx_tid->dst_ring_desc; in ath11k_dp_rx_h_defrag_reo_reinject()
3427 FIELD_PREP(RX_MPDU_DESC_INFO0_SEQ_NUM, rx_tid->cur_sn) | in ath11k_dp_rx_h_defrag_reo_reinject()
3505 ath11k_dp_rx_h_defrag_validate_incr_pn(struct ath11k *ar, struct dp_rx_tid *rx_tid) in ath11k_dp_rx_h_defrag_validate_incr_pn() argument
3513 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag_validate_incr_pn()
3524 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag_validate_incr_pn()
3543 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_frag_h_mpdu() local
3585 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_frag_h_mpdu()
3587 if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) || in ath11k_dp_rx_frag_h_mpdu()
3588 skb_queue_empty(&rx_tid->rx_frags)) { in ath11k_dp_rx_frag_h_mpdu()
3590 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
3591 rx_tid->cur_sn = seqno; in ath11k_dp_rx_frag_h_mpdu()
3594 if (rx_tid->rx_frag_bitmap & BIT(frag_no)) { in ath11k_dp_rx_frag_h_mpdu()
3600 if (frag_no > __fls(rx_tid->rx_frag_bitmap)) in ath11k_dp_rx_frag_h_mpdu()
3601 __skb_queue_tail(&rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3603 ath11k_dp_rx_h_sort_frags(ar, &rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3605 rx_tid->rx_frag_bitmap |= BIT(frag_no); in ath11k_dp_rx_frag_h_mpdu()
3607 rx_tid->last_frag_no = frag_no; in ath11k_dp_rx_frag_h_mpdu()
3610 rx_tid->dst_ring_desc = kmemdup(ring_desc, in ath11k_dp_rx_frag_h_mpdu()
3611 sizeof(*rx_tid->dst_ring_desc), in ath11k_dp_rx_frag_h_mpdu()
3613 if (!rx_tid->dst_ring_desc) { in ath11k_dp_rx_frag_h_mpdu()
3622 if (!rx_tid->last_frag_no || in ath11k_dp_rx_frag_h_mpdu()
3623 rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_h_mpdu()
3624 mod_timer(&rx_tid->frag_timer, jiffies + in ath11k_dp_rx_frag_h_mpdu()
3630 del_timer_sync(&rx_tid->frag_timer); in ath11k_dp_rx_frag_h_mpdu()
3637 if (!ath11k_dp_rx_h_defrag_validate_incr_pn(ar, rx_tid)) in ath11k_dp_rx_frag_h_mpdu()
3640 if (ath11k_dp_rx_h_defrag(ar, peer, rx_tid, &defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3646 if (ath11k_dp_rx_h_defrag_reo_reinject(ar, rx_tid, defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3649 ath11k_dp_rx_frags_cleanup(rx_tid, false); in ath11k_dp_rx_frag_h_mpdu()
3654 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
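Lines 3543-3654 are the heart of the defragmentation path. A condensed control-flow sketch, keeping the names from the listing; the return codes, the timeout constant, and the glue between steps (including the locking, which the real code drops around del_timer_sync) are assumptions:

	rx_tid = &peer->rx_tid[tid];

	/* A new sequence number (or an empty queue) opens a fresh
	 * reassembly window (lines 3587-3591). */
	if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) ||
	    skb_queue_empty(&rx_tid->rx_frags)) {
		ath11k_dp_rx_frags_cleanup(rx_tid, true);
		rx_tid->cur_sn = seqno;
	}

	if (rx_tid->rx_frag_bitmap & BIT(frag_no))
		goto out_drop;	/* duplicate fragment (line 3594) */

	/* Keep the fragment queue sorted by fragment number. */
	if (frag_no > __fls(rx_tid->rx_frag_bitmap))
		__skb_queue_tail(&rx_tid->rx_frags, msdu);
	else
		ath11k_dp_rx_h_sort_frags(ar, &rx_tid->rx_frags, msdu);

	rx_tid->rx_frag_bitmap |= BIT(frag_no);
	if (is_last_frag)
		rx_tid->last_frag_no = frag_no;

	if (!rx_tid->last_frag_no ||
	    rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) {
		/* Gaps remain: (re)arm the watchdog and wait (3624). */
		mod_timer(&rx_tid->frag_timer,
			  jiffies + ATH11K_DP_RX_FRAGMENT_TIMEOUT_MS);
		return 0;
	}

	/* All fragments present: stop the timer, verify the PNs are
	 * strictly increasing, reassemble, and reinject into REO. */
	del_timer_sync(&rx_tid->frag_timer);
	if (!ath11k_dp_rx_h_defrag_validate_incr_pn(ar, rx_tid) ||
	    ath11k_dp_rx_h_defrag(ar, peer, rx_tid, &defrag_skb) ||
	    ath11k_dp_rx_h_defrag_reo_reinject(ar, rx_tid, defrag_skb))
		goto out_cleanup;

	ath11k_dp_rx_frags_cleanup(rx_tid, false);
	return 0;

out_cleanup:
	ath11k_dp_rx_frags_cleanup(rx_tid, true);
out_drop:
	return -EINVAL;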