Lines matching refs: rx_tid

Cross-reference hits for the identifier rx_tid, apparently from the ath11k driver's dp_rx.c (drivers/net/wireless/ath/ath11k). Each entry shows the source line number, the matching code fragment, the enclosing function, and whether rx_tid is a local variable or a function argument there.

671 struct dp_rx_tid *rx_tid; in ath11k_dp_reo_cmd_list_cleanup() local
676 rx_tid = &cmd->data; in ath11k_dp_reo_cmd_list_cleanup()
677 if (rx_tid->vaddr) { in ath11k_dp_reo_cmd_list_cleanup()
678 dma_unmap_single(ab->dev, rx_tid->paddr, in ath11k_dp_reo_cmd_list_cleanup()
679 rx_tid->size, DMA_BIDIRECTIONAL); in ath11k_dp_reo_cmd_list_cleanup()
680 kfree(rx_tid->vaddr); in ath11k_dp_reo_cmd_list_cleanup()
681 rx_tid->vaddr = NULL; in ath11k_dp_reo_cmd_list_cleanup()
690 rx_tid = &cmd_cache->data; in ath11k_dp_reo_cmd_list_cleanup()
691 if (rx_tid->vaddr) { in ath11k_dp_reo_cmd_list_cleanup()
692 dma_unmap_single(ab->dev, rx_tid->paddr, in ath11k_dp_reo_cmd_list_cleanup()
693 rx_tid->size, DMA_BIDIRECTIONAL); in ath11k_dp_reo_cmd_list_cleanup()
694 kfree(rx_tid->vaddr); in ath11k_dp_reo_cmd_list_cleanup()
695 rx_tid->vaddr = NULL; in ath11k_dp_reo_cmd_list_cleanup()
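
The hits at 671-695 are the two teardown loops of ath11k_dp_reo_cmd_list_cleanup(): one walks the list of pending REO commands, the other the deferred cache-flush list, and each unmaps and frees the TID queue buffer before releasing the list element. A sketch reconstructed from the fragments above, assuming the upstream list names, element types, and reo_cmd_lock locking; only the rx_tid lines are verbatim:

static void ath11k_dp_reo_cmd_list_cleanup(struct ath11k_base *ab)
{
        struct ath11k_dp *dp = &ab->dp;
        struct dp_reo_cmd *cmd, *tmp;
        struct dp_reo_cache_flush_elem *cmd_cache, *tmp_cache;
        struct dp_rx_tid *rx_tid;

        spin_lock_bh(&dp->reo_cmd_lock);
        list_for_each_entry_safe(cmd, tmp, &dp->reo_cmd_list, list) {
                list_del(&cmd->list);
                rx_tid = &cmd->data;
                if (rx_tid->vaddr) {
                        dma_unmap_single(ab->dev, rx_tid->paddr,
                                         rx_tid->size, DMA_BIDIRECTIONAL);
                        kfree(rx_tid->vaddr);
                        rx_tid->vaddr = NULL; /* guards against double free */
                }
                kfree(cmd);
        }
        list_for_each_entry_safe(cmd_cache, tmp_cache,
                                 &dp->reo_cmd_cache_flush_list, list) {
                list_del(&cmd_cache->list);
                rx_tid = &cmd_cache->data;
                if (rx_tid->vaddr) {
                        dma_unmap_single(ab->dev, rx_tid->paddr,
                                         rx_tid->size, DMA_BIDIRECTIONAL);
                        kfree(rx_tid->vaddr);
                        rx_tid->vaddr = NULL;
                }
                kfree(cmd_cache);
        }
        spin_unlock_bh(&dp->reo_cmd_lock);
}

The same unmap/free/NULL idiom recurs in ath11k_dp_reo_cmd_free() (710-714) and ath11k_dp_rx_tid_mem_free() (984-987) below; clearing vaddr after kfree() is what makes the vaddr checks safe against a second cleanup pass.
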
705 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_reo_cmd_free() local
709 rx_tid->tid, status); in ath11k_dp_reo_cmd_free()
710 if (rx_tid->vaddr) { in ath11k_dp_reo_cmd_free()
711 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cmd_free()
713 kfree(rx_tid->vaddr); in ath11k_dp_reo_cmd_free()
714 rx_tid->vaddr = NULL; in ath11k_dp_reo_cmd_free()
719 struct dp_rx_tid *rx_tid) in ath11k_dp_reo_cache_flush() argument
725 tot_desc_sz = rx_tid->size; in ath11k_dp_reo_cache_flush()
730 cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz); in ath11k_dp_reo_cache_flush()
731 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
732 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
738 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
742 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
743 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_reo_cache_flush()
745 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
750 rx_tid->tid, ret); in ath11k_dp_reo_cache_flush()
751 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cache_flush()
753 kfree(rx_tid->vaddr); in ath11k_dp_reo_cache_flush()
754 rx_tid->vaddr = NULL; in ath11k_dp_reo_cache_flush()
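
Lines 719-754 make up ath11k_dp_reo_cache_flush(): it steps through the queue descriptor from the tail in per-descriptor-sized chunks issuing FLUSH_CACHE commands, then flushes the base address with a status callback (ath11k_dp_reo_cmd_free) that frees the buffer; if even that last send fails, it frees inline. Reconstruction, with the HAL command names and the qdesc-size helper assumed from the upstream driver:

static void ath11k_dp_reo_cache_flush(struct ath11k_base *ab,
                                      struct dp_rx_tid *rx_tid)
{
        struct ath11k_hal_reo_cmd cmd = {0};
        unsigned long tot_desc_sz, desc_sz;
        int ret;

        tot_desc_sz = rx_tid->size;
        desc_sz = ath11k_hal_reo_qdesc_size(0, HAL_DESC_REO_NON_QOS_TID);

        while (tot_desc_sz > desc_sz) {
                tot_desc_sz -= desc_sz;
                cmd.addr_lo = lower_32_bits(rx_tid->paddr + tot_desc_sz);
                cmd.addr_hi = upper_32_bits(rx_tid->paddr);
                ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid,
                                                HAL_REO_CMD_FLUSH_CACHE, &cmd,
                                                NULL);
                if (ret)
                        ath11k_warn(ab,
                                    "failed to send HAL_REO_CMD_FLUSH_CACHE, tid %d (%d)\n",
                                    rx_tid->tid, ret);
        }

        memset(&cmd, 0, sizeof(cmd));
        cmd.addr_lo = lower_32_bits(rx_tid->paddr);
        cmd.addr_hi = upper_32_bits(rx_tid->paddr);
        cmd.flag = HAL_REO_CMD_FLG_NEED_STATUS;
        ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid,
                                        HAL_REO_CMD_FLUSH_CACHE, &cmd,
                                        ath11k_dp_reo_cmd_free);
        if (ret) {
                ath11k_err(ab, "failed to send HAL_REO_CMD_FLUSH_CACHE cmd, tid %d (%d)\n",
                           rx_tid->tid, ret);
                dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size,
                                 DMA_BIDIRECTIONAL);
                kfree(rx_tid->vaddr);
                rx_tid->vaddr = NULL;
        }
}
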
762 struct dp_rx_tid *rx_tid = ctx; in ath11k_dp_rx_tid_del_func() local
770 rx_tid->tid, status); in ath11k_dp_rx_tid_del_func()
779 memcpy(&elem->data, rx_tid, sizeof(*rx_tid)); in ath11k_dp_rx_tid_del_func()
804 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_del_func()
806 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_del_func()
807 rx_tid->vaddr = NULL; in ath11k_dp_rx_tid_del_func()
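
Lines 762-807 are ath11k_dp_rx_tid_del_func(), the status callback for the delete command sent from ath11k_peer_rx_tid_delete() below. On success it snapshots the rx_tid state into a cache-flush element (the memcpy at 779) and defers the free; on HAL_REO_CMD_DRAIN or allocation failure it frees the descriptor immediately (804-807). A condensed sketch; the stale-entry flush loop and the DP_REO_DESC_FREE_TIMEOUT_MS constant follow the upstream driver and are assumptions here:

static void ath11k_dp_rx_tid_del_func(struct ath11k_dp *dp, void *ctx,
                                      enum hal_reo_cmd_status status)
{
        struct ath11k_base *ab = dp->ab;
        struct dp_rx_tid *rx_tid = ctx;
        struct dp_reo_cache_flush_elem *elem, *tmp;

        if (status == HAL_REO_CMD_DRAIN)
                goto free_desc;

        if (status != HAL_REO_CMD_SUCCESS) {
                ath11k_warn(ab, "failed to delete rx tid %d hw descriptor %d\n",
                            rx_tid->tid, status);
                return;
        }

        elem = kzalloc(sizeof(*elem), GFP_ATOMIC);
        if (!elem)
                goto free_desc;

        elem->ts = jiffies;
        memcpy(&elem->data, rx_tid, sizeof(*rx_tid));

        spin_lock_bh(&dp->reo_cmd_lock);
        list_add_tail(&elem->list, &dp->reo_cmd_cache_flush_list);

        /* flush entries that have sat on the list long enough */
        list_for_each_entry_safe(elem, tmp, &dp->reo_cmd_cache_flush_list,
                                 list) {
                if (time_after(jiffies, elem->ts +
                               msecs_to_jiffies(DP_REO_DESC_FREE_TIMEOUT_MS))) {
                        list_del(&elem->list);
                        spin_unlock_bh(&dp->reo_cmd_lock);
                        ath11k_dp_reo_cache_flush(ab, &elem->data);
                        kfree(elem);
                        spin_lock_bh(&dp->reo_cmd_lock);
                }
        }
        spin_unlock_bh(&dp->reo_cmd_lock);

        return;
free_desc:
        dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size,
                         DMA_BIDIRECTIONAL);
        kfree(rx_tid->vaddr);
        rx_tid->vaddr = NULL;
}
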
814 struct dp_rx_tid *rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_delete() local
817 if (!rx_tid->active) in ath11k_peer_rx_tid_delete()
820 rx_tid->active = false; in ath11k_peer_rx_tid_delete()
823 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
824 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_delete()
826 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_delete()
833 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_peer_rx_tid_delete()
835 kfree(rx_tid->vaddr); in ath11k_peer_rx_tid_delete()
836 rx_tid->vaddr = NULL; in ath11k_peer_rx_tid_delete()
839 rx_tid->paddr = 0; in ath11k_peer_rx_tid_delete()
840 rx_tid->size = 0; in ath11k_peer_rx_tid_delete()
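
Lines 814-840 are ath11k_peer_rx_tid_delete(): it marks the TID inactive, asks the REO hardware to clear the queue's valid bit (UPDATE_RX_QUEUE with UPD0_VLD), and hands the buffer to ath11k_dp_rx_tid_del_func() via the command callback; if the command cannot be queued it unmaps and frees on the spot. Sketch, with the HAL flag names assumed from the upstream driver:

static void ath11k_peer_rx_tid_delete(struct ath11k *ar,
                                      struct ath11k_peer *peer, u8 tid)
{
        struct ath11k_hal_reo_cmd cmd = {0};
        struct dp_rx_tid *rx_tid = &peer->rx_tid[tid];
        int ret;

        if (!rx_tid->active)
                return;

        rx_tid->active = false;

        cmd.flag = HAL_REO_CMD_FLG_NEED_STATUS;
        cmd.addr_lo = lower_32_bits(rx_tid->paddr);
        cmd.addr_hi = upper_32_bits(rx_tid->paddr);
        cmd.upd0 |= HAL_REO_CMD_UPD0_VLD;
        ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid,
                                        HAL_REO_CMD_UPDATE_RX_QUEUE, &cmd,
                                        ath11k_dp_rx_tid_del_func);
        if (ret) {
                ath11k_err(ar->ab, "failed to send HAL_REO_CMD_UPDATE_RX_QUEUE cmd, tid %d (%d)\n",
                           tid, ret);
                dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size,
                                 DMA_BIDIRECTIONAL);
                kfree(rx_tid->vaddr);
                rx_tid->vaddr = NULL;
        }

        rx_tid->paddr = 0;
        rx_tid->size = 0;
}
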
875 static void ath11k_dp_rx_frags_cleanup(struct dp_rx_tid *rx_tid, bool rel_link_desc) in ath11k_dp_rx_frags_cleanup() argument
877 struct ath11k_base *ab = rx_tid->ab; in ath11k_dp_rx_frags_cleanup()
881 if (rx_tid->dst_ring_desc) { in ath11k_dp_rx_frags_cleanup()
883 ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc, in ath11k_dp_rx_frags_cleanup()
885 kfree(rx_tid->dst_ring_desc); in ath11k_dp_rx_frags_cleanup()
886 rx_tid->dst_ring_desc = NULL; in ath11k_dp_rx_frags_cleanup()
889 rx_tid->cur_sn = 0; in ath11k_dp_rx_frags_cleanup()
890 rx_tid->last_frag_no = 0; in ath11k_dp_rx_frags_cleanup()
891 rx_tid->rx_frag_bitmap = 0; in ath11k_dp_rx_frags_cleanup()
892 __skb_queue_purge(&rx_tid->rx_frags); in ath11k_dp_rx_frags_cleanup()
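
Lines 875-892 are ath11k_dp_rx_frags_cleanup(), which resets the per-TID defragmentation state: it optionally returns the saved destination-ring descriptor to the WBM idle list, frees the copy, and clears the sequence number, fragment bitmap, and fragment queue. Nearly all of it is visible in the fragments; the lockdep assertion and the WBM action constant are assumptions:

static void ath11k_dp_rx_frags_cleanup(struct dp_rx_tid *rx_tid,
                                       bool rel_link_desc)
{
        struct ath11k_base *ab = rx_tid->ab;

        lockdep_assert_held(&ab->base_lock);

        if (rx_tid->dst_ring_desc) {
                if (rel_link_desc)
                        ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc,
                                                      HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);
                kfree(rx_tid->dst_ring_desc);
                rx_tid->dst_ring_desc = NULL;
        }

        rx_tid->cur_sn = 0;
        rx_tid->last_frag_no = 0;
        rx_tid->rx_frag_bitmap = 0;
        __skb_queue_purge(&rx_tid->rx_frags);
}
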
897 struct dp_rx_tid *rx_tid; in ath11k_peer_frags_flush() local
903 rx_tid = &peer->rx_tid[i]; in ath11k_peer_frags_flush()
906 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_frags_flush()
909 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_frags_flush()
915 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_cleanup() local
921 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_tid_cleanup()
924 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_peer_rx_tid_cleanup()
927 del_timer_sync(&rx_tid->frag_timer); in ath11k_peer_rx_tid_cleanup()
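
Lines 897-927 are the two per-peer loops, ath11k_peer_frags_flush() and ath11k_peer_rx_tid_cleanup(), which iterate over all TIDs, kill the fragment timer, and reset the defrag state (the cleanup variant also tears down the REO queue via ath11k_peer_rx_tid_delete()). Both drop base_lock around del_timer_sync(): the timer callback itself takes base_lock (see 3122 below), so waiting for it under the lock would deadlock. Sketch of the cleanup loop, assuming an inclusive iteration over IEEE80211_NUM_TIDS as in the upstream driver:

void ath11k_peer_rx_tid_cleanup(struct ath11k *ar, struct ath11k_peer *peer)
{
        struct dp_rx_tid *rx_tid;
        int i;

        lockdep_assert_held(&ar->ab->base_lock);

        for (i = 0; i <= IEEE80211_NUM_TIDS; i++) {
                rx_tid = &peer->rx_tid[i];

                ath11k_peer_rx_tid_delete(ar, peer, i);
                ath11k_dp_rx_frags_cleanup(rx_tid, true);

                /* drop the lock while synchronously stopping the fragment
                 * timer; its callback takes base_lock */
                spin_unlock_bh(&ar->ab->base_lock);
                del_timer_sync(&rx_tid->frag_timer);
                spin_lock_bh(&ar->ab->base_lock);
        }
}
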
934 struct dp_rx_tid *rx_tid, in ath11k_peer_rx_tid_reo_update() argument
941 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
942 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_peer_rx_tid_reo_update()
952 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_reo_update()
957 rx_tid->tid, ret); in ath11k_peer_rx_tid_reo_update()
961 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_reo_update()
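
Lines 934-961 are ath11k_peer_rx_tid_reo_update(), which pushes a new block-ack window size (and optionally a new starting sequence number) into the hardware queue descriptor, then mirrors the window size into rx_tid->ba_win_sz on success. Sketch, with the HAL update flags assumed from the upstream driver:

static int ath11k_peer_rx_tid_reo_update(struct ath11k *ar,
                                         struct ath11k_peer *peer,
                                         struct dp_rx_tid *rx_tid,
                                         u32 ba_win_sz, u16 ssn,
                                         bool update_ssn)
{
        struct ath11k_hal_reo_cmd cmd = {0};
        int ret;

        cmd.addr_lo = lower_32_bits(rx_tid->paddr);
        cmd.addr_hi = upper_32_bits(rx_tid->paddr);
        cmd.flag = HAL_REO_CMD_FLG_NEED_STATUS;
        cmd.upd0 = HAL_REO_CMD_UPD0_BA_WINDOW_SIZE;
        cmd.ba_window_size = ba_win_sz;

        if (update_ssn) {
                cmd.upd0 |= HAL_REO_CMD_UPD0_SSN;
                cmd.upd2 = FIELD_PREP(HAL_REO_CMD_UPD2_SSN, ssn);
        }

        ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid,
                                        HAL_REO_CMD_UPDATE_RX_QUEUE, &cmd,
                                        NULL);
        if (ret) {
                ath11k_warn(ar->ab, "failed to update rx tid queue, tid %d (%d)\n",
                            rx_tid->tid, ret);
                return ret;
        }

        rx_tid->ba_win_sz = ba_win_sz;

        return 0;
}
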
970 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_tid_mem_free() local
980 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_tid_mem_free()
981 if (!rx_tid->active) in ath11k_dp_rx_tid_mem_free()
984 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_mem_free()
986 kfree(rx_tid->vaddr); in ath11k_dp_rx_tid_mem_free()
987 rx_tid->vaddr = NULL; in ath11k_dp_rx_tid_mem_free()
989 rx_tid->active = false; in ath11k_dp_rx_tid_mem_free()
1001 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_tid_setup() local
1018 rx_tid = &peer->rx_tid[tid]; in ath11k_peer_rx_tid_setup()
1020 if (rx_tid->active) { in ath11k_peer_rx_tid_setup()
1021 paddr = rx_tid->paddr; in ath11k_peer_rx_tid_setup()
1022 ret = ath11k_peer_rx_tid_reo_update(ar, peer, rx_tid, in ath11k_peer_rx_tid_setup()
1040 rx_tid->tid = tid; in ath11k_peer_rx_tid_setup()
1042 rx_tid->ba_win_sz = ba_win_sz; in ath11k_peer_rx_tid_setup()
1074 rx_tid->vaddr = vaddr; in ath11k_peer_rx_tid_setup()
1075 rx_tid->paddr = paddr; in ath11k_peer_rx_tid_setup()
1076 rx_tid->size = hw_desc_sz; in ath11k_peer_rx_tid_setup()
1077 rx_tid->active = true; in ath11k_peer_rx_tid_setup()
1092 kfree(rx_tid->vaddr); in ath11k_peer_rx_tid_setup()
1093 rx_tid->vaddr = NULL; in ath11k_peer_rx_tid_setup()
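
Lines 1001-1093 are ath11k_peer_rx_tid_setup(), the allocation path: an already-active TID is only updated in hardware (1020-1022); otherwise a queue descriptor is allocated, initialized, DMA-mapped, and recorded in rx_tid (1074-1077) before the reorder queue is announced to firmware over WMI, with 1092-1093 as the error path. A condensed sketch; the peer lookup, alignment handling, WMI call, and the exact ordering of the rx_tid assignments follow the upstream driver and are assumptions here:

int ath11k_peer_rx_tid_setup(struct ath11k *ar, const u8 *peer_mac,
                             int vdev_id, u8 tid, u32 ba_win_sz, u16 ssn,
                             enum hal_pn_type pn_type)
{
        struct ath11k_base *ab = ar->ab;
        struct ath11k_peer *peer;
        struct dp_rx_tid *rx_tid;
        u32 hw_desc_sz;
        u32 *addr_aligned;
        void *vaddr;
        dma_addr_t paddr;
        int ret;

        spin_lock_bh(&ab->base_lock);

        peer = ath11k_peer_find(ab, vdev_id, peer_mac);
        if (!peer) {
                spin_unlock_bh(&ab->base_lock);
                return -ENOENT;
        }

        rx_tid = &peer->rx_tid[tid];

        /* queue already exists: only update the BA window and SSN */
        if (rx_tid->active) {
                paddr = rx_tid->paddr;
                ret = ath11k_peer_rx_tid_reo_update(ar, peer, rx_tid,
                                                    ba_win_sz, ssn, true);
                spin_unlock_bh(&ab->base_lock);
                if (ret)
                        return ret;
                return ath11k_wmi_peer_rx_reorder_queue_setup(ar, vdev_id,
                                                              peer_mac, paddr,
                                                              tid, 1, ba_win_sz);
        }

        rx_tid->tid = tid;
        rx_tid->ba_win_sz = ba_win_sz;

        hw_desc_sz = ath11k_hal_reo_qdesc_size(ba_win_sz, tid);
        vaddr = kzalloc(hw_desc_sz + HAL_LINK_DESC_ALIGN - 1, GFP_ATOMIC);
        if (!vaddr) {
                spin_unlock_bh(&ab->base_lock);
                return -ENOMEM;
        }

        addr_aligned = PTR_ALIGN(vaddr, HAL_LINK_DESC_ALIGN);
        ath11k_hal_reo_qdesc_setup(addr_aligned, tid, ba_win_sz, ssn, pn_type);

        /* record vaddr before mapping so the error path can free it */
        rx_tid->vaddr = vaddr;

        paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz,
                               DMA_BIDIRECTIONAL);
        ret = dma_mapping_error(ab->dev, paddr);
        if (ret) {
                spin_unlock_bh(&ab->base_lock);
                goto err_mem_free;
        }

        rx_tid->paddr = paddr;
        rx_tid->size = hw_desc_sz;
        rx_tid->active = true;

        spin_unlock_bh(&ab->base_lock);

        return ath11k_wmi_peer_rx_reorder_queue_setup(ar, vdev_id, peer_mac,
                                                      paddr, tid, 1, ba_win_sz);

err_mem_free:
        kfree(rx_tid->vaddr);
        rx_tid->vaddr = NULL;
        return ret;
}
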
1135 paddr = peer->rx_tid[params->tid].paddr; in ath11k_dp_rx_ampdu_stop()
1136 active = peer->rx_tid[params->tid].active; in ath11k_dp_rx_ampdu_stop()
1143 ret = ath11k_peer_rx_tid_reo_update(ar, peer, peer->rx_tid, 1, 0, false); in ath11k_dp_rx_ampdu_stop()
1170 struct dp_rx_tid *rx_tid; in ath11k_dp_peer_rx_pn_replay_config() local
1213 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_peer_rx_pn_replay_config()
1214 if (!rx_tid->active) in ath11k_dp_peer_rx_pn_replay_config()
1216 cmd.addr_lo = lower_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1217 cmd.addr_hi = upper_32_bits(rx_tid->paddr); in ath11k_dp_peer_rx_pn_replay_config()
1218 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_peer_rx_pn_replay_config()
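
Lines 1170-1218 are ath11k_dp_peer_rx_pn_replay_config(), which reprograms every active TID queue when a key is installed so the REO hardware performs PN replay checking. Only the loop shows up in the hits; the command setup above it (cipher-dependent PN size and check flags) is assumed from the upstream driver. Sketch of the loop as an excerpt:

        /* cmd.upd0/upd1 and cmd.pn_size were prepared per cipher above */
        for (tid = 0; tid <= IEEE80211_NUM_TIDS; tid++) {
                rx_tid = &peer->rx_tid[tid];
                if (!rx_tid->active)
                        continue;

                cmd.addr_lo = lower_32_bits(rx_tid->paddr);
                cmd.addr_hi = upper_32_bits(rx_tid->paddr);
                ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid,
                                                HAL_REO_CMD_UPDATE_RX_QUEUE,
                                                &cmd, NULL);
                if (ret) {
                        ath11k_warn(ab, "failed to configure rx tid %d queue for pn replay detection %d\n",
                                    tid, ret);
                        break;
                }
        }
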
3120 struct dp_rx_tid *rx_tid = from_timer(rx_tid, timer, frag_timer); in ath11k_dp_rx_frag_timer() local
3122 spin_lock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3123 if (rx_tid->last_frag_no && in ath11k_dp_rx_frag_timer()
3124 rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_timer()
3125 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3128 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_timer()
3129 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
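
Lines 3120-3129 are ath11k_dp_rx_frag_timer(), the per-TID timeout armed in ath11k_dp_rx_frag_h_mpdu() below. If the fragment bitmap already covers 0..last_frag_no, reassembly won the race and the timer does nothing; otherwise the stale fragments are discarded. The fragments above determine essentially the whole function:

static void ath11k_dp_rx_frag_timer(struct timer_list *timer)
{
        struct dp_rx_tid *rx_tid = from_timer(rx_tid, timer, frag_timer);

        spin_lock_bh(&rx_tid->ab->base_lock);
        if (rx_tid->last_frag_no &&
            rx_tid->rx_frag_bitmap == GENMASK(rx_tid->last_frag_no, 0)) {
                spin_unlock_bh(&rx_tid->ab->base_lock);
                return;
        }
        ath11k_dp_rx_frags_cleanup(rx_tid, true);
        spin_unlock_bh(&rx_tid->ab->base_lock);
}
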
3137 struct dp_rx_tid *rx_tid; in ath11k_peer_rx_frag_setup() local
3158 rx_tid = &peer->rx_tid[i]; in ath11k_peer_rx_frag_setup()
3159 rx_tid->ab = ab; in ath11k_peer_rx_frag_setup()
3160 timer_setup(&rx_tid->frag_timer, ath11k_dp_rx_frag_timer, 0); in ath11k_peer_rx_frag_setup()
3161 skb_queue_head_init(&rx_tid->rx_frags); in ath11k_peer_rx_frag_setup()
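
Lines 3137-3161, ath11k_peer_rx_frag_setup(), initialize the defrag state for every TID when a peer is created. The per-TID loop reconstructed from the hits (the michael-mic shash allocation that the upstream function also performs is omitted, and the inclusive TID bound is an assumption):

        for (i = 0; i <= IEEE80211_NUM_TIDS; i++) {
                rx_tid = &peer->rx_tid[i];
                rx_tid->ab = ab;
                timer_setup(&rx_tid->frag_timer, ath11k_dp_rx_frag_timer, 0);
                skb_queue_head_init(&rx_tid->rx_frags);
        }
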
3300 struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag() argument
3313 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3314 last_frag = skb_peek_tail(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3316 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag()
3351 __skb_unlink(first_frag, &rx_tid->rx_frags); in ath11k_dp_rx_h_defrag()
3352 while ((skb = __skb_dequeue(&rx_tid->rx_frags))) { in ath11k_dp_rx_h_defrag()
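
Lines 3300-3352 are ath11k_dp_rx_h_defrag(), which coalesces the sorted fragment queue into one MSDU: it walks the queue to undecap each fragment and size the result, then detaches the first fragment and appends the payload of the rest to it. A condensed excerpt of that tail; the per-fragment decap inside the walk is elided, and the head-expansion and skb_put_data() append are assumptions:

        struct sk_buff *first_frag, *last_frag, *skb;
        int extra_space = 0;

        first_frag = skb_peek(&rx_tid->rx_frags);
        last_frag = skb_peek_tail(&rx_tid->rx_frags);

        skb_queue_walk(&rx_tid->rx_frags, skb) {
                /* upstream strips the rx descriptor, crypto header and
                 * trailer of each fragment here (elided) */
                if (skb != first_frag)
                        extra_space += skb->len;
        }

        if (extra_space > 0 &&
            pskb_expand_head(first_frag, 0, extra_space, GFP_ATOMIC) < 0)
                return -ENOMEM;

        __skb_unlink(first_frag, &rx_tid->rx_frags);
        while ((skb = __skb_dequeue(&rx_tid->rx_frags))) {
                skb_put_data(first_frag, skb->data, skb->len);
                dev_kfree_skb_any(skb);
        }

        *defrag_skb = first_frag;
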
3368 static int ath11k_dp_rx_h_defrag_reo_reinject(struct ath11k *ar, struct dp_rx_tid *rx_tid, in ath11k_dp_rx_h_defrag_reo_reinject() argument
3388 reo_dest_ring = rx_tid->dst_ring_desc; in ath11k_dp_rx_h_defrag_reo_reinject()
3453 FIELD_PREP(RX_MPDU_DESC_INFO0_SEQ_NUM, rx_tid->cur_sn) | in ath11k_dp_rx_h_defrag_reo_reinject()
3531 ath11k_dp_rx_h_defrag_validate_incr_pn(struct ath11k *ar, struct dp_rx_tid *rx_tid) in ath11k_dp_rx_h_defrag_validate_incr_pn() argument
3539 first_frag = skb_peek(&rx_tid->rx_frags); in ath11k_dp_rx_h_defrag_validate_incr_pn()
3550 skb_queue_walk(&rx_tid->rx_frags, skb) { in ath11k_dp_rx_h_defrag_validate_incr_pn()
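
Lines 3531-3550 are ath11k_dp_rx_h_defrag_validate_incr_pn(), which enforces that the packet numbers of CCMP/GCMP fragments increase by exactly one across the queue, defeating fragment-splicing attacks. Sketch; the PN-extraction helper name and the cipher check are assumed from the upstream driver:

static bool ath11k_dp_rx_h_defrag_validate_incr_pn(struct ath11k *ar,
                                                   struct dp_rx_tid *rx_tid)
{
        struct sk_buff *first_frag, *skb;
        u64 last_pn, cur_pn;

        first_frag = skb_peek(&rx_tid->rx_frags);

        /* upstream returns true here for ciphers without a PN; only
         * CCMP/GCMP variants are validated */

        last_pn = ath11k_dp_rx_h_get_pn(ar, first_frag);
        skb_queue_walk(&rx_tid->rx_frags, skb) {
                if (skb == first_frag)
                        continue;

                cur_pn = ath11k_dp_rx_h_get_pn(ar, skb);
                if (cur_pn != last_pn + 1)
                        return false;
                last_pn = cur_pn;
        }
        return true;
}
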
3569 struct dp_rx_tid *rx_tid; in ath11k_dp_rx_frag_h_mpdu() local
3618 rx_tid = &peer->rx_tid[tid]; in ath11k_dp_rx_frag_h_mpdu()
3620 if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) || in ath11k_dp_rx_frag_h_mpdu()
3621 skb_queue_empty(&rx_tid->rx_frags)) { in ath11k_dp_rx_frag_h_mpdu()
3623 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
3624 rx_tid->cur_sn = seqno; in ath11k_dp_rx_frag_h_mpdu()
3627 if (rx_tid->rx_frag_bitmap & BIT(frag_no)) { in ath11k_dp_rx_frag_h_mpdu()
3633 if (!rx_tid->rx_frag_bitmap || (frag_no > __fls(rx_tid->rx_frag_bitmap))) in ath11k_dp_rx_frag_h_mpdu()
3634 __skb_queue_tail(&rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3636 ath11k_dp_rx_h_sort_frags(ar, &rx_tid->rx_frags, msdu); in ath11k_dp_rx_frag_h_mpdu()
3638 rx_tid->rx_frag_bitmap |= BIT(frag_no); in ath11k_dp_rx_frag_h_mpdu()
3640 rx_tid->last_frag_no = frag_no; in ath11k_dp_rx_frag_h_mpdu()
3643 rx_tid->dst_ring_desc = kmemdup(ring_desc, in ath11k_dp_rx_frag_h_mpdu()
3644 sizeof(*rx_tid->dst_ring_desc), in ath11k_dp_rx_frag_h_mpdu()
3646 if (!rx_tid->dst_ring_desc) { in ath11k_dp_rx_frag_h_mpdu()
3655 if (!rx_tid->last_frag_no || in ath11k_dp_rx_frag_h_mpdu()
3656 rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) { in ath11k_dp_rx_frag_h_mpdu()
3657 mod_timer(&rx_tid->frag_timer, jiffies + in ath11k_dp_rx_frag_h_mpdu()
3663 del_timer_sync(&rx_tid->frag_timer); in ath11k_dp_rx_frag_h_mpdu()
3670 if (!ath11k_dp_rx_h_defrag_validate_incr_pn(ar, rx_tid)) in ath11k_dp_rx_frag_h_mpdu()
3673 if (ath11k_dp_rx_h_defrag(ar, peer, rx_tid, &defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3679 if (ath11k_dp_rx_h_defrag_reo_reinject(ar, rx_tid, defrag_skb)) in ath11k_dp_rx_frag_h_mpdu()
3682 ath11k_dp_rx_frags_cleanup(rx_tid, false); in ath11k_dp_rx_frag_h_mpdu()
3687 ath11k_dp_rx_frags_cleanup(rx_tid, true); in ath11k_dp_rx_frag_h_mpdu()
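
Lines 3569-3687 are the heart of the defragmentation state machine, ath11k_dp_rx_frag_h_mpdu(). The hits trace the whole lifecycle: reset on a new sequence number (3620-3624), duplicate detection via the bitmap (3627), in-order insertion (3633-3636), bookkeeping and descriptor capture (3638-3646), then either arming the timeout for an incomplete MPDU (3655-3657) or stopping the timer and running the validate/defrag/reinject pipeline (3663-3687). A condensed excerpt of that flow; the extraction of seqno, frag_no, and more_frags from the 802.11 header, the error-path details, and the timeout constant are assumptions from the upstream driver:

        rx_tid = &peer->rx_tid[tid];

        /* a different sequence number (or nothing queued yet) starts a
         * fresh defrag attempt */
        if ((!skb_queue_empty(&rx_tid->rx_frags) && seqno != rx_tid->cur_sn) ||
            skb_queue_empty(&rx_tid->rx_frags)) {
                ath11k_dp_rx_frags_cleanup(rx_tid, true);
                rx_tid->cur_sn = seqno;
        }

        if (rx_tid->rx_frag_bitmap & BIT(frag_no)) {
                /* duplicate fragment: drop it */
                ret = -EINVAL;
                goto out_unlock;
        }

        if (!rx_tid->rx_frag_bitmap || (frag_no > __fls(rx_tid->rx_frag_bitmap)))
                __skb_queue_tail(&rx_tid->rx_frags, msdu);
        else
                ath11k_dp_rx_h_sort_frags(ar, &rx_tid->rx_frags, msdu);

        rx_tid->rx_frag_bitmap |= BIT(frag_no);
        if (!more_frags)
                rx_tid->last_frag_no = frag_no;

        if (frag_no == 0) {
                /* keep a copy of the destination ring descriptor for the
                 * eventual reinjection */
                rx_tid->dst_ring_desc = kmemdup(ring_desc,
                                                sizeof(*rx_tid->dst_ring_desc),
                                                GFP_ATOMIC);
                if (!rx_tid->dst_ring_desc) {
                        ret = -ENOMEM;
                        goto out_unlock;
                }
        } else {
                ath11k_dp_rx_link_desc_return(ab, (u32 *)ring_desc,
                                              HAL_WBM_REL_BM_ACT_PUT_IN_IDLE);
        }

        if (!rx_tid->last_frag_no ||
            rx_tid->rx_frag_bitmap != GENMASK(rx_tid->last_frag_no, 0)) {
                /* incomplete: wait for more fragments or the timeout */
                mod_timer(&rx_tid->frag_timer, jiffies +
                          ATH11K_DP_RX_FRAGMENT_TIMEOUT_MS);
                goto out_unlock;
        }

        /* complete: stop the timer (dropping base_lock, as in the cleanup
         * loops above), then validate PNs, reassemble and reinject */
        spin_unlock_bh(&ab->base_lock);
        del_timer_sync(&rx_tid->frag_timer);
        spin_lock_bh(&ab->base_lock);

        if (!ath11k_dp_rx_h_defrag_validate_incr_pn(ar, rx_tid))
                goto err_frags_cleanup;

        if (ath11k_dp_rx_h_defrag(ar, peer, rx_tid, &defrag_skb))
                goto err_frags_cleanup;

        if (ath11k_dp_rx_h_defrag_reo_reinject(ar, rx_tid, defrag_skb))
                goto err_frags_cleanup;

        ath11k_dp_rx_frags_cleanup(rx_tid, false);
        goto out_unlock;

err_frags_cleanup:
        ath11k_dp_rx_frags_cleanup(rx_tid, true);
out_unlock:
        spin_unlock_bh(&ab->base_lock);
        return ret;
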