Lines matching refs:htt in drivers/net/wireless/ath/ath10k/htt_rx.c (numbers are source line numbers; each match is annotated with its enclosing function)

34 	hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr)  in ath10k_htt_rx_find_skb_paddr()
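The match above shows the in-order RX path resolving a completion that carries only a DMA address back to its buffer through a hash table keyed on paddr. A minimal userspace model of that lookup, assuming a fixed-bucket chained table (the driver uses hash_add()/hash_for_each_possible() on rx_ring.skb_table; struct rx_buf and hash_paddr() below are illustrative):

	/* paddr -> buffer lookup, modeling rx_ring.skb_table */
	#include <stdint.h>
	#include <stdlib.h>

	#define NBUCKETS 32 /* power of two, so the mask below is valid */

	struct rx_buf {
		uint64_t paddr;      /* DMA address the completion reports */
		void *data;          /* stands in for the skb */
		struct rx_buf *next; /* bucket chain */
	};

	static struct rx_buf *skb_table[NBUCKETS];

	static unsigned int hash_paddr(uint64_t paddr)
	{
		return (unsigned int)(paddr >> 6) & (NBUCKETS - 1);
	}

	static void table_add(struct rx_buf *buf)
	{
		unsigned int b = hash_paddr(buf->paddr);

		buf->next = skb_table[b];
		skb_table[b] = buf;
	}

	static struct rx_buf *table_find(uint64_t paddr)
	{
		struct rx_buf *buf;

		/* walk only the bucket the key hashes to, like
		 * hash_for_each_possible() */
		for (buf = skb_table[hash_paddr(paddr)]; buf; buf = buf->next)
			if (buf->paddr == paddr)
				return buf;
		return NULL;
	}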
42 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
49 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
50 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
52 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
59 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
60 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
65 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
72 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
73 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
74 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
75 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
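ath10k_htt_rx_ring_free() walks every slot, unmaps and frees any buffer still owned by the ring, then zeroes the bookkeeping (fill_cnt, skb_table, netbufs_ring). A userspace model, with free() standing in for dma_unmap_single() plus dev_kfree_skb_any(); the struct ring here is illustrative and is reused by the later sketches:

	#include <stdint.h>
	#include <stdlib.h>
	#include <string.h>

	struct ring {
		void **netbufs;              /* models rx_ring.netbufs_ring */
		uint64_t *paddrs;            /* device-visible address ring */
		int size, size_mask;         /* size is a power of two, mask = size - 1 */
		int fill_cnt, fill_level;    /* current vs. target occupancy */
		int sw_rd_idx;               /* software (consumer) read index */
		volatile int32_t *alloc_idx; /* producer index shared with the device */
	};

	static void ring_free_bufs(struct ring *r)
	{
		int i;

		for (i = 0; i < r->size; i++) {
			if (!r->netbufs[i])
				continue;
			/* dma_unmap_single() + dev_kfree_skb_any() in the driver */
			free(r->netbufs[i]);
		}

		r->fill_cnt = 0;
		memset(r->netbufs, 0, r->size * sizeof(r->netbufs[0]));
	}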
78 static size_t ath10k_htt_get_rx_ring_size_32(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_32() argument
80 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_32); in ath10k_htt_get_rx_ring_size_32()
83 static size_t ath10k_htt_get_rx_ring_size_64(struct ath10k_htt *htt) in ath10k_htt_get_rx_ring_size_64() argument
85 return htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring_64); in ath10k_htt_get_rx_ring_size_64()
88 static void ath10k_htt_config_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_32() argument
91 htt->rx_ring.paddrs_ring_32 = vaddr; in ath10k_htt_config_paddrs_ring_32()
94 static void ath10k_htt_config_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_config_paddrs_ring_64() argument
97 htt->rx_ring.paddrs_ring_64 = vaddr; in ath10k_htt_config_paddrs_ring_64()
100 static void ath10k_htt_set_paddrs_ring_32(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_32() argument
103 htt->rx_ring.paddrs_ring_32[idx] = __cpu_to_le32(paddr); in ath10k_htt_set_paddrs_ring_32()
106 static void ath10k_htt_set_paddrs_ring_64(struct ath10k_htt *htt, in ath10k_htt_set_paddrs_ring_64() argument
109 htt->rx_ring.paddrs_ring_64[idx] = __cpu_to_le64(paddr); in ath10k_htt_set_paddrs_ring_64()
112 static void ath10k_htt_reset_paddrs_ring_32(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_32() argument
114 htt->rx_ring.paddrs_ring_32[idx] = 0; in ath10k_htt_reset_paddrs_ring_32()
117 static void ath10k_htt_reset_paddrs_ring_64(struct ath10k_htt *htt, int idx) in ath10k_htt_reset_paddrs_ring_64() argument
119 htt->rx_ring.paddrs_ring_64[idx] = 0; in ath10k_htt_reset_paddrs_ring_64()
122 static void *ath10k_htt_get_vaddr_ring_32(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_32() argument
124 return (void *)htt->rx_ring.paddrs_ring_32; in ath10k_htt_get_vaddr_ring_32()
127 static void *ath10k_htt_get_vaddr_ring_64(struct ath10k_htt *htt) in ath10k_htt_get_vaddr_ring_64() argument
129 return (void *)htt->rx_ring.paddrs_ring_64; in ath10k_htt_get_vaddr_ring_64()
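The _32/_64 helper pairs above exist because the target consumes a ring of little-endian DMA addresses whose width depends on the chip; each accessor differs only in element type and byte swap. A sketch of the two store flavors, with glibc's htole32()/htole64() standing in for __cpu_to_le32()/__cpu_to_le64():

	#include <stdint.h>
	#include <endian.h>

	static uint32_t *paddrs_ring_32; /* models rx_ring.paddrs_ring_32 */
	static uint64_t *paddrs_ring_64; /* models rx_ring.paddrs_ring_64 */

	static void set_paddr_32(int idx, uint64_t paddr)
	{
		paddrs_ring_32[idx] = htole32((uint32_t)paddr);
	}

	static void set_paddr_64(int idx, uint64_t paddr)
	{
		paddrs_ring_64[idx] = htole64(paddr);
	}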
132 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
134 struct ath10k_hw_params *hw = &htt->ar->hw_params; in __ath10k_htt_rx_ring_fill_n()
148 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
150 if (idx < 0 || idx >= htt->rx_ring.size) { in __ath10k_htt_rx_ring_fill_n()
151 ath10k_err(htt->ar, "rx ring index is not valid, firmware malfunctioning?\n"); in __ath10k_htt_rx_ring_fill_n()
152 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
173 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
177 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
185 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
186 ath10k_htt_set_paddrs_ring(htt, paddr, idx); in __ath10k_htt_rx_ring_fill_n()
187 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
189 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
190 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
197 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
206 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
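The fill loop reads the shared producer index out of coherent DMA memory, clamps it if the firmware handed back an out-of-range value (the "firmware malfunctioning?" error above), fills slots while wrapping with the power-of-two mask, and finally publishes the new index for the hardware. A model continuing the sketch above (same includes and struct ring); malloc() stands in for skb allocation plus dma_map_single(), and in the driver the caller must hold rx_ring.lock:

	static int ring_fill_n(struct ring *r, int num)
	{
		int32_t idx = *r->alloc_idx; /* shared with the device */

		if (idx < 0 || idx >= r->size) {
			/* "rx ring index is not valid, firmware malfunctioning?" */
			idx &= r->size_mask;
		}

		while (num > 0) {
			void *buf = malloc(2048); /* skb alloc + DMA map in the driver */

			if (!buf)
				return -1; /* caller arms the refill retry timer */
			r->netbufs[idx] = buf;
			r->paddrs[idx] = (uint64_t)(uintptr_t)buf;
			r->fill_cnt++;
			idx = (idx + 1) & r->size_mask;
			num--;
		}

		*r->alloc_idx = idx; /* publish: hardware may consume up to here */
		return 0;
	}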
210 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
212 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
213 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
216 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
236 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
237 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
240 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
248 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
251 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
254 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
259 struct ath10k_htt *htt = from_timer(htt, t, rx_ring.refill_retry_timer); in ath10k_htt_rx_ring_refill_retry() local
261 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
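Replenish computes the deficit between fill_level and fill_cnt and tries to close it (the driver additionally caps how much it fills per pass to bound rx_ring.lock hold time); if buffer allocation fails, a retry timer is armed whose handler, ath10k_htt_rx_ring_refill_retry(), simply calls replenish again. A sketch continuing from ring_fill_n() above; schedule_retry() is a hypothetical stand-in for mod_timer(&rx_ring.refill_retry_timer, ...):

	/* hypothetical one-shot timer arm, standing in for mod_timer() */
	extern void schedule_retry(void (*fn)(struct ring *), struct ring *r);

	static void msdu_buff_replenish(struct ring *r)
	{
		int num_deficit = r->fill_level - r->fill_cnt;

		if (num_deficit > 0 && ring_fill_n(r, num_deficit) < 0)
			schedule_retry(msdu_buff_replenish, r);
	}

	/* timer handler: just try again (ath10k_htt_rx_ring_refill_retry) */
	static void refill_retry(struct ring *r)
	{
		msdu_buff_replenish(r);
	}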
266 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
272 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
273 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
274 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
277 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
279 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
284 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
286 if (htt->ar->bus_param.dev_type == ATH10K_DEV_TYPE_HL) in ath10k_htt_rx_free()
289 del_timer_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
291 skb_queue_purge(&htt->rx_msdus_q); in ath10k_htt_rx_free()
292 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
293 skb_queue_purge(&htt->tx_fetch_ind_q); in ath10k_htt_rx_free()
295 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
296 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
297 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_free()
299 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
300 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_free()
301 ath10k_htt_get_vaddr_ring(htt), in ath10k_htt_rx_free()
302 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
304 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
305 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
306 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
307 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
309 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
312 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
314 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
318 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
320 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
325 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
326 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
327 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
328 ath10k_htt_reset_paddrs_ring(htt, idx); in ath10k_htt_rx_netbuf_pop()
331 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
332 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
333 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
335 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
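ath10k_htt_rx_netbuf_pop() is the consumer side of the same ring: take the buffer at the software read index, clear the slot and its device-visible address, advance the index with the same power-of-two mask, and drop fill_cnt. A model reusing struct ring; in the driver the caller must hold rx_ring.lock (hence the lockdep_assert_held above):

	static void *ring_netbuf_pop(struct ring *r)
	{
		void *buf;
		int idx = r->sw_rd_idx;

		if (r->fill_cnt == 0)
			return NULL; /* ring empty */

		buf = r->netbufs[idx];
		r->netbufs[idx] = NULL;
		r->paddrs[idx] = 0; /* ath10k_htt_reset_paddrs_ring() */

		r->sw_rd_idx = (idx + 1) & r->size_mask;
		r->fill_cnt--;
		return buf; /* the driver also dma_unmap_single()s here */
	}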
346 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
349 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
359 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
364 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
415 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
458 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
461 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
465 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
473 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
475 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
493 static int ath10k_htt_rx_handle_amsdu_mon_32(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_32() argument
497 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_32()
527 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
542 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_32()
570 ath10k_htt_rx_handle_amsdu_mon_64(struct ath10k_htt *htt, in ath10k_htt_rx_handle_amsdu_mon_64() argument
574 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu_mon_64()
604 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
619 frag_buf = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_handle_amsdu_mon_64()
646 static int ath10k_htt_rx_pop_paddr32_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr32_list() argument
650 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr32_list()
660 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr32_list()
668 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr32_list()
675 ret = ath10k_htt_rx_handle_amsdu_mon_32(htt, msdu, in ath10k_htt_rx_pop_paddr32_list()
700 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr32_list()
711 static int ath10k_htt_rx_pop_paddr64_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr64_list() argument
715 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr64_list()
725 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr64_list()
732 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr64_list()
739 ret = ath10k_htt_rx_handle_amsdu_mon_64(htt, msdu, in ath10k_htt_rx_pop_paddr64_list()
764 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr64_list()
775 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
777 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
781 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
786 htt->rx_confused = false; in ath10k_htt_rx_alloc()
791 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
792 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
793 htt->rx_ring.fill_level = ar->hw_params.rx_ring_fill_level; in ath10k_htt_rx_alloc()
795 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
800 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
801 kcalloc(htt->rx_ring.size, sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
803 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
806 size = ath10k_htt_get_rx_ring_size(htt); in ath10k_htt_rx_alloc()
808 vaddr_ring = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_KERNEL); in ath10k_htt_rx_alloc()
812 ath10k_htt_config_paddrs_ring(htt, vaddr_ring); in ath10k_htt_rx_alloc()
813 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
815 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
816 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
821 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
822 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
823 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
824 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
829 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
831 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
832 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
833 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
835 skb_queue_head_init(&htt->rx_msdus_q); in ath10k_htt_rx_alloc()
836 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
837 skb_queue_head_init(&htt->tx_fetch_ind_q); in ath10k_htt_rx_alloc()
838 atomic_set(&htt->num_mpdus_ready, 0); in ath10k_htt_rx_alloc()
841 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
845 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
846 ath10k_htt_get_rx_ring_size(htt), in ath10k_htt_rx_alloc()
848 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
850 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
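ath10k_htt_rx_alloc() derives size_mask = size - 1, which only works as a wrap mask when the ring size is a power of two, hence the explicit is_power_of_2() check; the netbuf array, the paddr ring, and the shared alloc index each get their own allocation. A model reusing struct ring; calloc() stands in for kcalloc() and the two dma_alloc_coherent() calls, and RX_RING_SIZE is an illustrative constant:

	#define RX_RING_SIZE 2048 /* illustrative; the driver uses HTT_RX_RING_SIZE */

	static int ring_alloc(struct ring *r, int fill_level)
	{
		r->size = RX_RING_SIZE;
		r->size_mask = r->size - 1;
		r->fill_level = fill_level;

		if (r->size & (r->size - 1))
			return -1; /* the is_power_of_2() check above */

		r->netbufs = calloc(r->size, sizeof(r->netbufs[0]));
		r->paddrs = calloc(r->size, sizeof(r->paddrs[0]));
		r->alloc_idx = calloc(1, sizeof(*r->alloc_idx));
		if (!r->netbufs || !r->paddrs || !r->alloc_idx) {
			free(r->netbufs);
			free(r->paddrs);
			free((void *)r->alloc_idx);
			return -1;
		}

		r->fill_cnt = 0;
		r->sw_rd_idx = 0;
		*r->alloc_idx = 0; /* device sees an empty ring until fill_n runs */
		return 0;
	}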
1350 skb_queue_tail(&ar->htt.rx_msdus_q, skb); in ath10k_htt_rx_h_queue_msdu()
2316 static int ath10k_htt_rx_handle_amsdu(struct ath10k_htt *htt) in ath10k_htt_rx_handle_amsdu() argument
2318 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handle_amsdu()
2319 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handle_amsdu()
2331 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2332 if (htt->rx_confused) { in ath10k_htt_rx_handle_amsdu()
2333 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2336 ret = ath10k_htt_rx_amsdu_pop(htt, &amsdu); in ath10k_htt_rx_handle_amsdu()
2337 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_handle_amsdu()
2345 htt->rx_confused = true; in ath10k_htt_rx_handle_amsdu()
2450 static bool ath10k_htt_rx_proc_rx_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_hl() argument
2456 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_hl()
2754 static bool ath10k_htt_rx_proc_rx_frag_ind_hl(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_frag_ind_hl() argument
2758 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_frag_ind_hl()
2811 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2873 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2900 return ath10k_htt_rx_proc_rx_ind_hl(htt, &resp->rx_ind_hl, skb, in ath10k_htt_rx_proc_rx_frag_ind_hl()
2912 static void ath10k_htt_rx_proc_rx_ind_ll(struct ath10k_htt *htt, in ath10k_htt_rx_proc_rx_ind_ll() argument
2915 struct ath10k *ar = htt->ar; in ath10k_htt_rx_proc_rx_ind_ll()
2935 atomic_add(mpdu_count, &htt->num_mpdus_ready); in ath10k_htt_rx_proc_rx_ind_ll()
2944 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_tx_compl_ind() local
3013 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
3014 } else if (!kfifo_put(&htt->txdone_fifo, tx_done)) { in ath10k_htt_rx_tx_compl_ind()
3017 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_tx_compl_ind()
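As the matches above show, tx completions are normally batched into txdone_fifo with kfifo_put() and drained later from the NAPI poll loop; the other branch (in the driver, taken for high-latency devices) completes inline via ath10k_txrx_tx_unref(), and a full fifo also falls back to inline completion rather than dropping the event. A sketch with a hand-rolled power-of-two fifo; struct tx_done and tx_unref() are illustrative stand-ins:

	#include <stdbool.h>

	#define FIFO_SIZE 64 /* power of two, like the kfifo backing txdone_fifo */

	struct tx_done { int msdu_id; int status; };

	extern void tx_unref(struct tx_done *td); /* models ath10k_txrx_tx_unref() */

	static struct tx_done fifo[FIFO_SIZE];
	static unsigned int fifo_in, fifo_out;

	static bool fifo_put(struct tx_done td)
	{
		if (fifo_in - fifo_out == FIFO_SIZE)
			return false; /* full: kfifo_put() returns 0 here */
		fifo[fifo_in++ & (FIFO_SIZE - 1)] = td;
		return true;
	}

	static bool fifo_get(struct tx_done *td)
	{
		if (fifo_in == fifo_out)
			return false; /* empty: poll loop stops draining */
		*td = fifo[fifo_out++ & (FIFO_SIZE - 1)];
		return true;
	}

	static void tx_compl_one(struct tx_done td, bool high_latency)
	{
		if (high_latency)
			tx_unref(&td); /* complete immediately */
		else if (!fifo_put(td))
			tx_unref(&td); /* fifo overrun: complete inline */
	}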
3210 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
3211 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
3259 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
3261 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
3272 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
3274 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
3302 ret = ath10k_htt_rx_pop_paddr64_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3305 ret = ath10k_htt_rx_pop_paddr32_list(htt, &resp->rx_in_ord_ind, in ath10k_htt_rx_in_ord_ind()
3310 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
3342 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
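rx_confused, set twice above, is a sticky "ring state can no longer be trusted" flag: once a pop fails or the firmware delivers an inconsistent list, the driver sets it and stops touching the ring rather than risk corrupting it further. Schematically (pop_paddr_list() is a hypothetical stand-in for the paddr-list pop):

	static int rx_confused; /* models htt->rx_confused */

	extern int pop_paddr_list(void); /* hypothetical paddr-list pop */

	static int in_ord_ind(void)
	{
		if (rx_confused)
			return -1; /* drop everything until teardown */

		if (pop_paddr_list() < 0) {
			rx_confused = 1; /* ring state is unknown from here on */
			return -1;
		}
		return 0;
	}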
3413 if (!ar->htt.tx_q_state.enabled) { in ath10k_htt_rx_tx_fetch_ind()
3418 if (ar->htt.tx_q_state.mode == HTT_TX_MODE_SWITCH_PUSH) { in ath10k_htt_rx_tx_fetch_ind()
3437 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_fetch_ind()
3438 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_fetch_ind()
3589 ar->htt.tx_q_state.enabled = enable; in ath10k_htt_rx_tx_mode_switch_ind()
3590 ar->htt.tx_q_state.mode = mode; in ath10k_htt_rx_tx_mode_switch_ind()
3591 ar->htt.tx_q_state.num_push_allowed = threshold; in ath10k_htt_rx_tx_mode_switch_ind()
3601 if (unlikely(peer_id >= ar->htt.tx_q_state.num_peers) || in ath10k_htt_rx_tx_mode_switch_ind()
3602 unlikely(tid >= ar->htt.tx_q_state.num_tids)) { in ath10k_htt_rx_tx_mode_switch_ind()
3622 spin_lock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3625 spin_unlock_bh(&ar->htt.tx_lock); in ath10k_htt_rx_tx_mode_switch_ind()
3895 if (ar->htt.disable_tx_comp) { in ath10k_update_per_peer_tx_stats()
4068 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4079 if (resp->hdr.msg_type >= ar->htt.t2h_msg_types_max) { in ath10k_htt_t2h_msg_handler()
4081 resp->hdr.msg_type, ar->htt.t2h_msg_types_max); in ath10k_htt_t2h_msg_handler()
4084 type = ar->htt.t2h_msg_types[resp->hdr.msg_type]; in ath10k_htt_t2h_msg_handler()
4088 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
4089 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
4090 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
4095 ath10k_htt_rx_proc_rx_ind_ll(htt, &resp->rx_ind); in ath10k_htt_t2h_msg_handler()
4097 skb_queue_tail(&htt->rx_indication_head, skb); in ath10k_htt_t2h_msg_handler()
4107 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4114 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
4119 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4121 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4147 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4153 status = ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
4155 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4156 ath10k_htt_tx_mgmt_dec_pending(htt); in ath10k_htt_t2h_msg_handler()
4157 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
4162 ath10k_htt_rx_tx_compl_ind(htt->ar, skb); in ath10k_htt_t2h_msg_handler()
4165 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
4180 atomic_inc(&htt->num_mpdus_ready); in ath10k_htt_t2h_msg_handler()
4182 return ath10k_htt_rx_proc_rx_frag_ind(htt, in ath10k_htt_t2h_msg_handler()
4223 skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
4227 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
4229 struct ath10k_htc_ep *ep = &ar->htc.endpoint[htt->eid]; in ath10k_htt_t2h_msg_handler()
4241 if (htt->disable_tx_comp) { in ath10k_htt_t2h_msg_handler()
4271 skb_queue_tail(&htt->tx_fetch_ind_q, tx_fetch_ind); in ath10k_htt_t2h_msg_handler()
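At the top of ath10k_htt_t2h_msg_handler(), the raw message type from the target is bounds-checked against t2h_msg_types_max and translated through a per-firmware lookup table before the switch, so several firmware ABIs map onto one internal enum. A schematic of that dispatch; the enum values and table entries below are illustrative, not the driver's:

	enum t2h_msg {
		T2H_VERSION_CONF,
		T2H_RX_IND,
		T2H_UNKNOWN,
	};

	static const enum t2h_msg t2h_msg_types[] = {
		/* indexed by the raw on-wire type for this firmware branch */
		[0] = T2H_VERSION_CONF,
		[1] = T2H_RX_IND,
	};

	static void t2h_handler(unsigned int raw_type)
	{
		enum t2h_msg type;

		if (raw_type >= sizeof(t2h_msg_types) / sizeof(t2h_msg_types[0]))
			return; /* unsupported type: warn and drop */
		type = t2h_msg_types[raw_type];

		switch (type) {
		case T2H_VERSION_CONF:
			/* record target_version_major/minor, complete() waiter */
			break;
		case T2H_RX_IND:
			/* queue the skb for NAPI-context processing */
			break;
		default:
			break;
		}
	}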
4308 if (skb_queue_empty(&ar->htt.rx_msdus_q)) in ath10k_htt_rx_deliver_msdu()
4311 skb = skb_dequeue(&ar->htt.rx_msdus_q); in ath10k_htt_rx_deliver_msdu()
4324 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_hl_indication() local
4330 skb = skb_dequeue(&htt->rx_indication_head); in ath10k_htt_rx_hl_indication()
4336 release = ath10k_htt_rx_proc_rx_ind_hl(htt, in ath10k_htt_rx_hl_indication()
4346 skb_queue_len(&htt->rx_indication_head)); in ath10k_htt_rx_hl_indication()
4354 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_txrx_compl_task() local
4373 while ((skb = skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
4374 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4376 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
4385 while (atomic_read(&htt->num_mpdus_ready)) { in ath10k_htt_txrx_compl_task()
4386 ret = ath10k_htt_rx_handle_amsdu(htt); in ath10k_htt_txrx_compl_task()
4391 atomic_dec(&htt->num_mpdus_ready); in ath10k_htt_txrx_compl_task()
4402 if ((quota < budget) && !kfifo_is_empty(&htt->txdone_fifo)) in ath10k_htt_txrx_compl_task()
4410 while (kfifo_get(&htt->txdone_fifo, &tx_done)) in ath10k_htt_txrx_compl_task()
4411 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_txrx_compl_task()
4415 spin_lock_irqsave(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4416 skb_queue_splice_init(&htt->tx_fetch_ind_q, &tx_ind_q); in ath10k_htt_txrx_compl_task()
4417 spin_unlock_irqrestore(&htt->tx_fetch_ind_q.lock, flags); in ath10k_htt_txrx_compl_task()
4425 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_txrx_compl_task()
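Within the budgeted completion task above, tx_fetch_ind_q is drained by stealing the whole pending list in O(1) under the queue lock (skb_queue_splice_init() under spin_lock_irqsave()), then processing it with the lock dropped. The same shape in userspace, with a pthread mutex standing in for the spinlock and a plain linked list for the skb queue:

	#include <pthread.h>
	#include <stddef.h>

	struct node { struct node *next; }; /* stands in for queued sk_buffs */

	static struct node *tx_fetch_ind_q;
	static pthread_mutex_t q_lock = PTHREAD_MUTEX_INITIALIZER;

	static struct node *splice_init(void)
	{
		struct node *head;

		pthread_mutex_lock(&q_lock); /* spin_lock_irqsave() in the driver */
		head = tx_fetch_ind_q;       /* steal the whole list ... */
		tx_fetch_ind_q = NULL;       /* ... leaving the queue empty */
		pthread_mutex_unlock(&q_lock);

		return head; /* process at leisure without the lock */
	}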
4455 void ath10k_htt_set_rx_ops(struct ath10k_htt *htt) in ath10k_htt_set_rx_ops() argument
4457 struct ath10k *ar = htt->ar; in ath10k_htt_set_rx_ops()
4460 htt->rx_ops = &htt_rx_ops_hl; in ath10k_htt_set_rx_ops()
4462 htt->rx_ops = &htt_rx_ops_64; in ath10k_htt_set_rx_ops()
4464 htt->rx_ops = &htt_rx_ops_32; in ath10k_htt_set_rx_ops()
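ath10k_htt_set_rx_ops() installs one of three function-pointer tables once at setup (high-latency bus, 64-bit target addressing, or the 32-bit default), so the hot path indirects through htt->rx_ops instead of branching on address width per call. A schematic of the selection; the struct layout and selector values are illustrative:

	#include <stddef.h>

	struct rx_ops {
		size_t (*get_rx_ring_size)(void);
		void (*set_paddr)(int idx, unsigned long long paddr);
	};

	extern const struct rx_ops rx_ops_32, rx_ops_64, rx_ops_hl;

	enum target_kind { TARGET_HL, TARGET_64BIT, TARGET_32BIT };

	static const struct rx_ops *pick_rx_ops(enum target_kind t)
	{
		switch (t) {
		case TARGET_HL:    return &rx_ops_hl; /* high-latency bus */
		case TARGET_64BIT: return &rx_ops_64; /* target uses 64-bit paddrs */
		default:           return &rx_ops_32;
		}
	}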