Lines Matching refs:hlid
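hlid is the host link ID used throughout the wlcore TX path (the function names in the matches point at drivers/net/wireless/ti/wlcore/tx.c). As the listing shows, it is used in two ways: as a bit position in the link bitmaps (wl->links_map, wlvif->links_map) and as an index into the per-link state array wl->links[]. Below is a minimal, self-contained sketch of that pattern only; the struct and field names are simplified stand-ins for the fields visible in the matches, not the wlcore API itself.

#include <stdint.h>
#include <stdio.h>

#define NUM_LINKS 16

struct link_state {
	unsigned int allocated_pkts;        /* cf. wl->links[hlid].allocated_pkts */
};

struct wl_sketch {
	uint32_t links_map;                 /* bitmap of allocated hlids */
	struct link_state links[NUM_LINKS]; /* per-link state, indexed by hlid */
};

/* cf. lines 235-236: only account packets for links that are actually allocated */
static void account_tx(struct wl_sketch *wl, uint8_t hlid)
{
	if (hlid < NUM_LINKS && (wl->links_map & (1u << hlid)))
		wl->links[hlid].allocated_pkts++;
}

int main(void)
{
	struct wl_sketch wl = { .links_map = 1u << 3 };

	account_tx(&wl, 3);                 /* allocated hlid: counter increments */
	account_tx(&wl, 7);                 /* unallocated hlid: no effect */
	printf("hlid 3: %u pkts, hlid 7: %u pkts\n",
	       wl.links[3].allocated_pkts, wl.links[7].allocated_pkts);
	return 0;
}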

40 		ret = wl12xx_cmd_set_default_wep_key(wl, id, wlvif->sta.hlid);  in wl1271_set_default_wep_key()
108 u8 hlid) in wl1271_tx_regulate_link() argument
113 if (WARN_ON(!test_bit(hlid, wlvif->links_map))) in wl1271_tx_regulate_link()
116 fw_ps = test_bit(hlid, &wl->ap_fw_ps_map); in wl1271_tx_regulate_link()
117 tx_pkts = wl->links[hlid].allocated_pkts; in wl1271_tx_regulate_link()
131 wl12xx_ps_link_start(wl, wlvif, hlid, true); in wl1271_tx_regulate_link()
147 return wl_sta->hlid; in wl12xx_tx_get_hlid_ap()
176 return wlvif->sta.hlid; in wl12xx_tx_get_hlid()
192 u8 hlid, bool is_gem) in wl1271_tx_allocate() argument
235 if (test_bit(hlid, wl->links_map)) in wl1271_tx_allocate()
236 wl->links[hlid].allocated_pkts++; in wl1271_tx_allocate()
252 struct ieee80211_tx_info *control, u8 hlid) in wl1271_tx_fill_hdr() argument
300 u8 session_id = wl->session_ids[hlid]; in wl1271_tx_fill_hdr()
310 desc->hlid = hlid; in wl1271_tx_fill_hdr()
328 if (hlid == wlvif->ap.global_hlid) in wl1271_tx_fill_hdr()
330 else if (hlid == wlvif->ap.bcast_hlid || in wl1271_tx_fill_hdr()
361 struct sk_buff *skb, u32 buf_offset, u8 hlid) in wl1271_prepare_tx_frame() argument
375 if (hlid == WL12XX_INVALID_LINK_ID) { in wl1271_prepare_tx_frame()
407 ret = wl1271_tx_allocate(wl, wlvif, skb, extra, buf_offset, hlid, in wl1271_prepare_tx_frame()
412 wl1271_tx_fill_hdr(wl, wlvif, skb, extra, info, hlid); in wl1271_prepare_tx_frame()
416 wl1271_tx_regulate_link(wl, wlvif, hlid); in wl1271_prepare_tx_frame()
529 u8 hlid, u8 ac, in wlcore_lnk_dequeue_high_prio() argument
532 struct wl1271_link *lnk = &wl->links[hlid]; in wlcore_lnk_dequeue_high_prio()
534 if (!wlcore_hw_lnk_high_prio(wl, hlid, lnk)) { in wlcore_lnk_dequeue_high_prio()
537 wlcore_hw_lnk_low_prio(wl, hlid, lnk)) in wlcore_lnk_dequeue_high_prio()
539 *low_prio_hlid = hlid; in wlcore_lnk_dequeue_high_prio()
549 u8 ac, u8 *hlid, in wlcore_vif_dequeue_high_prio() argument
578 *hlid = wlvif->last_tx_hlid; in wlcore_vif_dequeue_high_prio()
582 static struct sk_buff *wl1271_skb_dequeue(struct wl1271 *wl, u8 *hlid) in wl1271_skb_dequeue() argument
600 skb = wlcore_vif_dequeue_high_prio(wl, wlvif, ac, hlid, in wl1271_skb_dequeue()
615 *hlid = wl->system_hlid; in wl1271_skb_dequeue()
627 skb = wlcore_vif_dequeue_high_prio(wl, wlvif, ac, hlid, in wl1271_skb_dequeue()
646 *hlid = low_prio_hlid; in wl1271_skb_dequeue()
661 *hlid = wl->system_hlid; in wl1271_skb_dequeue()
673 struct sk_buff *skb, u8 hlid) in wl1271_skb_queue_head() argument
681 skb_queue_head(&wl->links[hlid].tx_queue[q], skb); in wl1271_skb_queue_head()
684 wlvif->last_tx_hlid = (hlid + wl->num_links - 1) % in wl1271_skb_queue_head()
706 u8 hlid; in wl12xx_rearm_rx_streaming() local
718 for_each_set_bit(hlid, active_hlids, wl->num_links) { in wl12xx_rearm_rx_streaming()
719 if (test_bit(hlid, wlvif->links_map)) { in wl12xx_rearm_rx_streaming()
758 u8 hlid; in wlcore_tx_work_locked() local
763 while ((skb = wl1271_skb_dequeue(wl, &hlid))) { in wlcore_tx_work_locked()
771 hlid = wl->system_hlid; in wlcore_tx_work_locked()
775 hlid); in wlcore_tx_work_locked()
781 wl1271_skb_queue_head(wl, wlvif, skb, hlid); in wlcore_tx_work_locked()
798 wl1271_skb_queue_head(wl, wlvif, skb, hlid); in wlcore_tx_work_locked()
808 wl1271_skb_queue_head(wl, wlvif, skb, hlid); in wlcore_tx_work_locked()
818 __set_bit(desc->hlid, active_hlids); in wlcore_tx_work_locked()
1017 void wl1271_tx_reset_link_queues(struct wl1271 *wl, u8 hlid) in wl1271_tx_reset_link_queues() argument
1024 struct wl1271_link *lnk = &wl->links[hlid]; in wl1271_tx_reset_link_queues()
1065 u8 hlid = i; in wl12xx_tx_reset_wlvif() local
1066 wl12xx_free_link(wl, wlvif, &hlid); in wl12xx_tx_reset_wlvif()
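Taken together, the matches trace an hlid's path through TX: wl12xx_tx_get_hlid()/wl12xx_tx_get_hlid_ap() choose the link for an outgoing skb (a per-station hlid in AP mode, wlvif->sta.hlid in station mode), wl1271_skb_dequeue() and wlcore_tx_work_locked() fall back to wl->system_hlid when no regular link applies, and wl1271_prepare_tx_frame() carries the hlid into the descriptor (wl1271_tx_fill_hdr) and into AP link power-save accounting (wl1271_tx_regulate_link). The standalone sketch below compresses only the selection-and-fallback step; the types, the INVALID_LINK_ID value, and the AP-mode broadcast rule are simplified assumptions, not the kernel logic verbatim.

#include <stdint.h>
#include <stdio.h>

#define INVALID_LINK_ID 0xff            /* stand-in for WL12XX_INVALID_LINK_ID (line 375) */

enum iftype { IF_STA, IF_AP };

struct vif_sketch {
	enum iftype type;
	uint8_t sta_hlid;               /* cf. wlvif->sta.hlid (line 176) */
	uint8_t bcast_hlid;             /* cf. wlvif->ap.bcast_hlid (line 330) */
};

struct sta_sketch {
	uint8_t hlid;                   /* cf. wl_sta->hlid (line 147) */
};

/*
 * Roughly the decision visible in the listing: a per-station link in AP mode,
 * the vif's own link in station mode, and "no link" otherwise so the caller
 * can fall back to the system link (cf. lines 615, 661, 771).
 */
static uint8_t pick_hlid(const struct vif_sketch *vif, const struct sta_sketch *sta)
{
	if (!vif)
		return INVALID_LINK_ID;
	if (vif->type == IF_AP)
		return sta ? sta->hlid : vif->bcast_hlid;
	return vif->sta_hlid;
}

int main(void)
{
	struct vif_sketch ap = { .type = IF_AP, .bcast_hlid = 1 };
	struct sta_sketch peer = { .hlid = 4 };
	uint8_t system_hlid = 0;        /* cf. wl->system_hlid (lines 615, 771) */
	uint8_t hlid;

	hlid = pick_hlid(NULL, NULL);
	if (hlid == INVALID_LINK_ID)
		hlid = system_hlid;     /* same fallback the TX work loop applies */

	printf("AP->peer: %u, AP broadcast: %u, fallback: %u\n",
	       (unsigned)pick_hlid(&ap, &peer), (unsigned)pick_hlid(&ap, NULL),
	       (unsigned)hlid);
	return 0;
}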