Lines Matching refs:unack
44 struct sk_buff_head unack; /* Unack'ed packets queue */ member
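The member at line 44 is the per-link retransmission queue. Below is a condensed sketch of the surrounding state struct, keeping only what the references further down touch; the rel queue, tbcsp timer and msgq_txseq names are assumptions about the rest of the driver state, and the real struct has many more members.

#include <linux/skbuff.h>
#include <linux/timer.h>

/* Condensed view of the driver state; only fields used below are shown. */
struct bcsp_struct {
	struct sk_buff_head unack;	/* Unack'ed packets queue */
	struct sk_buff_head rel;	/* Reliable packets awaiting transmit (assumed) */

	struct timer_list tbcsp;	/* retransmit timer (assumed name) */
	u8 msgq_txseq;			/* tx sequence number for reliable pkts (assumed) */
	/* ... remaining members of the real struct omitted ... */
};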
301 spin_lock_irqsave_nested(&bcsp->unack.lock, flags, SINGLE_DEPTH_NESTING); in bcsp_dequeue()
303 if (bcsp->unack.qlen < BCSP_TXWINSIZE) { in bcsp_dequeue()
311 __skb_queue_tail(&bcsp->unack, skb); in bcsp_dequeue()
313 spin_unlock_irqrestore(&bcsp->unack.lock, flags); in bcsp_dequeue()
322 spin_unlock_irqrestore(&bcsp->unack.lock, flags); in bcsp_dequeue()
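The bcsp_dequeue() references above (lines 301-322) show the transmit-side pattern: a reliable packet is only sent while fewer than BCSP_TXWINSIZE packets are outstanding, and the sent packet is tailed onto unack under its queue lock until the peer acknowledges it. A hedged sketch of that pattern follows; the real function builds an encapsulated BCSP frame and arms the retransmit timer, which a plain skb_clone() stands in for here.

/* Sketch only: returns a copy to transmit, keeps the original on unack. */
static struct sk_buff *bcsp_dequeue_reliable(struct bcsp_struct *bcsp)
{
	struct sk_buff *nskb = NULL;
	unsigned long flags;

	/* _nested: rel.lock taken below is the same lock class as unack.lock,
	 * so lockdep needs the SINGLE_DEPTH_NESTING annotation. */
	spin_lock_irqsave_nested(&bcsp->unack.lock, flags, SINGLE_DEPTH_NESTING);

	/* Only send while fewer than BCSP_TXWINSIZE packets are unacked. */
	if (bcsp->unack.qlen < BCSP_TXWINSIZE) {
		struct sk_buff *skb = skb_dequeue(&bcsp->rel);

		if (skb) {
			nskb = skb_clone(skb, GFP_ATOMIC);
			if (nskb)
				__skb_queue_tail(&bcsp->unack, skb); /* hold until acked */
			else
				skb_queue_head(&bcsp->rel, skb);     /* retry later */
		}
	}

	spin_unlock_irqrestore(&bcsp->unack.lock, flags);
	return nskb;
}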
355 spin_lock_irqsave(&bcsp->unack.lock, flags); in bcsp_pkt_cull()
357 pkts_to_be_removed = skb_queue_len(&bcsp->unack); in bcsp_pkt_cull()
371 pkts_to_be_removed, skb_queue_len(&bcsp->unack), in bcsp_pkt_cull()
375 skb_queue_walk_safe(&bcsp->unack, skb, tmp) { in bcsp_pkt_cull()
380 __skb_unlink(skb, &bcsp->unack); in bcsp_pkt_cull()
384 if (skb_queue_empty(&bcsp->unack)) in bcsp_pkt_cull()
387 spin_unlock_irqrestore(&bcsp->unack.lock, flags); in bcsp_pkt_cull()
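The bcsp_pkt_cull() references (lines 355-387) cover the acknowledgement path: once the peer's ack numbers say how many packets were received, that many entries are unlinked from unack and freed, and the retransmit timer can stop once the queue drains. A sketch of the walk-and-unlink pattern, with the BCSP sequence-number arithmetic omitted and the tbcsp timer name assumed as above:

static void bcsp_cull_acked(struct bcsp_struct *bcsp, int pkts_to_be_removed)
{
	struct sk_buff *skb, *tmp;
	unsigned long flags;
	int i = 0;

	spin_lock_irqsave(&bcsp->unack.lock, flags);

	/* Never try to remove more than is actually queued. */
	if (pkts_to_be_removed > skb_queue_len(&bcsp->unack))
		pkts_to_be_removed = skb_queue_len(&bcsp->unack);

	/* _safe walk because entries are unlinked during iteration. */
	skb_queue_walk_safe(&bcsp->unack, skb, tmp) {
		if (i++ >= pkts_to_be_removed)
			break;

		__skb_unlink(skb, &bcsp->unack);
		kfree_skb(skb);
	}

	/* Nothing outstanding, so no retransmission is pending. */
	if (skb_queue_empty(&bcsp->unack))
		del_timer(&bcsp->tbcsp);

	spin_unlock_irqrestore(&bcsp->unack.lock, flags);
}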
696 BT_DBG("hu %p retransmitting %u pkts", hu, bcsp->unack.qlen); in bcsp_timed_event()
698 spin_lock_irqsave_nested(&bcsp->unack.lock, flags, SINGLE_DEPTH_NESTING); in bcsp_timed_event()
700 while ((skb = __skb_dequeue_tail(&bcsp->unack)) != NULL) { in bcsp_timed_event()
705 spin_unlock_irqrestore(&bcsp->unack.lock, flags); in bcsp_timed_event()
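The bcsp_timed_event() references (lines 696-705) are the retransmit path: when the timer fires, every packet still on unack is moved back to the front of the reliable queue to be sent again. Dequeuing from the tail of unack and inserting at the head of rel preserves the original transmit order. A sketch under the same assumed struct layout; rewinding msgq_txseq per requeued packet is an assumption about how the driver keeps sequence numbers consistent.

static void bcsp_retransmit_unacked(struct bcsp_struct *bcsp)
{
	struct sk_buff *skb;
	unsigned long flags;

	BT_DBG("retransmitting %u pkts", bcsp->unack.qlen);

	spin_lock_irqsave_nested(&bcsp->unack.lock, flags, SINGLE_DEPTH_NESTING);

	/* Tail-first dequeue + head insert keeps the original tx order. */
	while ((skb = __skb_dequeue_tail(&bcsp->unack)) != NULL) {
		/* Rewind the 3-bit tx sequence number for each requeued pkt. */
		bcsp->msgq_txseq = (bcsp->msgq_txseq - 1) & 0x07;
		skb_queue_head(&bcsp->rel, skb);
	}

	spin_unlock_irqrestore(&bcsp->unack.lock, flags);
}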
722 skb_queue_head_init(&bcsp->unack); in bcsp_open()
746 skb_queue_purge(&bcsp->unack); in bcsp_close()
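Setup and teardown (lines 722 and 746) are symmetric: bcsp_open() initializes the queue head, which also initializes its embedded spinlock, and bcsp_close() purges anything still awaiting an acknowledgement so those buffers are not leaked. A minimal sketch of that pairing, with the helper names invented for illustration:

static void bcsp_unack_init(struct bcsp_struct *bcsp)
{
	/* Init the list head and its embedded spinlock before first use. */
	skb_queue_head_init(&bcsp->unack);
}

static void bcsp_unack_free(struct bcsp_struct *bcsp)
{
	/* Free packets still awaiting an ack; nothing will resend them now. */
	skb_queue_purge(&bcsp->unack);
}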