Searched refs:ack_queue (Results 1 – 24 of 24) sorted by relevance
36 skb_queue_purge(&nr->ack_queue); in nr_clear_queues()
55 while (skb_peek(&nrom->ack_queue) != NULL && nrom->va != nr) { in nr_frames_acked()
56 skb = skb_dequeue(&nrom->ack_queue); in nr_frames_acked()
72 while ((skb = skb_dequeue(&nr_sk(sk)->ack_queue)) != NULL) { in nr_requeue_frames()

102 if ((skb = skb_peek(&nr->ack_queue)) == NULL) in nr_send_nak_frame()
137 start = (skb_peek(&nr->ack_queue) == NULL) ? nr->va : nr->vs; in nr_kick()
173 skb_queue_tail(&nr->ack_queue, skb); in nr_kick()

444 skb_queue_head_init(&nr->ack_queue); in nr_create()
492 skb_queue_head_init(&nr->ack_queue); in nr_make_new()

36 skb_queue_purge(&ax25->ack_queue); in ax25_clear_queues()
54 while (skb_peek(&ax25->ack_queue) != NULL && ax25->va != nr) { in ax25_frames_acked()
55 skb = skb_dequeue(&ax25->ack_queue); in ax25_frames_acked()
71 while ((skb = skb_dequeue_tail(&ax25->ack_queue)) != NULL) in ax25_requeue_frames()

70 …if (ax25->state == AX25_STATE_1 || ax25->state == AX25_STATE_2 || skb_peek(&ax25->ack_queue) != NU… in ax25_ds_enquiry_response()
96 …if (ax25o->state == AX25_STATE_1 || ax25o->state == AX25_STATE_2 || skb_peek(&ax25o->ack_queue) !=… in ax25_ds_enquiry_response()

256 start = (skb_peek(&ax25->ack_queue) == NULL) ? ax25->va : ax25->vs; in ax25_kick()
313 skb_queue_tail(&ax25->ack_queue, skb); in ax25_kick()

538 skb_queue_head_init(&ax25->ack_queue); in ax25_create_cb()

38 skb_queue_purge(&lapb->ack_queue); in lapb_clear_queues()
57 while (skb_peek(&lapb->ack_queue) && lapb->va != nr) { in lapb_frames_acked()
58 skb = skb_dequeue(&lapb->ack_queue); in lapb_frames_acked()
73 while ((skb = skb_dequeue(&lapb->ack_queue)) != NULL) { in lapb_requeue_frames()

72 start = !skb_peek(&lapb->ack_queue) ? lapb->va : lapb->vs; in lapb_kick()
104 skb_queue_tail(&lapb->ack_queue, skb); in lapb_kick()

119 skb_queue_head_init(&lapb->ack_queue); in lapb_create_cb()

60 start = (skb_peek(&rose->ack_queue) == NULL) ? rose->va : rose->vs; in rose_kick()
93 skb_queue_tail(&rose->ack_queue, skb); in rose_kick()

35 skb_queue_purge(&rose_sk(sk)->ack_queue); in rose_clear_queues()
52 while (skb_peek(&rose->ack_queue) != NULL && rose->va != nr) { in rose_frames_acked()
53 skb = skb_dequeue(&rose->ack_queue); in rose_frames_acked()
69 while ((skb = skb_dequeue(&rose_sk(sk)->ack_queue)) != NULL) { in rose_requeue_frames()

532 skb_queue_head_init(&rose->ack_queue); in rose_create()
571 skb_queue_head_init(&rose->ack_queue); in rose_make_new()

39 skb_queue_purge(&x25->ack_queue); in x25_clear_queues()
61 while (skb_peek(&x25->ack_queue) && x25->va != nr) { in x25_frames_acked()
62 skb = skb_dequeue(&x25->ack_queue); in x25_frames_acked()
77 while ((skb = skb_dequeue(&x25_sk(sk)->ack_queue)) != NULL) { in x25_requeue_frames()

164 start = skb_peek(&x25->ack_queue) ? x25->vs : x25->va; in x25_kick()
197 skb_queue_tail(&x25->ack_queue, skb); in x25_kick()

513 skb_queue_head_init(&x25->ack_queue); in x25_alloc_socket()
99 struct sk_buff_head ack_queue; member
146 struct sk_buff_head ack_queue; member
82 struct sk_buff_head ack_queue; member
160 struct sk_buff_head ack_queue; member
250 struct sk_buff_head ack_queue; member
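
Taken together, the hits above all follow the same transmit/acknowledge pattern: each of the NET/ROM, AX.25, LAPB, ROSE and X.25 control blocks keeps a sk_buff_head named ack_queue that holds I-frames already sent but not yet acknowledged. The queue is initialised at connection setup, a frame is appended in the *_kick() transmit path once its clone has gone out, acknowledged frames are dropped in *_frames_acked() up to the peer's N(R), unacked frames are pushed back onto the write queue in *_requeue_frames(), and everything is purged in *_clear_queues(). The sketch below illustrates that lifecycle; the proto_cb struct and the proto_* function names are invented for illustration (only the sk_buff queue calls are the real kernel API), and it is modelled loosely on the AX.25 variant rather than copied from any one protocol.

/*
 * Minimal sketch of the ack_queue lifecycle shared by the protocols above.
 * proto_cb and the proto_* helpers are hypothetical; the skb queue calls
 * (skb_queue_head_init, skb_queue_tail, skb_dequeue, ...) are the real API.
 */
#include <linux/gfp.h>
#include <linux/skbuff.h>

struct proto_cb {
	unsigned short		va;		/* oldest unacked sequence number */
	unsigned short		vs;		/* next sequence number to send   */
	unsigned short		modulus;	/* sequence-number modulus        */
	struct sk_buff_head	write_queue;	/* frames waiting to be sent      */
	struct sk_buff_head	ack_queue;	/* sent but not yet acknowledged  */
};

/* Connection setup (cf. *_create()/*_create_cb()): both queues start empty. */
static void proto_init_queues(struct proto_cb *cb)
{
	skb_queue_head_init(&cb->write_queue);
	skb_queue_head_init(&cb->ack_queue);
}

/* Transmit path (cf. *_kick()): send a clone and keep the original around
 * on ack_queue until the peer acknowledges it. */
static void proto_kick_one(struct proto_cb *cb)
{
	struct sk_buff *skb, *skbn;

	skb = skb_dequeue(&cb->write_queue);
	if (skb == NULL)
		return;

	skbn = skb_clone(skb, GFP_ATOMIC);
	if (skbn == NULL) {
		skb_queue_head(&cb->write_queue, skb);
		return;
	}

	/* ... hand the clone (skbn) to the lower layer for transmission ... */

	skb_queue_tail(&cb->ack_queue, skb);
	cb->vs = (cb->vs + 1) % cb->modulus;
}

/* Acknowledgement (cf. *_frames_acked()): free everything the peer has now
 * acknowledged, advancing va up to the received N(R). */
static void proto_frames_acked(struct proto_cb *cb, unsigned short nr)
{
	struct sk_buff *skb;

	while (skb_peek(&cb->ack_queue) != NULL && cb->va != nr) {
		skb = skb_dequeue(&cb->ack_queue);
		kfree_skb(skb);
		cb->va = (cb->va + 1) % cb->modulus;
	}
}

/* Retransmission (cf. *_requeue_frames()): on timeout, push the unacked
 * frames back to the front of the write queue, oldest ending up first. */
static void proto_requeue_frames(struct proto_cb *cb)
{
	struct sk_buff *skb;

	while ((skb = skb_dequeue_tail(&cb->ack_queue)) != NULL)
		skb_queue_head(&cb->write_queue, skb);
}

/* Teardown (cf. *_clear_queues()): free anything still pending. */
static void proto_clear_queues(struct proto_cb *cb)
{
	skb_queue_purge(&cb->write_queue);
	skb_queue_purge(&cb->ack_queue);
}

The requeue step is the one place the implementations visibly differ in the hits above: AX.25 walks ack_queue from the tail with skb_dequeue_tail(), while the others dequeue from the head; the sketch follows the AX.25 form.
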
237 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
267 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
337 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= elemcnt); in virtio_gpu_queue_ctrl_sgs()
464 wait_event(vgdev->cursorq.ack_queue, vq->num_free >= outcnt); in virtio_gpu_queue_cursor()
61 init_waitqueue_head(&vgvq->ack_queue); in virtio_gpu_init_vq()
196 wait_queue_head_t ack_queue; member
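
The last three hits are unrelated to the sk_buff queues above: in the virtio-gpu driver, ack_queue is a wait_queue_head_t used for flow control on the control and cursor virtqueues. Submitters sleep in wait_event() until the virtqueue has enough free descriptors, and the dequeue work functions call wake_up() after reclaiming used buffers. The sketch below shows that pattern in isolation; the fake_queue struct and the fake_* function names are invented for illustration, and only the waitqueue and spinlock calls are the real kernel API.

/*
 * Minimal sketch of the virtio-gpu style ack_queue: a wait queue that
 * gates submissions on available descriptor space. fake_queue and the
 * fake_* helpers are hypothetical.
 */
#include <linux/spinlock.h>
#include <linux/wait.h>

struct fake_queue {
	spinlock_t		qlock;
	wait_queue_head_t	ack_queue;
	int			num_free;	/* free descriptors in the virtqueue */
};

static void fake_queue_init(struct fake_queue *q, int num_free)
{
	spin_lock_init(&q->qlock);
	init_waitqueue_head(&q->ack_queue);	/* cf. virtio_gpu_init_vq() */
	q->num_free = num_free;
}

/* Submit path (cf. virtio_gpu_queue_ctrl_sgs()): block until there is room
 * for "needed" descriptors, then consume them. */
static void fake_queue_submit(struct fake_queue *q, int needed)
{
	wait_event(q->ack_queue, q->num_free >= needed);

	spin_lock(&q->qlock);
	q->num_free -= needed;
	/* ... add the buffers to the virtqueue and kick it here ... */
	spin_unlock(&q->qlock);
}

/* Completion path (cf. virtio_gpu_dequeue_ctrl_func()): reclaim used
 * descriptors and wake any submitter waiting for space. */
static void fake_queue_complete(struct fake_queue *q, int reclaimed)
{
	spin_lock(&q->qlock);
	q->num_free += reclaimed;
	spin_unlock(&q->qlock);

	wake_up(&q->ack_queue);
}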