Lines Matching refs:cached_cons
44 u32 cached_cons; member
120 static inline void __xskq_cons_read_addr_unchecked(struct xsk_queue *q, u32 cached_cons, u64 *addr) in __xskq_cons_read_addr_unchecked() argument
123 u32 idx = cached_cons & q->ring_mask; in __xskq_cons_read_addr_unchecked()
130 if (q->cached_cons != q->cached_prod) { in xskq_cons_read_addr_unchecked()
131 __xskq_cons_read_addr_unchecked(q, q->cached_cons, addr); in xskq_cons_read_addr_unchecked()
191 return q->cached_cons != q->cached_prod; in xskq_has_descs()
209 if (q->cached_cons != q->cached_prod) { in xskq_cons_read_desc()
211 u32 idx = q->cached_cons & q->ring_mask; in xskq_cons_read_desc()
223 q->cached_cons += cnt; in xskq_cons_release_n()
237 u32 cached_cons = q->cached_cons, nb_entries = 0; in xskq_cons_read_desc_batch() local
244 while (cached_cons != q->cached_prod && nb_entries < max) { in xskq_cons_read_desc_batch()
246 u32 idx = cached_cons & q->ring_mask; in xskq_cons_read_desc_batch()
250 cached_cons++; in xskq_cons_read_desc_batch()
268 cached_cons -= nr_frags; in xskq_cons_read_desc_batch()
270 xskq_cons_release_n(q, cached_cons - q->cached_cons); in xskq_cons_read_desc_batch()
278 smp_store_release(&q->ring->consumer, q->cached_cons); /* D, matches A */ in __xskq_cons_release()
295 u32 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
301 entries = q->cached_prod - q->cached_cons; in xskq_cons_nb_entries()
313 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_addr_unchecked()
322 if (q->cached_prod == q->cached_cons) in xskq_cons_peek_desc()
333 q->cached_cons++; in xskq_cons_release()
338 q->cached_cons -= cnt; in xskq_cons_cancel_n()
351 u32 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()
357 q->cached_cons = READ_ONCE(q->ring->consumer); in xskq_prod_nb_free()
358 free_entries = q->nentries - (q->cached_prod - q->cached_cons); in xskq_prod_nb_free()