Home
last modified time | relevance | path

Searched refs: ring_size (Results 1 – 25 of 211) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9

/linux-6.6.21/tools/virtio/ringtest/
Dvirtio_ring_0_9.c70 ret = posix_memalign(&p, 0x1000, vring_size(ring_size, 0x1000)); in alloc_ring()
75 memset(p, 0, vring_size(ring_size, 0x1000)); in alloc_ring()
76 vring_init(&ring, ring_size, p, 0x1000); in alloc_ring()
85 for (i = 0; i < ring_size - 1; i++) in alloc_ring()
89 guest.num_free = ring_size; in alloc_ring()
90 data = malloc(ring_size * sizeof *data); in alloc_ring()
95 memset(data, 0, ring_size * sizeof *data); in alloc_ring()
111 head = (ring_size - 1) & (guest.avail_idx++); in add_inbuf()
136 ring.avail->ring[avail & (ring_size - 1)] = in add_inbuf()
137 (head | (avail & ~(ring_size - 1))) ^ 0x8000; in add_inbuf()
[all …]
Dring.c82 ret = posix_memalign((void **)&ring, 0x1000, ring_size * sizeof *ring); in alloc_ring()
97 for (i = 0; i < ring_size; ++i) { in alloc_ring()
103 guest.num_free = ring_size; in alloc_ring()
104 data = calloc(ring_size, sizeof(*data)); in alloc_ring()
120 head = (ring_size - 1) & (guest.avail_idx++); in add_inbuf()
145 unsigned head = (ring_size - 1) & guest.last_used_idx; in get_buf()
154 index = ring[head].index & (ring_size - 1); in get_buf()
166 unsigned head = (ring_size - 1) & guest.last_used_idx; in used_empty()
221 unsigned head = (ring_size - 1) & host.used_idx; in avail_empty()
228 unsigned head = (ring_size - 1) & host.used_idx; in use_buf()
Dmain.c29 unsigned ring_size = 256; variable
288 ring_size, in help()
322 ring_size = strtol(optarg, &endptr, 0); in main()
323 assert(ring_size && !(ring_size & (ring_size - 1))); in main()
/linux-6.6.21/include/xen/interface/io/
Dring.h357 static inline RING_IDX name##_mask(RING_IDX idx, RING_IDX ring_size) \
359 return idx & (ring_size - 1); \
364 RING_IDX ring_size) \
366 return buf + name##_mask(idx, ring_size); \
374 RING_IDX ring_size) \
377 size <= ring_size - *masked_cons) { \
380 memcpy(opaque, buf + *masked_cons, ring_size - *masked_cons); \
381 memcpy((unsigned char *)opaque + ring_size - *masked_cons, buf, \
382 size - (ring_size - *masked_cons)); \
384 *masked_cons = name##_mask(*masked_cons + size, ring_size); \
[all …]
/linux-6.6.21/drivers/gpu/drm/amd/amdgpu/
Damdgpu_ih.c42 unsigned ring_size, bool use_bus_addr) in amdgpu_ih_ring_init() argument
48 rb_bufsz = order_base_2(ring_size / 4); in amdgpu_ih_ring_init()
49 ring_size = (1 << rb_bufsz) * 4; in amdgpu_ih_ring_init()
50 ih->ring_size = ring_size; in amdgpu_ih_ring_init()
51 ih->ptr_mask = ih->ring_size - 1; in amdgpu_ih_ring_init()
64 ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8, in amdgpu_ih_ring_init()
70 ih->wptr_addr = dma_addr + ih->ring_size; in amdgpu_ih_ring_init()
71 ih->wptr_cpu = &ih->ring[ih->ring_size / 4]; in amdgpu_ih_ring_init()
72 ih->rptr_addr = dma_addr + ih->ring_size + 4; in amdgpu_ih_ring_init()
73 ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1]; in amdgpu_ih_ring_init()
[all …]
Dnavi10_ih.c53 if (adev->irq.ih.ring_size) { in navi10_ih_init_register_offset()
66 if (adev->irq.ih1.ring_size) { in navi10_ih_init_register_offset()
77 if (adev->irq.ih2.ring_size) { in navi10_ih_init_register_offset()
203 if (ih[i]->ring_size) { in navi10_ih_toggle_interrupts()
215 int rb_bufsz = order_base_2(ih->ring_size / 4); in navi10_ih_rb_cntl()
353 if (ih[i]->ring_size) { in navi10_ih_irq_init()
373 if (adev->irq.ih_soft.ring_size) in navi10_ih_irq_init()
474 if ((v < ih->ring_size) && (v != ih->rptr)) in navi10_ih_irq_rearm()
581 adev->irq.ih1.ring_size = 0; in navi10_ih_sw_init()
582 adev->irq.ih2.ring_size = 0; in navi10_ih_sw_init()
Dih_v6_0.c52 if (adev->irq.ih.ring_size) { in ih_v6_0_init_register_offset()
65 if (adev->irq.ih1.ring_size) { in ih_v6_0_init_register_offset()
177 if (ih[i]->ring_size) { in ih_v6_0_toggle_interrupts()
189 int rb_bufsz = order_base_2(ih->ring_size / 4); in ih_v6_0_rb_cntl()
319 if (ih[i]->ring_size) { in ih_v6_0_irq_init()
358 if (adev->irq.ih_soft.ring_size) in ih_v6_0_irq_init()
450 if ((v < ih->ring_size) && (v != ih->rptr)) in ih_v6_0_irq_rearm()
552 adev->irq.ih1.ring_size = 0; in ih_v6_0_sw_init()
553 adev->irq.ih2.ring_size = 0; in ih_v6_0_sw_init()
Dih_v6_1.c52 if (adev->irq.ih.ring_size) { in ih_v6_1_init_register_offset()
65 if (adev->irq.ih1.ring_size) { in ih_v6_1_init_register_offset()
177 if (ih[i]->ring_size) { in ih_v6_1_toggle_interrupts()
189 int rb_bufsz = order_base_2(ih->ring_size / 4); in ih_v6_1_rb_cntl()
319 if (ih[i]->ring_size) { in ih_v6_1_irq_init()
358 if (adev->irq.ih_soft.ring_size) in ih_v6_1_irq_init()
451 if ((v < ih->ring_size) && (v != ih->rptr)) in ih_v6_1_irq_rearm()
553 adev->irq.ih1.ring_size = 0; in ih_v6_1_sw_init()
554 adev->irq.ih2.ring_size = 0; in ih_v6_1_sw_init()
Dvega10_ih.c51 if (adev->irq.ih.ring_size) { in vega10_ih_init_register_offset()
64 if (adev->irq.ih1.ring_size) { in vega10_ih_init_register_offset()
75 if (adev->irq.ih2.ring_size) { in vega10_ih_init_register_offset()
148 if (ih[i]->ring_size) { in vega10_ih_toggle_interrupts()
160 int rb_bufsz = order_base_2(ih->ring_size / 4); in vega10_ih_rb_cntl()
285 if (ih[i]->ring_size) { in vega10_ih_irq_init()
303 if (adev->irq.ih_soft.ring_size) in vega10_ih_irq_init()
404 if ((v < ih->ring_size) && (v != ih->rptr)) in vega10_ih_irq_rearm()
Dvega20_ih.c59 if (adev->irq.ih.ring_size) { in vega20_ih_init_register_offset()
72 if (adev->irq.ih1.ring_size) { in vega20_ih_init_register_offset()
83 if (adev->irq.ih2.ring_size) { in vega20_ih_init_register_offset()
157 if (ih[i]->ring_size) { in vega20_ih_toggle_interrupts()
169 int rb_bufsz = order_base_2(ih->ring_size / 4); in vega20_ih_rb_cntl()
318 if (ih[i]->ring_size) { in vega20_ih_irq_init()
351 if (adev->irq.ih_soft.ring_size) in vega20_ih_irq_init()
453 if ((v < ih->ring_size) && (v != ih->rptr)) in vega20_ih_irq_rearm()
Damdgpu_ring.c321 ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission); in amdgpu_ring_init()
323 ring->buf_mask = (ring->ring_size / 4) - 1; in amdgpu_ring_init()
331 BUG_ON(ring->ring_size > PAGE_SIZE*4); in amdgpu_ring_init()
340 r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_dw, PAGE_SIZE, in amdgpu_ring_init()
496 if (*pos >= (ring->ring_size + 12)) in amdgpu_debugfs_ring_read()
595 ring->ring_size + 12); in amdgpu_debugfs_ring_init()
648 prop->queue_size = ring->ring_size; in amdgpu_ring_to_mqd_prop()
Damdgpu_ih.h52 unsigned ring_size; member
101 unsigned ring_size, bool use_bus_addr);
/linux-6.6.21/drivers/crypto/ccp/
Dtee-dev.c25 static int tee_alloc_ring(struct psp_tee_device *tee, int ring_size) in tee_alloc_ring() argument
30 if (!ring_size) in tee_alloc_ring()
36 start_addr = (void *)__get_free_pages(GFP_KERNEL, get_order(ring_size)); in tee_alloc_ring()
40 memset(start_addr, 0x0, ring_size); in tee_alloc_ring()
42 rb_mgr->ring_size = ring_size; in tee_alloc_ring()
57 get_order(rb_mgr->ring_size)); in tee_free_ring()
60 rb_mgr->ring_size = 0; in tee_free_ring()
96 cmd->size = tee->rb_mgr.ring_size; in tee_alloc_cmd_buffer()
111 int ring_size = MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd); in tee_init_ring() local
119 ret = tee_alloc_ring(tee, ring_size); in tee_init_ring()
[all …]
/linux-6.6.21/drivers/gpu/drm/radeon/
Dradeon_ring.c86 ring->ring_free_dw = rptr + (ring->ring_size / 4); in radeon_ring_free_size()
91 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_free_size()
112 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
314 size = ring->wptr + (ring->ring_size / 4); in radeon_ring_backup()
381 int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, in radeon_ring_init() argument
386 ring->ring_size = ring_size; in radeon_ring_init()
392 r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true, in radeon_ring_init()
417 ring->ptr_mask = (ring->ring_size / 4) - 1; in radeon_ring_init()
418 ring->ring_free_dw = ring->ring_size / 4; in radeon_ring_init()
474 count = (ring->ring_size / 4) - ring->ring_free_dw; in radeon_debugfs_ring_info_show()
/linux-6.6.21/drivers/xen/
Devtchn.c67 unsigned int ring_size; member
98 return idx & (u->ring_size - 1); in evtchn_ring_offset()
178 if ((prod - cons) < u->ring_size) { in evtchn_interrupt()
235 if (((c ^ p) & u->ring_size) != 0) { in evtchn_read()
236 bytes1 = (u->ring_size - evtchn_ring_offset(u, c)) * in evtchn_read()
322 if (u->nr_evtchns <= u->ring_size) in evtchn_resize_ring()
325 if (u->ring_size == 0) in evtchn_resize_ring()
328 new_size = 2 * u->ring_size; in evtchn_resize_ring()
354 memcpy(new_ring, old_ring, u->ring_size * sizeof(*u->ring)); in evtchn_resize_ring()
355 memcpy(new_ring + u->ring_size, old_ring, in evtchn_resize_ring()
[all …]
/linux-6.6.21/drivers/net/ethernet/amazon/ena/
Dena_netdev.h88 #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
90 #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1)) argument
91 #define ENA_RX_RING_IDX_ADD(idx, n, ring_size) \ argument
92 (((idx) + (n)) & ((ring_size) - 1))
275 int ring_size; member
/linux-6.6.21/drivers/net/ethernet/renesas/
Drswitch.c206 if (index + num >= gq->ring_size) in rswitch_next_queue_index()
207 index = (index + num) % gq->ring_size; in rswitch_next_queue_index()
219 return gq->ring_size - gq->dirty + gq->cur; in rswitch_get_num_cur_queues()
238 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_alloc_skb()
251 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_alloc_skb()
267 (gq->ring_size + 1), gq->rx_ring, gq->ring_dma); in rswitch_gwca_queue_free()
270 for (i = 0; i < gq->ring_size; i++) in rswitch_gwca_queue_free()
275 (gq->ring_size + 1), gq->tx_ring, gq->ring_dma); in rswitch_gwca_queue_free()
288 sizeof(struct rswitch_ts_desc) * (gq->ring_size + 1), in rswitch_gwca_ts_queue_free()
296 bool dir_tx, int ring_size) in rswitch_gwca_queue_alloc() argument
[all …]
/linux-6.6.21/drivers/bus/mhi/ep/
Dring.c26 ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size; in mhi_ep_ring_inc_index()
55 buf_info.size = (ring->ring_size - start) * sizeof(struct mhi_ring_element); in __mhi_ep_cache_ring()
125 num_free_elem = ((ring->ring_size - ring->rd_offset) + ring->wr_offset) - 1; in mhi_ep_ring_add_element()
174 ring->ring_size = mhi_ep_ring_num_elems(ring); in mhi_ep_ring_start()
190 ring->ring_cache = kcalloc(ring->ring_size, sizeof(struct mhi_ring_element), GFP_KERNEL); in mhi_ep_ring_start()
/linux-6.6.21/drivers/crypto/intel/qat/qat_common/
Dadf_transport.c93 ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) { in adf_send_message()
103 ADF_RING_SIZE_MODULO(ring->ring_size)); in adf_send_message()
124 ADF_RING_SIZE_MODULO(ring->ring_size)); in adf_handle_response()
139 u32 ring_config = BUILD_RING_CONFIG(ring->ring_size); in adf_configure_tx_ring()
151 BUILD_RESP_RING_CONFIG(ring->ring_size, in adf_configure_rx_ring()
168 ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size); in adf_init_ring()
194 ring->ring_size); in adf_init_ring()
206 ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size); in adf_cleanup_ring()
272 ring->ring_size = adf_verify_ring_size(msg_size, num_msgs); in adf_create_ring()
275 max_inflights = ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size); in adf_create_ring()
/linux-6.6.21/arch/powerpc/platforms/pasemi/
Ddma_lib.c242 int pasemi_dma_alloc_ring(struct pasemi_dmachan *chan, int ring_size) in pasemi_dma_alloc_ring() argument
246 chan->ring_size = ring_size; in pasemi_dma_alloc_ring()
249 ring_size * sizeof(u64), in pasemi_dma_alloc_ring()
268 dma_free_coherent(&dma_pdev->dev, chan->ring_size * sizeof(u64), in pasemi_dma_free_ring()
271 chan->ring_size = 0; in pasemi_dma_free_ring()
/linux-6.6.21/drivers/net/ethernet/atheros/
Dag71xx.c795 int ring_mask, ring_size; in ag71xx_tx_packets() local
799 ring_size = BIT(ring->order); in ag71xx_tx_packets()
852 if ((ring->curr - ring->dirty) < (ring_size * 3) / 4) in ag71xx_tx_packets()
1174 int ring_size = BIT(ring->order); in ag71xx_ring_tx_init() local
1175 int ring_mask = ring_size - 1; in ag71xx_ring_tx_init()
1178 for (i = 0; i < ring_size; i++) { in ag71xx_ring_tx_init()
1199 int ring_size = BIT(ring->order); in ag71xx_ring_rx_clean() local
1205 for (i = 0; i < ring_size; i++) in ag71xx_ring_rx_clean()
1246 int ring_size = BIT(ring->order); in ag71xx_ring_rx_init() local
1251 for (i = 0; i < ring_size; i++) { in ag71xx_ring_rx_init()
[all …]
/linux-6.6.21/include/linux/
Dpipe_fs_i.h64 unsigned int ring_size; member
183 return &pipe->bufs[slot & (pipe->ring_size - 1)]; in pipe_buf()
251 unsigned int mask = pipe->ring_size - 1; in pipe_discard_from()
/linux-6.6.21/tools/testing/selftests/bpf/progs/
Dtest_ringbuf.c32 long ring_size = 0; variable
72 ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE); in test_ringbuf()
/linux-6.6.21/io_uring/
Dkbuf.c590 size_t ring_size) in io_lookup_buf_free_entry() argument
598 if (ibf->inuse || ibf->size < ring_size) in io_lookup_buf_free_entry()
600 dist = ibf->size - ring_size; in io_lookup_buf_free_entry()
617 size_t ring_size; in io_alloc_pbuf_ring() local
620 ring_size = reg->ring_entries * sizeof(struct io_uring_buf_ring); in io_alloc_pbuf_ring()
623 ibf = io_lookup_buf_free_entry(ctx, ring_size); in io_alloc_pbuf_ring()
625 ptr = io_mem_alloc(ring_size); in io_alloc_pbuf_ring()
636 ibf->size = ring_size; in io_alloc_pbuf_ring()
/linux-6.6.21/drivers/net/ethernet/pensando/ionic/
Dionic_dev.c590 .q_init.ring_size = ilog2(q->num_descs), in ionic_dev_cmd_adminq_init()
634 unsigned int ring_size; in ionic_cq_init() local
639 ring_size = ilog2(num_descs); in ionic_cq_init()
640 if (ring_size < 2 || ring_size > 16) in ionic_cq_init()
702 unsigned int ring_size; in ionic_q_init() local
707 ring_size = ilog2(num_descs); in ionic_q_init()
708 if (ring_size < 2 || ring_size > 16) in ionic_q_init()

Pages: 1 2 3 4 5 6 7 8 9