Lines matching refs:xdp_ring
60 static int ena_clean_xdp_irq(struct ena_ring *xdp_ring, u32 budget);
196 struct ena_ring *xdp_ring; in ena_xdp_io_poll() local
200 xdp_ring = ena_napi->xdp_ring; in ena_xdp_io_poll()
204 if (!test_bit(ENA_FLAG_DEV_UP, &xdp_ring->adapter->flags) || in ena_xdp_io_poll()
205 test_bit(ENA_FLAG_TRIGGER_RESET, &xdp_ring->adapter->flags)) { in ena_xdp_io_poll()
210 xdp_work_done = ena_clean_xdp_irq(xdp_ring, xdp_budget); in ena_xdp_io_poll()
215 if (unlikely(!test_bit(ENA_FLAG_DEV_UP, &xdp_ring->adapter->flags))) { in ena_xdp_io_poll()
221 ena_unmask_interrupt(xdp_ring, NULL); in ena_xdp_io_poll()
222 ena_update_ring_numa_node(xdp_ring, NULL); in ena_xdp_io_poll()
228 u64_stats_update_begin(&xdp_ring->syncp); in ena_xdp_io_poll()
229 xdp_ring->tx_stats.napi_comp += napi_comp_call; in ena_xdp_io_poll()
230 xdp_ring->tx_stats.tx_poll++; in ena_xdp_io_poll()
231 u64_stats_update_end(&xdp_ring->syncp); in ena_xdp_io_poll()
232 xdp_ring->tx_stats.last_napi_jiffies = jiffies; in ena_xdp_io_poll()
237 static int ena_xdp_tx_map_frame(struct ena_ring *xdp_ring, in ena_xdp_tx_map_frame() argument
242 struct ena_adapter *adapter = xdp_ring->adapter; in ena_xdp_tx_map_frame()
253 if (xdp_ring->tx_mem_queue_type == ENA_ADMIN_PLACEMENT_POLICY_DEV) { in ena_xdp_tx_map_frame()
255 push_len = min_t(u32, size, xdp_ring->tx_max_header_size); in ena_xdp_tx_map_frame()
266 dma = dma_map_single(xdp_ring->dev, in ena_xdp_tx_map_frame()
270 if (unlikely(dma_mapping_error(xdp_ring->dev, dma))) in ena_xdp_tx_map_frame()
286 ena_increase_stat(&xdp_ring->tx_stats.dma_mapping_err, 1, in ena_xdp_tx_map_frame()
287 &xdp_ring->syncp); in ena_xdp_tx_map_frame()
293 static int ena_xdp_xmit_frame(struct ena_ring *xdp_ring, in ena_xdp_xmit_frame() argument
303 next_to_use = xdp_ring->next_to_use; in ena_xdp_xmit_frame()
304 req_id = xdp_ring->free_ids[next_to_use]; in ena_xdp_xmit_frame()
305 tx_info = &xdp_ring->tx_buffer_info[req_id]; in ena_xdp_xmit_frame()
308 rc = ena_xdp_tx_map_frame(xdp_ring, tx_info, xdpf, &ena_tx_ctx); in ena_xdp_xmit_frame()
315 xdp_ring, in ena_xdp_xmit_frame()
327 ena_ring_tx_doorbell(xdp_ring); in ena_xdp_xmit_frame()
332 ena_unmap_tx_buff(xdp_ring, tx_info); in ena_xdp_xmit_frame()
341 struct ena_ring *xdp_ring; in ena_xdp_xmit() local
356 xdp_ring = &adapter->tx_ring[qid]; in ena_xdp_xmit()
359 spin_lock(&xdp_ring->xdp_tx_lock); in ena_xdp_xmit()
362 if (ena_xdp_xmit_frame(xdp_ring, dev, frames[i], 0)) in ena_xdp_xmit()
369 ena_ring_tx_doorbell(xdp_ring); in ena_xdp_xmit()
371 spin_unlock(&xdp_ring->xdp_tx_lock); in ena_xdp_xmit()
381 struct ena_ring *xdp_ring; in ena_xdp_execute() local
403 xdp_ring = rx_ring->xdp_ring; in ena_xdp_execute()
406 spin_lock(&xdp_ring->xdp_tx_lock); in ena_xdp_execute()
408 if (ena_xdp_xmit_frame(xdp_ring, rx_ring->netdev, xdpf, in ena_xdp_execute()
412 spin_unlock(&xdp_ring->xdp_tx_lock); in ena_xdp_execute()
731 rxr->xdp_ring = &adapter->tx_ring[i + adapter->num_io_queues]; in ena_init_io_rings()
1308 static int validate_xdp_req_id(struct ena_ring *xdp_ring, u16 req_id) in validate_xdp_req_id() argument
1312 tx_info = &xdp_ring->tx_buffer_info[req_id]; in validate_xdp_req_id()
1316 return handle_invalid_req_id(xdp_ring, req_id, tx_info, true); in validate_xdp_req_id()
1952 static int ena_clean_xdp_irq(struct ena_ring *xdp_ring, u32 budget) in ena_clean_xdp_irq() argument
1960 if (unlikely(!xdp_ring)) in ena_clean_xdp_irq()
1962 next_to_clean = xdp_ring->next_to_clean; in ena_clean_xdp_irq()
1968 rc = ena_com_tx_comp_req_id_get(xdp_ring->ena_com_io_cq, in ena_clean_xdp_irq()
1972 handle_invalid_req_id(xdp_ring, req_id, NULL, in ena_clean_xdp_irq()
1978 rc = validate_xdp_req_id(xdp_ring, req_id); in ena_clean_xdp_irq()
1982 tx_info = &xdp_ring->tx_buffer_info[req_id]; in ena_clean_xdp_irq()
1987 ena_unmap_tx_buff(xdp_ring, tx_info); in ena_clean_xdp_irq()
1989 netif_dbg(xdp_ring->adapter, tx_done, xdp_ring->netdev, in ena_clean_xdp_irq()
1990 "tx_poll: q %d skb %p completed\n", xdp_ring->qid, in ena_clean_xdp_irq()
1997 xdp_ring->free_ids[next_to_clean] = req_id; in ena_clean_xdp_irq()
1999 xdp_ring->ring_size); in ena_clean_xdp_irq()
2002 xdp_ring->next_to_clean = next_to_clean; in ena_clean_xdp_irq()
2003 ena_com_comp_ack(xdp_ring->ena_com_io_sq, total_done); in ena_clean_xdp_irq()
2004 ena_com_update_dev_comp_head(xdp_ring->ena_com_io_cq); in ena_clean_xdp_irq()
2006 netif_dbg(xdp_ring->adapter, tx_done, xdp_ring->netdev, in ena_clean_xdp_irq()
2008 xdp_ring->qid, tx_pkts); in ena_clean_xdp_irq()
2330 adapter->ena_napi[i].xdp_ring); in ena_del_napi_in_range()
2349 napi->xdp_ring = &adapter->tx_ring[i]; in ena_init_napi_in_range()
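Read together, these references trace one pattern: each RX ring is paired with a dedicated xdp_ring taken from the upper half of tx_ring[] (line 731), every XDP transmit path serializes on xdp_ring->xdp_tx_lock before calling ena_xdp_xmit_frame() (lines 359 and 406), the ndo_xdp_xmit path rings the doorbell once for the whole batch (line 369), and ena_xdp_io_poll()/ena_clean_xdp_irq() later reclaim the completed descriptors. What follows is a minimal, self-contained user-space sketch of the lock-then-batch-doorbell idea only, not driver code: ring, enqueue_frame(), ring_doorbell() and xmit_batch() are illustrative stand-ins, and a pthread spinlock stands in for the kernel spinlock.

#include <pthread.h>
#include <stdio.h>

/* Illustrative stand-in for a TX ring; only the fields needed for the
 * pattern are modeled (the real ena_ring has many more). */
struct ring {
	pthread_spinlock_t xdp_tx_lock;	/* stands in for xdp_ring->xdp_tx_lock */
	unsigned int next_to_use;
	unsigned int pending;		/* descriptors queued since last doorbell */
};

/* Stand-in for ena_xdp_xmit_frame(..., 0): queue one frame, do not
 * notify the device yet. Returns 0 on success. */
static int enqueue_frame(struct ring *r, int frame)
{
	(void)frame;
	r->next_to_use++;
	r->pending++;
	return 0;
}

/* Stand-in for ena_ring_tx_doorbell(): one device notification that
 * covers everything queued since the previous doorbell. */
static void ring_doorbell(struct ring *r)
{
	printf("doorbell covers %u descriptors\n", r->pending);
	r->pending = 0;
}

/* Mirrors the shape of ena_xdp_xmit(): take the per-ring lock once,
 * queue the whole batch, ring the doorbell once, then unlock. */
static int xmit_batch(struct ring *r, const int *frames, int n)
{
	int i, sent = 0;

	pthread_spin_lock(&r->xdp_tx_lock);
	for (i = 0; i < n; i++) {
		if (enqueue_frame(r, frames[i]))
			break;
		sent++;
	}
	if (sent)
		ring_doorbell(r);
	pthread_spin_unlock(&r->xdp_tx_lock);

	return sent;
}

int main(void)
{
	struct ring r = { .next_to_use = 0, .pending = 0 };
	int frames[] = { 10, 11, 12 };

	pthread_spin_init(&r.xdp_tx_lock, PTHREAD_PROCESS_PRIVATE);
	printf("sent %d frames\n", xmit_batch(&r, frames, 3));
	pthread_spin_destroy(&r.xdp_tx_lock);
	return 0;
}

Holding the lock once around the whole loop and deferring the doorbell keeps the number of device notifications independent of the batch size, which is the point of the pattern the references above show.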