
Searched refs:qpl (Results 1 – 6 of 6) sorted by relevance

/linux-5.19.10/drivers/net/ethernet/google/gve/
gve_main.c
788 struct gve_queue_page_list *qpl = &priv->qpls[id]; in gve_alloc_queue_page_list() local
800 qpl->id = id; in gve_alloc_queue_page_list()
801 qpl->num_entries = 0; in gve_alloc_queue_page_list()
802 qpl->pages = kvcalloc(pages, sizeof(*qpl->pages), GFP_KERNEL); in gve_alloc_queue_page_list()
804 if (!qpl->pages) in gve_alloc_queue_page_list()
806 qpl->page_buses = kvcalloc(pages, sizeof(*qpl->page_buses), GFP_KERNEL); in gve_alloc_queue_page_list()
808 if (!qpl->page_buses) in gve_alloc_queue_page_list()
812 err = gve_alloc_page(priv, &priv->pdev->dev, &qpl->pages[i], in gve_alloc_queue_page_list()
813 &qpl->page_buses[i], in gve_alloc_queue_page_list()
818 qpl->num_entries++; in gve_alloc_queue_page_list()
[all …]
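
The gve_main.c hits above show gve_alloc_queue_page_list() building a QPL out of two parallel arrays, one of page pointers and one of DMA bus addresses, filled page by page and counted in num_entries. A minimal user-space sketch of that layout follows; the struct and helper names are made up here, and plain calloc()/malloc() stand in for kvcalloc() and gve_alloc_page():

    #include <stdlib.h>
    #include <stdint.h>

    struct qpl_sketch {
        uint32_t id;
        uint32_t num_entries;
        void    **pages;       /* stand-in for struct page *pages[] */
        uint64_t *page_buses;  /* stand-in for dma_addr_t page_buses[] */
    };

    /* Illustrative only: allocate the two parallel arrays and fill them,
     * mirroring the allocate-then-count pattern of the driver. */
    static int qpl_sketch_alloc(struct qpl_sketch *qpl, uint32_t id, uint32_t pages)
    {
        qpl->id = id;
        qpl->num_entries = 0;
        qpl->pages = calloc(pages, sizeof(*qpl->pages));
        if (!qpl->pages)
            return -1;
        qpl->page_buses = calloc(pages, sizeof(*qpl->page_buses));
        if (!qpl->page_buses) {
            free(qpl->pages);
            return -1;
        }
        for (uint32_t i = 0; i < pages; i++) {
            qpl->pages[i] = malloc(4096);   /* one "page"; no cleanup on failure in this sketch */
            if (!qpl->pages[i])
                return -1;
            qpl->page_buses[i] = (uint64_t)(uintptr_t)qpl->pages[i]; /* fake bus address */
            qpl->num_entries++;
        }
        return 0;
    }
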
gve_tx.c
32 fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries, VM_MAP, in gve_tx_fifo_init()
36 fifo->qpl->id); in gve_tx_fifo_init()
40 fifo->size = fifo->qpl->num_entries * PAGE_SIZE; in gve_tx_fifo_init()
156 gve_unassign_qpl(priv, tx->tx_fifo.qpl->id); in gve_tx_free_ring()
157 tx->tx_fifo.qpl = NULL; in gve_tx_free_ring()
198 tx->tx_fifo.qpl = gve_assign_tx_qpl(priv); in gve_tx_alloc_ring()
199 if (!tx->tx_fifo.qpl) in gve_tx_alloc_ring()
226 gve_unassign_qpl(priv, tx->tx_fifo.qpl->id); in gve_tx_alloc_ring()
483 gve_dma_sync_for_device(&priv->pdev->dev, tx->tx_fifo.qpl->page_buses, in gve_tx_add_skb_copy()
504 gve_dma_sync_for_device(&priv->pdev->dev, tx->tx_fifo.qpl->page_buses, in gve_tx_add_skb_copy()
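
The gve_tx.c hits show the TX FIFO being created by vmap()-ing the QPL's pages into one contiguous mapping, so its size is simply num_entries * PAGE_SIZE, and a byte offset into the FIFO can be mapped back to a (page index, in-page offset) pair when the corresponding page_buses[] entries are synced for the device. A tiny arithmetic sketch of that translation, assuming a 4 KiB page size; the helper name is made up:

    #include <stddef.h>

    #define SKETCH_PAGE_SIZE 4096u

    /* Illustrative: translate a byte offset within the contiguous FIFO
     * mapping back to a QPL page index and in-page offset. */
    static void fifo_offset_to_page(size_t fifo_offset,
                                    size_t *page_index, size_t *page_offset)
    {
        *page_index  = fifo_offset / SKETCH_PAGE_SIZE;
        *page_offset = fifo_offset % SKETCH_PAGE_SIZE;
    }
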
gve_adminq.c
501 GVE_RAW_ADDRESSING_QPL_ID : tx->tx_fifo.qpl->id; in gve_adminq_create_tx_queue()
545 GVE_RAW_ADDRESSING_QPL_ID : rx->data.qpl->id; in gve_adminq_create_rx_queue()
807 struct gve_queue_page_list *qpl) in gve_adminq_register_page_list() argument
810 u32 num_entries = qpl->num_entries; in gve_adminq_register_page_list()
811 u32 size = num_entries * sizeof(qpl->page_buses[0]); in gve_adminq_register_page_list()
824 page_list[i] = cpu_to_be64(qpl->page_buses[i]); in gve_adminq_register_page_list()
828 .page_list_id = cpu_to_be32(qpl->id), in gve_adminq_register_page_list()
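
In gve_adminq.c, gve_adminq_register_page_list() advertises a whole QPL to the device by serializing every page bus address as big-endian (cpu_to_be64) into a page list that the admin command references by qpl->id. A hedged user-space sketch of just the serialization step; the helper names are illustrative and an ordinary byte buffer stands in for the DMA-coherent allocation:

    #include <stdint.h>
    #include <stddef.h>

    /* Illustrative cpu_to_be64 equivalent: store a 64-bit value big-endian. */
    static void store_be64(uint8_t *dst, uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {
            dst[i] = (uint8_t)(v & 0xff);
            v >>= 8;
        }
    }

    /* Illustrative: serialize every page bus address of a QPL into the
     * big-endian page list handed to the device. */
    static void build_page_list(uint8_t *out, const uint64_t *page_buses,
                                uint32_t num_entries)
    {
        for (uint32_t i = 0; i < num_entries; i++)
            store_be64(out + (size_t)i * sizeof(uint64_t), page_buses[i]);
    }
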
gve_rx.c
36 gve_unassign_qpl(priv, rx->data.qpl->id); in gve_rx_unfill_pages()
37 rx->data.qpl = NULL; in gve_rx_unfill_pages()
116 rx->data.qpl = gve_assign_rx_qpl(priv); in gve_prefill_rx_pages()
117 if (!rx->data.qpl) { in gve_prefill_rx_pages()
125 struct page *page = rx->data.qpl->pages[i]; in gve_prefill_rx_pages()
577 rx->data.qpl->page_buses[idx]; in gve_rx()
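
The gve_rx.c hits show the consumer side: gve_prefill_rx_pages() takes a QPL from gve_assign_rx_qpl() and seeds each RX slot with a page pointer and bus address drawn from the QPL's parallel arrays, which gve_rx() later indexes per slot. A short sketch of that seeding loop, with invented slot and helper names:

    #include <stdint.h>

    struct rx_slot_sketch {
        void    *page;
        uint64_t bus;
    };

    /* Illustrative: seed each RX ring slot from the assigned QPL's
     * parallel page / bus-address arrays. */
    static void prefill_rx_sketch(struct rx_slot_sketch *slots,
                                  void *const *pages, const uint64_t *buses,
                                  uint32_t n)
    {
        for (uint32_t i = 0; i < n; i++) {
            slots[i].page = pages[i];
            slots[i].bus  = buses[i];
        }
    }
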
gve.h
81 struct gve_queue_page_list *qpl; /* qpl assigned to this queue */ member
263 struct gve_queue_page_list *qpl; /* QPL mapped into this FIFO */ member
gve_adminq.h
370 struct gve_queue_page_list *qpl);