Lines matching refs: xskb (identifier cross-references; the enclosing functions are in net/xdp/xsk_buff_pool.c)

60 struct xdp_buff_xsk *xskb; in xp_create_and_assign_umem() local
98 xskb = &pool->heads[i]; in xp_create_and_assign_umem()
99 xskb->pool = pool; in xp_create_and_assign_umem()
100 xskb->xdp.frame_sz = umem->chunk_size - umem->headroom; in xp_create_and_assign_umem()
101 INIT_LIST_HEAD(&xskb->free_list_node); in xp_create_and_assign_umem()
103 pool->free_heads[i] = xskb; in xp_create_and_assign_umem()
105 xp_init_xskb_addr(xskb, pool, i * pool->chunk_size); in xp_create_and_assign_umem()
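
The matches at lines 98-105 form the per-chunk initialization loop in xp_create_and_assign_umem(): every umem chunk gets an xdp_buff_xsk head tied to the pool. A minimal sketch of that loop; the loop bound and the aligned/unaligned branch are reconstructed from context, not taken from the listing:

	/* one xdp_buff_xsk head per umem chunk (reconstructed loop) */
	for (i = 0; i < umem->chunks; i++) {
		xskb = &pool->heads[i];
		xskb->pool = pool;
		xskb->xdp.frame_sz = umem->chunk_size - umem->headroom;
		INIT_LIST_HEAD(&xskb->free_list_node);
		if (pool->unaligned)
			pool->free_heads[i] = xskb;	/* address bound later, at alloc time */
		else
			xp_init_xskb_addr(xskb, pool, i * pool->chunk_size);
	}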
386 struct xdp_buff_xsk *xskb = &pool->heads[i]; in xp_init_dma_info() local
388 xp_init_xskb_dma(xskb, pool, dma_map->dma_pages, xskb->orig_addr); in xp_init_dma_info()
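
Line 388 sits in a loop over all heads: once the umem pages are DMA-mapped, each xskb's DMA address can be computed once up front. A sketch, assuming the surrounding loop and that this runs only for aligned (fixed-address) chunks:

	/* aligned mode: precompute each head's DMA address at mapping time */
	for (i = 0; i < pool->heads_cnt; i++) {
		struct xdp_buff_xsk *xskb = &pool->heads[i];

		xp_init_xskb_dma(xskb, pool, dma_map->dma_pages, xskb->orig_addr);
	}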
476 struct xdp_buff_xsk *xskb; in __xp_alloc() local
500 xskb = pool->free_heads[--pool->free_heads_cnt]; in __xp_alloc()
501 xp_init_xskb_addr(xskb, pool, addr); in __xp_alloc()
503 xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr); in __xp_alloc()
505 xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)]; in __xp_alloc()
509 return xskb; in __xp_alloc()
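
Lines 476-509 are the slow allocation path, __xp_alloc(), which binds an address taken from the fill queue to a head. A sketch of the tail of that function; the fill-queue peek/validation that produces addr, and the pool->dma_pages guard, are assumptions from context:

	if (pool->unaligned) {
		/* unaligned chunks: grab any spare head and bind it to addr */
		xskb = pool->free_heads[--pool->free_heads_cnt];
		xp_init_xskb_addr(xskb, pool, addr);
		if (pool->dma_pages)
			xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);
	} else {
		/* aligned chunks: addr maps directly onto a fixed head index */
		xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];
	}

	return xskb;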
514 struct xdp_buff_xsk *xskb; in xp_alloc() local
517 xskb = __xp_alloc(pool); in xp_alloc()
518 if (!xskb) in xp_alloc()
522 xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk, in xp_alloc()
524 list_del_init(&xskb->free_list_node); in xp_alloc()
527 xskb->xdp.data = xskb->xdp.data_hard_start + XDP_PACKET_HEADROOM; in xp_alloc()
528 xskb->xdp.data_meta = xskb->xdp.data; in xp_alloc()
531 dma_sync_single_range_for_device(pool->dev, xskb->dma, 0, in xp_alloc()
535 return &xskb->xdp; in xp_alloc()
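
Lines 514-535 cover xp_alloc(), the single-buffer allocation entry point: reuse a recycled buffer from free_list when one is available, otherwise fall back to __xp_alloc(), then reset the packet pointers and sync the frame for the device. A sketch assuming the free_list_cnt bookkeeping and the need-sync guard, which are not part of the matched lines:

	struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool)
	{
		struct xdp_buff_xsk *xskb;

		if (!pool->free_list_cnt) {
			xskb = __xp_alloc(pool);	/* refill from the fill queue */
			if (!xskb)
				return NULL;
		} else {
			pool->free_list_cnt--;
			xskb = list_first_entry(&pool->free_list,
						struct xdp_buff_xsk, free_list_node);
			list_del_init(&xskb->free_list_node);
		}

		/* reset data pointers for a fresh frame */
		xskb->xdp.data = xskb->xdp.data_hard_start + XDP_PACKET_HEADROOM;
		xskb->xdp.data_meta = xskb->xdp.data;

		if (pool->dma_need_sync)	/* assumed guard around line 531 */
			dma_sync_single_range_for_device(pool->dev, xskb->dma, 0,
							 pool->frame_len,
							 DMA_BIDIRECTIONAL);
		return &xskb->xdp;
	}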
551 struct xdp_buff_xsk *xskb; in xp_alloc_new_from_fq() local
566 xskb = pool->free_heads[--pool->free_heads_cnt]; in xp_alloc_new_from_fq()
567 xp_init_xskb_addr(xskb, pool, addr); in xp_alloc_new_from_fq()
569 xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr); in xp_alloc_new_from_fq()
571 xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)]; in xp_alloc_new_from_fq()
574 *xdp = &xskb->xdp; in xp_alloc_new_from_fq()
584 struct xdp_buff_xsk *xskb; in xp_alloc_reused() local
591 xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk, free_list_node); in xp_alloc_reused()
592 list_del_init(&xskb->free_list_node); in xp_alloc_reused()
594 *xdp = &xskb->xdp; in xp_alloc_reused()
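
Lines 551-574 and 584-594 are the two halves of the batched allocator: xp_alloc_new_from_fq() repeats the __xp_alloc() binding shown above once per fill-queue descriptor and stores each buffer via *xdp = &xskb->xdp, while xp_alloc_reused() drains already-recycled buffers off the pool's free_list. A sketch of the reuse half, assuming the nb_entries bookkeeping around the matched lines:

	static u32 xp_alloc_reused(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
	{
		struct xdp_buff_xsk *xskb;
		u32 i, nb_entries;

		nb_entries = min_t(u32, pool->free_list_cnt, max);

		i = nb_entries;
		while (i--) {
			xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk,
						free_list_node);
			list_del_init(&xskb->free_list_node);

			*xdp = &xskb->xdp;
			xdp++;
		}
		pool->free_list_cnt -= nb_entries;

		return nb_entries;
	}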
641 void xp_free(struct xdp_buff_xsk *xskb) in xp_free() argument
643 if (!list_empty(&xskb->free_list_node)) in xp_free()
646 xskb->pool->free_list_cnt++; in xp_free()
647 list_add(&xskb->free_list_node, &xskb->pool->free_list); in xp_free()
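
xp_free() (lines 641-647) is the recycle path: a buffer not already on the free list is pushed back onto it and the counter bumped, so the next xp_alloc() can take the list_first_entry() fast path. A sketch; the early return after the line-643 check is an assumption:

	void xp_free(struct xdp_buff_xsk *xskb)
	{
		if (!list_empty(&xskb->free_list_node))
			return;		/* already recycled, nothing to do */

		xskb->pool->free_list_cnt++;
		list_add(&xskb->free_list_node, &xskb->pool->free_list);
	}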
667 void xp_dma_sync_for_cpu_slow(struct xdp_buff_xsk *xskb) in xp_dma_sync_for_cpu_slow() argument
669 dma_sync_single_range_for_cpu(xskb->pool->dev, xskb->dma, 0, in xp_dma_sync_for_cpu_slow()
670 xskb->pool->frame_len, DMA_BIDIRECTIONAL); in xp_dma_sync_for_cpu_slow()
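
Lines 667-670 already show xp_dma_sync_for_cpu_slow() almost in full: it syncs the whole frame back to the CPU before the XDP program or the stack reads it. Reconstructed as a complete function (the name suggests it is the out-of-line slow path behind an inline sync helper, but that wrapper is not part of the listing):

	void xp_dma_sync_for_cpu_slow(struct xdp_buff_xsk *xskb)
	{
		dma_sync_single_range_for_cpu(xskb->pool->dev, xskb->dma, 0,
					      xskb->pool->frame_len,
					      DMA_BIDIRECTIONAL);
	}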