Lines matching refs: put_tx_ctx
826 struct nv_skb_map *get_tx_ctx, *put_tx_ctx; member
1958 np->put_tx_ctx = np->tx_skb; in nv_init_tx()
2090 …return (u32)(np->tx_ring_size - ((np->tx_ring_size + (np->put_tx_ctx - np->get_tx_ctx)) % np->tx_r… in nv_get_empty_tx_slots()
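The slot accounting at line 2090 depends only on the offset of put_tx_ctx and get_tx_ctx within the tx_skb context array. Below is a minimal, self-contained sketch of that arithmetic; struct fe_priv_demo and demo_get_empty_tx_slots() are hypothetical stand-ins whose field names mirror the driver's fe_priv, and the nv_skb_map body is reduced to a placeholder:

#include <stdint.h>

struct nv_skb_map {
	void *skb;	/* placeholder; the real struct (line 826) also carries DMA state */
};

/* Hypothetical cut-down fe_priv with just the ring bookkeeping fields. */
struct fe_priv_demo {
	unsigned int tx_ring_size;
	struct nv_skb_map *tx_skb;	/* base of the per-descriptor context array */
	struct nv_skb_map *last_tx_ctx;	/* last entry of that array */
	struct nv_skb_map *get_tx_ctx;	/* next context to reclaim on TX completion */
	struct nv_skb_map *put_tx_ctx;	/* next context to fill when transmitting */
};

static uint32_t demo_get_empty_tx_slots(const struct fe_priv_demo *np)
{
	long ring = (long)np->tx_ring_size;
	long dist = np->put_tx_ctx - np->get_tx_ctx;	/* may be negative after a wrap */
	long used = (ring + dist) % ring;		/* in-flight slots, folded into [0, ring) */

	return (uint32_t)(ring - used);
}

Because both pointers walk the same array, the pointer difference is already a slot count; adding tx_ring_size before the modulo keeps the dividend non-negative once put_tx_ctx has wrapped behind get_tx_ctx.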
2253 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit()
2257 np->put_tx_ctx->dma))) { in nv_start_xmit()
2268 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2269 np->put_tx_ctx->dma_single = 1; in nv_start_xmit()
2270 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2278 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2279 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
2290 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2293 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit()
2299 np->put_tx_ctx->dma))) { in nv_start_xmit()
2306 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit()
2308 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit()
2318 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2319 np->put_tx_ctx->dma_single = 0; in nv_start_xmit()
2320 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2327 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2328 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
2337 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit()
2340 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit()
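Both transmit paths advance put_tx_ctx with the same wrap-around idiom (lines 2278-2279, 2327-2328, 2456-2457, 2505-2506) and look up the previously filled context the same way (lines 2337-2340, 2515-2518). A hedged sketch of the two idioms, reusing the hypothetical fe_priv_demo type from the sketch above; the helper names are invented for illustration:

/* Advance idiom: bump put_tx_ctx after filling a descriptor and wrap it
 * back to the start of the tx_skb array once the last entry was used. */
static void demo_advance_put_tx_ctx(struct fe_priv_demo *np)
{
	/* Post-increment: the entry at last_tx_ctx is still consumed before
	 * the cursor wraps back to the base of the array. */
	if (np->put_tx_ctx++ == np->last_tx_ctx)
		np->put_tx_ctx = np->tx_skb;
}

/* "Previous context" idiom: the most recently filled context sits just
 * behind put_tx_ctx, which means last_tx_ctx whenever put_tx_ctx has
 * already wrapped to the array base. */
static struct nv_skb_map *demo_prev_tx_ctx(const struct fe_priv_demo *np)
{
	if (np->put_tx_ctx == np->tx_skb)
		return np->last_tx_ctx;
	return np->put_tx_ctx - 1;
}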
2425 start_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2430 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit_optimized()
2434 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2445 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2446 np->put_tx_ctx->dma_single = 1; in nv_start_xmit_optimized()
2447 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2448 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2456 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2457 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2469 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2470 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit_optimized()
2477 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2484 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit_optimized()
2486 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit_optimized()
2495 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2496 np->put_tx_ctx->dma_single = 0; in nv_start_xmit_optimized()
2497 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2498 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2505 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2506 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2515 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit_optimized()
2518 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit_optimized()
2554 start_tx_ctx->next_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2555 np->tx_end_flip = np->put_tx_ctx; in nv_start_xmit_optimized()
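When a fragment mapping fails partway through (lines 2290-2308 and 2469-2486), the driver walks forward from the first context used for the skb, releasing every mapping made so far, and then rewinds put_tx_ctx so the slots can be reused. A simplified sketch of that unwind, again against the hypothetical fe_priv_demo type; demo_unmap_tx_ctx() is a stub for whatever per-context unmapping the driver performs (presumably dma_unmap_single() or dma_unmap_page(), selected by the dma_single flag recorded above):

/* Hypothetical stub for the per-context unmap. */
static void demo_unmap_tx_ctx(struct fe_priv_demo *np, struct nv_skb_map *ctx)
{
	(void)np;
	(void)ctx;	/* dma_unmap_single()/dma_unmap_page() in the driver */
}

static void demo_unwind_tx_mappings(struct fe_priv_demo *np,
				    struct nv_skb_map *start_tx_ctx)
{
	struct nv_skb_map *tmp_tx_ctx = start_tx_ctx;

	/* Release every context used for this skb so far, stopping before
	 * the one whose mapping just failed (np->put_tx_ctx). */
	while (tmp_tx_ctx != np->put_tx_ctx) {
		demo_unmap_tx_ctx(np, tmp_tx_ctx);
		if (tmp_tx_ctx++ == np->last_tx_ctx)
			tmp_tx_ctx = np->tx_skb;
	}

	/* Rewind so the next packet reuses the slots, as at lines 2308 and 2486. */
	np->put_tx_ctx = start_tx_ctx;
}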