
Searched refs:op_sg (Results 1 – 7 of 7) sorted by relevance

/linux-6.1.9/net/rds/
tcp_send.c
  119  sg_page(&rm->data.op_sg[sg]),  in rds_tcp_xmit()
  120  rm->data.op_sg[sg].offset + off,  in rds_tcp_xmit()
  121  rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
  123  rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]),  in rds_tcp_xmit()
  124  rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off,  in rds_tcp_xmit()
  131  if (off == rm->data.op_sg[sg].length) {  in rds_tcp_xmit()
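
The tcp_send.c hits all sit in one transmit loop: rds_tcp_xmit() walks rm->data.op_sg entry by entry, sends each page's remaining bytes, and only advances to the next entry once off reaches the entry's length. A rough sketch of that walk, with push_page() as a hypothetical stand-in for the socket send call and the bounds passed in rather than read from the real RDS structs:

    #include <linux/scatterlist.h>

    /*
     * Sketch of the scatterlist walk suggested by the tcp_send.c matches.
     * push_page() is a placeholder for the actual page-send primitive; it
     * returns bytes sent, 0 for "try again later", or a negative error.
     */
    static int walk_data_sg(struct scatterlist *sgl, unsigned int nents,
                            unsigned int sg, unsigned int off)
    {
        while (sg < nents) {
            struct scatterlist *sge = &sgl[sg];
            int ret = push_page(sg_page(sge), sge->offset + off,
                                sge->length - off);

            if (ret <= 0)
                return ret;
            off += ret;
            if (off == sge->length) {   /* this entry is done, move on */
                off = 0;
                sg++;
            }
        }
        return 0;
    }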

message.c
  156  __free_page(sg_page(&rm->data.op_sg[i]));  in rds_message_purge()
  158  put_page(sg_page(&rm->data.op_sg[i]));  in rds_message_purge()
  348  rm->data.op_sg = rds_message_alloc_sgs(rm, num_sgs);  in rds_message_map_pages()
  349  if (IS_ERR(rm->data.op_sg)) {  in rds_message_map_pages()
  350  void *err = ERR_CAST(rm->data.op_sg);  in rds_message_map_pages()
  356  sg_set_page(&rm->data.op_sg[i],  in rds_message_map_pages()
  377  sg = rm->data.op_sg;  in rds_message_zcopy_from_user()
  401  put_page(sg_page(&rm->data.op_sg[i]));  in rds_message_zcopy_from_user()
  432  sg = rm->data.op_sg;  in rds_message_copy_from_user()
  479  sg = rm->data.op_sg;  in rds_message_inc_copy_to_user()
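
message.c carries the allocation side of the pattern: rds_message_alloc_sgs() returns either the op_sg array or an ERR_PTR, callers check it with IS_ERR(), pages are attached with sg_set_page(), and rds_message_purge() later drops them with __free_page()/put_page(). A hedged sketch of that allocate-check-attach shape, with alloc_sgs() standing in for the RDS allocator:

    #include <linux/err.h>
    #include <linux/gfp.h>
    #include <linux/scatterlist.h>

    /*
     * Sketch of the allocate/attach pattern visible in the message.c matches.
     * alloc_sgs() is a placeholder for rds_message_alloc_sgs(); it is assumed
     * to return an ERR_PTR on failure, which is what the IS_ERR() checks imply.
     */
    static int build_data_sgs(struct scatterlist **out, unsigned int num_sgs)
    {
        struct scatterlist *sg = alloc_sgs(num_sgs);
        unsigned int i;

        if (IS_ERR(sg))
            return PTR_ERR(sg);

        for (i = 0; i < num_sgs; i++) {
            struct page *page = alloc_page(GFP_KERNEL);

            if (!page)
                return -ENOMEM;   /* real code also unwinds pages already attached */
            sg_set_page(&sg[i], page, PAGE_SIZE, 0);
        }
        *out = sg;
        return 0;
    }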

ib_send.c
   79  op->op_sg, op->op_nents,  in rds_ib_send_unmap_data()
   89  op->op_sg, op->op_nents,  in rds_ib_send_unmap_rdma()
  129  ib_dma_unmap_sg(ic->i_cm_id->device, op->op_sg, 1,  in rds_ib_send_unmap_atomic()
  513  scat = &rm->data.op_sg[sg];  in rds_ib_xmit()
  552  rm->data.op_sg,  in rds_ib_xmit()
  622  scat = &ic->i_data_op->op_sg[rm->data.op_dmasg];  in rds_ib_xmit()
  650  && scat != &rm->data.op_sg[rm->data.op_count]) {  in rds_ib_xmit()
  708  && scat != &rm->data.op_sg[rm->data.op_count]);  in rds_ib_xmit()
  716  if (scat == &rm->data.op_sg[rm->data.op_count]) {  in rds_ib_xmit()
  810  ret = ib_dma_map_sg(ic->i_cm_id->device, op->op_sg, 1, DMA_FROM_DEVICE);  in rds_ib_xmit_atomic()
  [all …]
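
ib_send.c is where op_sg meets the RDMA stack: the array is DMA-mapped with ib_dma_map_sg() before the work request is posted and unmapped with ib_dma_unmap_sg() afterwards, using op_nents as the entry count. A minimal map/post/unmap sketch under those assumptions (the device pointer and DMA direction are illustrative, and the posting step is elided):

    #include <rdma/ib_verbs.h>
    #include <linux/scatterlist.h>

    /*
     * Sketch of the DMA-mapping pattern implied by the ib_send.c matches.
     * Real code keeps the mapping alive until the send completion fires;
     * this compresses map and unmap into one function for illustration.
     */
    static int map_op_sg_for_send(struct ib_device *dev,
                                  struct scatterlist *op_sg,
                                  unsigned int op_nents)
    {
        int count = ib_dma_map_sg(dev, op_sg, op_nents, DMA_TO_DEVICE);

        if (!count)
            return -ENOMEM;   /* nothing mapped; do not post the work request */

        /* ... fill ib_sge entries from the mapped list and post the send ... */

        ib_dma_unmap_sg(dev, op_sg, op_nents, DMA_TO_DEVICE);
        return 0;
    }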

rdma.c
  495  struct page *page = sg_page(&ro->op_sg[i]);  in rds_rdma_free_op()
  513  struct page *page = sg_page(ao->op_sg);  in rds_atomic_free_op()
  669  op->op_sg = rds_message_alloc_sgs(rm, nr_pages);  in rds_cmsg_rdma_args()
  670  if (IS_ERR(op->op_sg)) {  in rds_cmsg_rdma_args()
  671  ret = PTR_ERR(op->op_sg);  in rds_cmsg_rdma_args()
  767  sg = &op->op_sg[op->op_nents + j];  in rds_cmsg_rdma_args()
  912  rm->atomic.op_sg = rds_message_alloc_sgs(rm, 1);  in rds_cmsg_atomic()
  913  if (IS_ERR(rm->atomic.op_sg)) {  in rds_cmsg_atomic()
  914  ret = PTR_ERR(rm->atomic.op_sg);  in rds_cmsg_atomic()
  929  sg_set_page(rm->atomic.op_sg, page, 8, offset_in_page(args->local_addr));  in rds_cmsg_atomic()
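
The rds_cmsg_atomic() hits in rdma.c show the degenerate case: a single sg entry of exactly 8 bytes covering the user's local_addr, allocated through the same rds_message_alloc_sgs()/IS_ERR() path. A sketch of building that one entry; how the page gets pinned here (get_user_pages_fast()) is an assumption for illustration, not lifted from the file:

    #include <linux/mm.h>
    #include <linux/scatterlist.h>

    /*
     * Sketch: point a single scatterlist entry at the 8-byte atomic target,
     * mirroring the sg_set_page(..., 8, offset_in_page(local_addr)) match.
     */
    static int sg_for_atomic_target(struct scatterlist *sg, unsigned long local_addr)
    {
        struct page *page;
        int ret = get_user_pages_fast(local_addr, 1, FOLL_WRITE, &page);

        if (ret != 1)
            return ret < 0 ? ret : -EFAULT;

        sg_init_table(sg, 1);
        sg_set_page(sg, page, 8, offset_in_page(local_addr));
        return 0;
    }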

loop.c
   79  struct scatterlist *sgp = &rm->data.op_sg[sg];  in rds_loop_xmit()

rds.h
  450  struct scatterlist *op_sg;  member
  468  struct scatterlist *op_sg;  member
  483  struct scatterlist *op_sg;  member
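
The three rds.h hits are the op_sg members of the per-operation structs carried by an RDS message (one each for the atomic, RDMA, and data ops), which is why the paths above reach the scatterlist through rm->atomic, the rdma op pointer, or rm->data. A reduced sketch of that layout, showing only fields that actually appear elsewhere in these results; the real definitions carry more state:

    #include <linux/scatterlist.h>

    /* Reduced sketch of the three op structs the rds.h matches point at. */
    struct atomic_op_sketch {
        struct scatterlist *op_sg;      /* single 8-byte entry, see rdma.c:929 */
    };

    struct rdma_op_sketch {
        unsigned int        op_nents;   /* see rdma.c:767, ib_send.c:89 */
        struct scatterlist *op_sg;
    };

    struct data_op_sketch {
        unsigned int        op_nents;   /* see ib_send.c:79 */
        unsigned int        op_count;   /* walk bound, ib_send.c:650/708/716 */
        unsigned int        op_dmasg;   /* resume index, ib_send.c:622 */
        struct scatterlist *op_sg;
    };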

send.c
   382  sg = &rm->data.op_sg[cp->cp_xmit_sg];  in rds_send_xmit()
  1277  rm->data.op_sg = rds_message_alloc_sgs(rm, num_sgs);  in rds_sendmsg()
  1278  if (IS_ERR(rm->data.op_sg)) {  in rds_sendmsg()
  1279  ret = PTR_ERR(rm->data.op_sg);  in rds_sendmsg()