Searched refs:first_wr (Results 1 – 7 of 7) sorted by relevance
/linux-6.1.9/drivers/infiniband/core/
D | rw.c |
    500  struct ib_send_wr *first_wr, *last_wr;  in rdma_rw_ctx_wrs() local
    513  first_wr = &ctx->reg[0].inv_wr;  in rdma_rw_ctx_wrs()
    515  first_wr = &ctx->reg[0].reg_wr.wr;  in rdma_rw_ctx_wrs()
    519  first_wr = &ctx->map.wrs[0].wr;  in rdma_rw_ctx_wrs()
    523  first_wr = &ctx->single.wr.wr;  in rdma_rw_ctx_wrs()
    537  return first_wr;  in rdma_rw_ctx_wrs()
    558  struct ib_send_wr *first_wr;  in rdma_rw_ctx_post() local
    560  first_wr = rdma_rw_ctx_wrs(ctx, qp, port_num, cqe, chain_wr);  in rdma_rw_ctx_post()
    561  return ib_post_send(qp, first_wr, NULL);  in rdma_rw_ctx_post()
/linux-6.1.9/net/sunrpc/xprtrdma/
D | svc_rdma_rw.c |
    364  struct ib_send_wr *first_wr;  in svc_rdma_post_chunk_ctxt() local
    373  first_wr = NULL;  in svc_rdma_post_chunk_ctxt()
    379  first_wr = rdma_rw_ctx_wrs(&ctxt->rw_ctx, rdma->sc_qp,  in svc_rdma_post_chunk_ctxt()
    380  rdma->sc_port_num, cqe, first_wr);  in svc_rdma_post_chunk_ctxt()
    388  ret = ib_post_send(rdma->sc_qp, first_wr, &bad_wr);  in svc_rdma_post_chunk_ctxt()
    406  if (bad_wr != first_wr)  in svc_rdma_post_chunk_ctxt()
/linux-6.1.9/drivers/infiniband/ulp/iser/
D | iser_verbs.c |
    876  struct ib_send_wr *first_wr;  in iser_post_send() local
    891  first_wr = &tx_desc->inv_wr;  in iser_post_send()
    893  first_wr = &tx_desc->reg_wr.wr;  in iser_post_send()
    895  first_wr = wr;  in iser_post_send()
    897  ret = ib_post_send(ib_conn->qp, first_wr, NULL);  in iser_post_send()
/linux-6.1.9/drivers/infiniband/ulp/srpt/
D | ib_srpt.c |
    2742  struct ib_send_wr *first_wr = NULL;  in srpt_write_pending() local
    2767  first_wr = rdma_rw_ctx_wrs(&ctx->rw, ch->qp, ch->sport->port,  in srpt_write_pending()
    2768  cqe, first_wr);  in srpt_write_pending()
    2772  ret = ib_post_send(ch->qp, first_wr, NULL);  in srpt_write_pending()
    2810  struct ib_send_wr send_wr, *first_wr = &send_wr;  in srpt_queue_response() local
    2841  first_wr = rdma_rw_ctx_wrs(&ctx->rw, ch->qp,  in srpt_queue_response()
    2842  ch->sport->port, NULL, first_wr);  in srpt_queue_response()
    2880  ret = ib_post_send(ch->qp, first_wr, NULL);  in srpt_queue_response()
/linux-6.1.9/drivers/nvme/target/
D | rdma.c |
    721  struct ib_send_wr *first_wr;  in nvmet_rdma_queue_response() local
    732  first_wr = rdma_rw_ctx_wrs(&rsp->rw, cm_id->qp,  in nvmet_rdma_queue_response()
    735  first_wr = rdma_rw_ctx_wrs(&rsp->rw, cm_id->qp,  in nvmet_rdma_queue_response()
    738  first_wr = &rsp->send_wr;  in nvmet_rdma_queue_response()
    747  if (unlikely(ib_post_send(cm_id->qp, first_wr, NULL))) {  in nvmet_rdma_queue_response()
/linux-6.1.9/fs/ksmbd/
D | transport_rdma.c |
    1365  struct ib_send_wr *first_wr;  in smb_direct_rdma_xmit() local
    1449  first_wr = NULL;  in smb_direct_rdma_xmit()
    1451  first_wr = rdma_rw_ctx_wrs(&msg->rw_ctx, t->qp, t->qp->port,  in smb_direct_rdma_xmit()
    1452  &msg->cqe, first_wr);  in smb_direct_rdma_xmit()
    1455  ret = ib_post_send(t->qp, first_wr, NULL);  in smb_direct_rdma_xmit()
/linux-6.1.9/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
D | chcr_ktls.c |
    846  bool first_wr = ((tx_info->prev_ack == 0) && (tx_info->prev_win == 0));  in chcr_ktls_xmit_tcb_cpls() local
    876  if (first_wr || tcp_seq != tx_info->prev_seq) {  in chcr_ktls_xmit_tcb_cpls()
    898  if (first_wr || tx_info->prev_ack != tcp_ack) {  in chcr_ktls_xmit_tcb_cpls()
    907  if (first_wr || tx_info->prev_win != tcp_win) {  in chcr_ktls_xmit_tcb_cpls()
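
Apart from the Chelsio ktls driver (where first_wr is just a bool flag), every hit above follows the same RDMA R/W API pattern: rdma_rw_ctx_wrs() builds the chain of memory-registration and RDMA read/write work requests, optionally chained in front of a caller-supplied WR, and returns the head of that chain; the caller then posts the whole chain with a single ib_post_send(). A minimal sketch of that pattern, modeled on the rdma_rw_ctx_post() hit in rw.c above (the function name example_post_rw_chain is illustrative, not a kernel symbol):

#include <rdma/ib_verbs.h>
#include <rdma/rw.h>

/*
 * Sketch of the pattern common to the hits above: let rdma_rw_ctx_wrs()
 * prepend its registration and RDMA read/write WRs to chain_wr and return
 * the head of the resulting chain, then post the whole chain at once.
 */
static int example_post_rw_chain(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
				 u32 port_num, struct ib_cqe *cqe,
				 struct ib_send_wr *chain_wr)
{
	struct ib_send_wr *first_wr;

	/* first_wr now points at the head of the full WR chain */
	first_wr = rdma_rw_ctx_wrs(ctx, qp, port_num, cqe, chain_wr);
	return ib_post_send(qp, first_wr, NULL);
}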