Lines Matching refs:wr  (cross-reference hits for the identifier "wr" in the rxe verbs implementation)
373 static int rxe_post_srq_recv(struct ib_srq *ibsrq, const struct ib_recv_wr *wr, in rxe_post_srq_recv() argument
382 while (wr) { in rxe_post_srq_recv()
383 err = post_one_recv(&srq->rq, wr); in rxe_post_srq_recv()
386 wr = wr->next; in rxe_post_srq_recv()
392 *bad_wr = wr; in rxe_post_srq_recv()
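Pieced together from the hits above, rxe_post_srq_recv() walks the caller's chained list of ib_recv_wr structures and posts each one to the shared receive queue through post_one_recv(); the first failure is reported back through *bad_wr. The following is only a sketch of that loop: the producer-lock serialization, the error path, and every line that did not match "wr" are assumptions filled in around the matched fragments, with types taken from rdma/ib_verbs.h and the driver's own rxe_verbs.h.

static int rxe_post_srq_recv(struct ib_srq *ibsrq, const struct ib_recv_wr *wr,
                             const struct ib_recv_wr **bad_wr)
{
        int err = 0;
        struct rxe_srq *srq = to_rsrq(ibsrq);   /* container_of() accessor */
        unsigned long flags;

        /* assumption: posting is serialized against the queue producer */
        spin_lock_irqsave(&srq->rq.producer_lock, flags);

        while (wr) {                    /* walk the chained work requests */
                err = post_one_recv(&srq->rq, wr);
                if (unlikely(err))
                        break;
                wr = wr->next;
        }

        spin_unlock_irqrestore(&srq->rq.producer_lock, flags);

        if (err)
                *bad_wr = wr;           /* tell the caller which WR failed */

        return err;
}

The *bad_wr convention is part of the kernel verbs API: on error the caller learns exactly which request in the chain was rejected, while everything before it has already been queued.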
524 static void init_send_wr(struct rxe_qp *qp, struct rxe_send_wr *wr, in init_send_wr() argument
527 wr->wr_id = ibwr->wr_id; in init_send_wr()
528 wr->opcode = ibwr->opcode; in init_send_wr()
529 wr->send_flags = ibwr->send_flags; in init_send_wr()
535 wr->wr.ud.remote_qpn = ud_wr(ibwr)->remote_qpn; in init_send_wr()
536 wr->wr.ud.remote_qkey = ud_wr(ibwr)->remote_qkey; in init_send_wr()
537 wr->wr.ud.ah_num = to_rah(ibah)->ah_num; in init_send_wr()
539 wr->wr.ud.pkey_index = ud_wr(ibwr)->pkey_index; in init_send_wr()
540 if (wr->opcode == IB_WR_SEND_WITH_IMM) in init_send_wr()
541 wr->ex.imm_data = ibwr->ex.imm_data; in init_send_wr()
543 switch (wr->opcode) { in init_send_wr()
545 wr->ex.imm_data = ibwr->ex.imm_data; in init_send_wr()
549 wr->wr.rdma.remote_addr = rdma_wr(ibwr)->remote_addr; in init_send_wr()
550 wr->wr.rdma.rkey = rdma_wr(ibwr)->rkey; in init_send_wr()
553 wr->ex.imm_data = ibwr->ex.imm_data; in init_send_wr()
556 wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey; in init_send_wr()
560 wr->wr.atomic.remote_addr = in init_send_wr()
562 wr->wr.atomic.compare_add = in init_send_wr()
564 wr->wr.atomic.swap = atomic_wr(ibwr)->swap; in init_send_wr()
565 wr->wr.atomic.rkey = atomic_wr(ibwr)->rkey; in init_send_wr()
568 wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey; in init_send_wr()
571 wr->wr.reg.mr = reg_wr(ibwr)->mr; in init_send_wr()
572 wr->wr.reg.key = reg_wr(ibwr)->key; in init_send_wr()
573 wr->wr.reg.access = reg_wr(ibwr)->access; in init_send_wr()
600 init_send_wr(qp, &wqe->wr, ibwr); in init_send_wqe()
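The init_send_wr() hits show how the generic ib_send_wr handed in by the caller is translated into the driver's internal rxe_send_wr: the common fields are copied first, then either the UD/GSI address-handle fields or the opcode-specific union members (RDMA, send-with-immediate, invalidate, atomics, memory registration). Line 600 is init_send_wqe() calling it while building the kernel work-queue element. The sketch below reassembles the matched assignments; the branch conditions, case labels, and fallthrough between them are assumptions based on the standard ib_verbs opcodes, and ud_wr()/rdma_wr()/atomic_wr()/reg_wr() are the container_of() accessors from rdma/ib_verbs.h.

static void init_send_wr(struct rxe_qp *qp, struct rxe_send_wr *wr,
                         const struct ib_send_wr *ibwr)
{
        /* fields common to every opcode */
        wr->wr_id = ibwr->wr_id;
        wr->opcode = ibwr->opcode;
        wr->send_flags = ibwr->send_flags;

        if (qp_type(qp) == IB_QPT_UD || qp_type(qp) == IB_QPT_GSI) {
                struct ib_ah *ibah = ud_wr(ibwr)->ah;

                wr->wr.ud.remote_qpn = ud_wr(ibwr)->remote_qpn;
                wr->wr.ud.remote_qkey = ud_wr(ibwr)->remote_qkey;
                wr->wr.ud.ah_num = to_rah(ibah)->ah_num;
                if (qp_type(qp) == IB_QPT_GSI)
                        wr->wr.ud.pkey_index = ud_wr(ibwr)->pkey_index;
                if (wr->opcode == IB_WR_SEND_WITH_IMM)
                        wr->ex.imm_data = ibwr->ex.imm_data;
        } else {
                switch (wr->opcode) {
                case IB_WR_RDMA_WRITE_WITH_IMM:
                        wr->ex.imm_data = ibwr->ex.imm_data;
                        fallthrough;
                case IB_WR_RDMA_READ:
                case IB_WR_RDMA_WRITE:
                        wr->wr.rdma.remote_addr = rdma_wr(ibwr)->remote_addr;
                        wr->wr.rdma.rkey = rdma_wr(ibwr)->rkey;
                        break;
                case IB_WR_SEND_WITH_IMM:
                        wr->ex.imm_data = ibwr->ex.imm_data;
                        break;
                case IB_WR_SEND_WITH_INV:
                        wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey;
                        break;
                case IB_WR_ATOMIC_CMP_AND_SWP:
                case IB_WR_ATOMIC_FETCH_AND_ADD:
                        wr->wr.atomic.remote_addr = atomic_wr(ibwr)->remote_addr;
                        wr->wr.atomic.compare_add = atomic_wr(ibwr)->compare_add;
                        wr->wr.atomic.swap = atomic_wr(ibwr)->swap;
                        wr->wr.atomic.rkey = atomic_wr(ibwr)->rkey;
                        break;
                case IB_WR_LOCAL_INV:
                        wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey;
                        break;
                case IB_WR_REG_MR:
                        wr->wr.reg.mr = reg_wr(ibwr)->mr;
                        wr->wr.reg.key = reg_wr(ibwr)->key;
                        wr->wr.reg.access = reg_wr(ibwr)->access;
                        break;
                default:
                        break;
                }
        }
}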
659 static int rxe_post_send_kernel(struct rxe_qp *qp, const struct ib_send_wr *wr, in rxe_post_send_kernel() argument
668 while (wr) { in rxe_post_send_kernel()
669 mask = wr_opcode_mask(wr->opcode, qp); in rxe_post_send_kernel()
672 *bad_wr = wr; in rxe_post_send_kernel()
676 if (unlikely((wr->send_flags & IB_SEND_INLINE) && in rxe_post_send_kernel()
679 *bad_wr = wr; in rxe_post_send_kernel()
683 next = wr->next; in rxe_post_send_kernel()
686 for (i = 0; i < wr->num_sge; i++) in rxe_post_send_kernel()
687 length += wr->sg_list[i].length; in rxe_post_send_kernel()
689 err = post_one_send(qp, wr, mask, length); in rxe_post_send_kernel()
692 *bad_wr = wr; in rxe_post_send_kernel()
695 wr = next; in rxe_post_send_kernel()
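rxe_post_send_kernel() is the posting loop used for kernel QPs: each chained send WR is checked against the QP type via wr_opcode_mask(), IB_SEND_INLINE is rejected when the opcode mask lacks WR_INLINE_MASK, the scatter/gather lengths are summed, and the request is handed to post_one_send(). A sketch assembled around those hits; the error codes and the requester-task kick at the end are assumptions, since those lines did not match "wr".

static int rxe_post_send_kernel(struct rxe_qp *qp, const struct ib_send_wr *wr,
                                const struct ib_send_wr **bad_wr)
{
        int err = 0;
        unsigned int mask;
        unsigned int length = 0;
        int i;
        struct ib_send_wr *next;

        while (wr) {
                /* does this opcode make sense on this QP type? */
                mask = wr_opcode_mask(wr->opcode, qp);
                if (unlikely(!mask)) {
                        err = -EINVAL;
                        *bad_wr = wr;
                        break;
                }

                /* inline data only for opcodes that allow it */
                if (unlikely((wr->send_flags & IB_SEND_INLINE) &&
                             !(mask & WR_INLINE_MASK))) {
                        err = -EINVAL;
                        *bad_wr = wr;
                        break;
                }

                next = wr->next;

                /* total payload length across the scatter/gather list */
                length = 0;
                for (i = 0; i < wr->num_sge; i++)
                        length += wr->sg_list[i].length;

                err = post_one_send(qp, wr, mask, length);
                if (err) {
                        *bad_wr = wr;
                        break;
                }
                wr = next;
        }

        /* assumption: the requester task is kicked here so the posted
         * WQEs get processed; the exact scheduling call is version
         * dependent and omitted
         */

        return err;
}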
705 static int rxe_post_send(struct ib_qp *ibqp, const struct ib_send_wr *wr, in rxe_post_send() argument
711 *bad_wr = wr; in rxe_post_send()
716 *bad_wr = wr; in rxe_post_send()
725 return rxe_post_send_kernel(qp, wr, bad_wr); in rxe_post_send()
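rxe_post_send() is the ib_post_send entry point. The two early *bad_wr assignments (source lines 711 and 716) correspond to validation checks that reject the whole chain before anything is posted; for kernel QPs the real work is then delegated to rxe_post_send_kernel(), while user-space QPs post through an mmap'ed queue and never reach the kernel loop. In the sketch the concrete predicates (QP validity and send-ready state) are assumptions and differ between kernel versions.

static int rxe_post_send(struct ib_qp *ibqp, const struct ib_send_wr *wr,
                         const struct ib_send_wr **bad_wr)
{
        struct rxe_qp *qp = to_rqp(ibqp);

        /* assumption: first check is that the QP is still valid */
        if (unlikely(!qp->valid)) {
                *bad_wr = wr;
                return -EINVAL;
        }

        /* assumption: second check is that the QP is in a send-capable
         * state; the exact predicate varies between kernel versions
         */
        if (unlikely(qp->req.state < QP_STATE_READY)) {
                *bad_wr = wr;
                return -EINVAL;
        }

        return rxe_post_send_kernel(qp, wr, bad_wr);
}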
728 static int rxe_post_recv(struct ib_qp *ibqp, const struct ib_recv_wr *wr, in rxe_post_recv() argument
737 *bad_wr = wr; in rxe_post_recv()
743 *bad_wr = wr; in rxe_post_recv()
750 while (wr) { in rxe_post_recv()
751 err = post_one_recv(rq, wr); in rxe_post_recv()
753 *bad_wr = wr; in rxe_post_recv()
756 wr = wr->next; in rxe_post_recv()
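rxe_post_recv() mirrors the SRQ path for a QP's own receive queue: after validation it loops over the chain and posts each WR with post_one_recv(), recording the failing WR in *bad_wr. The two early rejections (source lines 737 and 743) correspond to a QP validity/state check and to the case where the QP is attached to an SRQ, in which case receives must go through rxe_post_srq_recv() instead. The locking and the exact rejection predicates below are assumptions.

static int rxe_post_recv(struct ib_qp *ibqp, const struct ib_recv_wr *wr,
                         const struct ib_recv_wr **bad_wr)
{
        int err = 0;
        struct rxe_qp *qp = to_rqp(ibqp);
        struct rxe_rq *rq = &qp->rq;
        unsigned long flags;

        /* assumption: the QP must be valid and past the RESET state */
        if (unlikely(!qp->valid)) {
                *bad_wr = wr;
                return -EINVAL;
        }

        /* receives for an SRQ-attached QP belong on the SRQ instead */
        if (unlikely(qp->srq)) {
                *bad_wr = wr;
                return -EINVAL;
        }

        spin_lock_irqsave(&rq->producer_lock, flags);

        while (wr) {
                err = post_one_recv(rq, wr);
                if (unlikely(err)) {
                        *bad_wr = wr;
                        break;
                }
                wr = wr->next;
        }

        spin_unlock_irqrestore(&rq->producer_lock, flags);

        return err;
}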