Lines Matching refs:wqe

262 struct siw_wqe *wqe = tx_wqe(qp); in siw_qp_mpa_rts() local
268 if (unlikely(wqe->wr_status != SIW_WR_IDLE)) { in siw_qp_mpa_rts()
272 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_qp_mpa_rts()
274 wqe->wr_status = SIW_WR_QUEUED; in siw_qp_mpa_rts()
275 wqe->sqe.flags = 0; in siw_qp_mpa_rts()
276 wqe->sqe.num_sge = 1; in siw_qp_mpa_rts()
277 wqe->sqe.sge[0].length = 0; in siw_qp_mpa_rts()
278 wqe->sqe.sge[0].laddr = 0; in siw_qp_mpa_rts()
279 wqe->sqe.sge[0].lkey = 0; in siw_qp_mpa_rts()
284 wqe->sqe.rkey = 1; in siw_qp_mpa_rts()
285 wqe->sqe.raddr = 0; in siw_qp_mpa_rts()
286 wqe->processed = 0; in siw_qp_mpa_rts()
289 wqe->sqe.opcode = SIW_OP_WRITE; in siw_qp_mpa_rts()
293 wqe->sqe.opcode = SIW_OP_READ; in siw_qp_mpa_rts()
300 siw_read_to_orq(rreq, &wqe->sqe); in siw_qp_mpa_rts()
310 wqe->wr_status = SIW_WR_IDLE; in siw_qp_mpa_rts()
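
The hits above all fall in siw_qp_mpa_rts(), which arms the QP's single tx WQE with a zero-length WRITE or READ acting as the MPA v2 ready-to-receive marker. A minimal sketch of that flow follows, reconstructed from the listed lines only; the MPA_V2_* flag names, the orq_get_free() helper, the error codes and the ORQ locking are assumptions not visible in the listing.

/*
 * Sketch only, assembled from the hits above. Types and helpers come
 * from the siw driver's siw.h/iwarp.h; orq_get_free(), the MPA_V2_*
 * flags and the error codes are assumptions.
 */
static int mpa_rts_sketch(struct siw_qp *qp, enum mpa_v2_ctrl ctrl)
{
        struct siw_wqe *wqe = tx_wqe(qp);
        int rv = 0;

        if (unlikely(wqe->wr_status != SIW_WR_IDLE))
                return -EIO;            /* tx slot still busy */

        memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);

        wqe->wr_status = SIW_WR_QUEUED;
        wqe->sqe.flags = 0;
        wqe->sqe.num_sge = 1;
        wqe->sqe.sge[0].length = 0;     /* zero-length transfer */
        wqe->sqe.sge[0].laddr = 0;
        wqe->sqe.sge[0].lkey = 0;
        wqe->sqe.rkey = 1;              /* dummy, non-zero STag */
        wqe->sqe.raddr = 0;
        wqe->processed = 0;

        if (ctrl & MPA_V2_RDMA_WRITE_RTR) {
                wqe->sqe.opcode = SIW_OP_WRITE;
        } else if (ctrl & MPA_V2_RDMA_READ_RTR) {
                /* assumed helper: reserve an ORQ slot for the READ */
                struct siw_sqe *rreq = orq_get_free(qp);

                wqe->sqe.opcode = SIW_OP_READ;
                if (rreq)
                        siw_read_to_orq(rreq, &wqe->sqe);
                else
                        rv = -EIO;
        }
        if (rv)
                wqe->wr_status = SIW_WR_IDLE;   /* hand the slot back */

        return rv;
}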
462 struct siw_wqe *wqe = tx_wqe(qp); in siw_send_terminate() local
480 rreq->ddp_msn = htonl(wqe->sqe.sge[0].length); in siw_send_terminate()
482 rreq->ddp_mo = htonl(wqe->processed); in siw_send_terminate()
483 rreq->sink_stag = htonl(wqe->sqe.rkey); in siw_send_terminate()
484 rreq->sink_to = cpu_to_be64(wqe->sqe.raddr); in siw_send_terminate()
485 rreq->read_size = htonl(wqe->sqe.sge[0].length); in siw_send_terminate()
486 rreq->source_stag = htonl(wqe->sqe.sge[0].lkey); in siw_send_terminate()
488 cpu_to_be64(wqe->sqe.sge[0].laddr); in siw_send_terminate()
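
In siw_send_terminate(), the listed lines rebuild an RDMA READ Request header from the still-active tx WQE so the TERMINATE message can echo the offending DDP segment. A sketch of just that field mapping; the function name, the struct iwarp_rdma_rreq type and the source_to member (its assignment's left-hand side falls on the line above the last hit) are assumed.

/*
 * Sketch only: the field copies mirror the hits; everything else here
 * is assumed glue around them.
 */
static void rebuild_rreq_sketch(struct siw_qp *qp,
                                struct iwarp_rdma_rreq *rreq)
{
        struct siw_wqe *wqe = tx_wqe(qp);

        rreq->ddp_msn = htonl(wqe->sqe.sge[0].length);
        rreq->ddp_mo = htonl(wqe->processed);
        rreq->sink_stag = htonl(wqe->sqe.rkey);
        rreq->sink_to = cpu_to_be64(wqe->sqe.raddr);
        rreq->read_size = htonl(wqe->sqe.sge[0].length);
        rreq->source_stag = htonl(wqe->sqe.sge[0].lkey);
        rreq->source_to = cpu_to_be64(wqe->sqe.sge[0].laddr);
}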
884 struct siw_wqe *wqe = tx_wqe(qp); in siw_activate_tx_from_sq() local
891 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_activate_tx_from_sq()
892 wqe->wr_status = SIW_WR_QUEUED; in siw_activate_tx_from_sq()
895 memcpy(&wqe->sqe, sqe, sizeof(*sqe)); in siw_activate_tx_from_sq()
897 if (wqe->sqe.opcode >= SIW_NUM_OPCODES) { in siw_activate_tx_from_sq()
901 if (wqe->sqe.flags & SIW_WQE_INLINE) { in siw_activate_tx_from_sq()
902 if (wqe->sqe.opcode != SIW_OP_SEND && in siw_activate_tx_from_sq()
903 wqe->sqe.opcode != SIW_OP_WRITE) { in siw_activate_tx_from_sq()
907 if (wqe->sqe.sge[0].length > SIW_MAX_INLINE) { in siw_activate_tx_from_sq()
911 wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1]; in siw_activate_tx_from_sq()
912 wqe->sqe.sge[0].lkey = 0; in siw_activate_tx_from_sq()
913 wqe->sqe.num_sge = 1; in siw_activate_tx_from_sq()
915 if (wqe->sqe.flags & SIW_WQE_READ_FENCE) { in siw_activate_tx_from_sq()
917 if (unlikely(wqe->sqe.opcode == SIW_OP_READ || in siw_activate_tx_from_sq()
918 wqe->sqe.opcode == in siw_activate_tx_from_sq()
932 } else if (wqe->sqe.opcode == SIW_OP_READ || in siw_activate_tx_from_sq()
933 wqe->sqe.opcode == SIW_OP_READ_LOCAL_INV) { in siw_activate_tx_from_sq()
941 wqe->sqe.num_sge = 1; in siw_activate_tx_from_sq()
951 siw_read_to_orq(rreq, &wqe->sqe); in siw_activate_tx_from_sq()
966 wqe->wr_status = SIW_WR_IDLE; in siw_activate_tx_from_sq()
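
siw_activate_tx_from_sq() copies the next SQE into the tx WQE and then validates and adjusts it per opcode, as the hits show: unknown opcodes are rejected, inline data is accepted only for SEND/WRITE and only up to SIW_MAX_INLINE, a READ must not carry a read fence, and an outgoing READ is registered in the ORQ. A sketch under those hits; the error codes, orq_get_free() and the fence/ORQ-full bookkeeping that sits between the listed lines are assumptions or simplifications.

/*
 * Sketch only. Error codes, orq_get_free() and the fence/ORQ
 * bookkeeping between the listed lines are assumed or simplified.
 */
static int activate_tx_sketch(struct siw_qp *qp, struct siw_sqe *sqe)
{
        struct siw_wqe *wqe = tx_wqe(qp);
        int rv = 0;

        memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
        wqe->wr_status = SIW_WR_QUEUED;

        /* private copy: the shared SQ slot can be recycled afterwards */
        memcpy(&wqe->sqe, sqe, sizeof(*sqe));

        if (wqe->sqe.opcode >= SIW_NUM_OPCODES) {
                rv = -EINVAL;
                goto out;
        }
        if (wqe->sqe.flags & SIW_WQE_INLINE) {
                /* inline payload is only defined for SEND and WRITE */
                if (wqe->sqe.opcode != SIW_OP_SEND &&
                    wqe->sqe.opcode != SIW_OP_WRITE) {
                        rv = -EINVAL;
                        goto out;
                }
                if (wqe->sqe.sge[0].length > SIW_MAX_INLINE) {
                        rv = -EINVAL;
                        goto out;
                }
                /* inline data lives directly behind sge[0] */
                wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1];
                wqe->sqe.sge[0].lkey = 0;
                wqe->sqe.num_sge = 1;
        }
        if (wqe->sqe.flags & SIW_WQE_READ_FENCE) {
                /* a READ cannot itself be fenced */
                if (unlikely(wqe->sqe.opcode == SIW_OP_READ ||
                             wqe->sqe.opcode == SIW_OP_READ_LOCAL_INV)) {
                        rv = -EINVAL;
                        goto out;
                }
        } else if (wqe->sqe.opcode == SIW_OP_READ ||
                   wqe->sqe.opcode == SIW_OP_READ_LOCAL_INV) {
                struct siw_sqe *rreq = orq_get_free(qp); /* assumed */

                wqe->sqe.num_sge = 1;   /* READ uses one local SGE */
                if (rreq)
                        siw_read_to_orq(rreq, &wqe->sqe);
                else
                        rv = -EIO;
        }
out:
        if (rv)
                wqe->wr_status = SIW_WR_IDLE;
        return rv;
}

Note that tx_wqe(qp) resolves to the QP's single active transmit slot, so wr_status is what distinguishes a free slot (SIW_WR_IDLE) from one still owned by the transmit path; that is why every failure path above, like the listed hits, ends by resetting it to idle.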
980 struct siw_wqe *wqe = tx_wqe(qp); in siw_activate_tx() local
998 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE); in siw_activate_tx()
999 wqe->wr_status = SIW_WR_QUEUED; in siw_activate_tx()
1002 wqe->sqe.opcode = SIW_OP_READ_RESPONSE; in siw_activate_tx()
1003 wqe->sqe.flags = 0; in siw_activate_tx()
1005 wqe->sqe.num_sge = 1; in siw_activate_tx()
1006 wqe->sqe.sge[0].length = irqe->sge[0].length; in siw_activate_tx()
1007 wqe->sqe.sge[0].laddr = irqe->sge[0].laddr; in siw_activate_tx()
1008 wqe->sqe.sge[0].lkey = irqe->sge[0].lkey; in siw_activate_tx()
1010 wqe->sqe.num_sge = 0; in siw_activate_tx()
1016 wqe->sqe.sge[1].length = irqe->sge[1].length; in siw_activate_tx()
1018 wqe->sqe.rkey = irqe->rkey; in siw_activate_tx()
1019 wqe->sqe.raddr = irqe->raddr; in siw_activate_tx()
1021 wqe->processed = 0; in siw_activate_tx()
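
When an inbound READ request is pending, siw_activate_tx() instead loads the tx WQE with a SIW_OP_READ_RESPONSE built from the IRQ entry, as the hits show. A sketch of that translation; how irqe is fetched from the IRQ ring, and the irqe->num_sge test implied by the two num_sge assignments, are assumptions.

/*
 * Sketch only: the IRQ-ring lookup for irqe and the num_sge test are
 * assumed, the field copies mirror the hits.
 */
static void activate_read_response_sketch(struct siw_qp *qp,
                                          struct siw_sqe *irqe)
{
        struct siw_wqe *wqe = tx_wqe(qp);

        memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
        wqe->wr_status = SIW_WR_QUEUED;

        wqe->sqe.opcode = SIW_OP_READ_RESPONSE;
        wqe->sqe.flags = 0;
        if (irqe->num_sge) {
                wqe->sqe.num_sge = 1;
                wqe->sqe.sge[0].length = irqe->sge[0].length;
                wqe->sqe.sge[0].laddr = irqe->sge[0].laddr;
                wqe->sqe.sge[0].lkey = irqe->sge[0].lkey;
        } else {
                /* zero-length READ: no local buffer to touch */
                wqe->sqe.num_sge = 0;
        }
        /* a second length is carried over from the request, per the hits */
        wqe->sqe.sge[1].length = irqe->sge[1].length;

        wqe->sqe.rkey = irqe->rkey;     /* requester's sink STag/address */
        wqe->sqe.raddr = irqe->raddr;
        wqe->processed = 0;
}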
1191 struct siw_wqe *wqe = tx_wqe(qp); in siw_sq_flush() local
1211 if (wqe->wr_status != SIW_WR_IDLE) { in siw_sq_flush()
1213 tx_type(wqe), wqe->wr_status); in siw_sq_flush()
1215 siw_wqe_put_mem(wqe, tx_type(wqe)); in siw_sq_flush()
1217 if (tx_type(wqe) != SIW_OP_READ_RESPONSE && in siw_sq_flush()
1218 ((tx_type(wqe) != SIW_OP_READ && in siw_sq_flush()
1219 tx_type(wqe) != SIW_OP_READ_LOCAL_INV) || in siw_sq_flush()
1220 wqe->wr_status == SIW_WR_QUEUED)) in siw_sq_flush()
1225 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes, in siw_sq_flush()
1228 wqe->wr_status = SIW_WR_IDLE; in siw_sq_flush()
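
siw_sq_flush() also has to deal with a WQE caught mid-transmission: the hits show it dropping the WQE's memory references and generating a SIW_WC_WR_FLUSH_ERR completion unless the WQE is a READ RESPONSE or a READ that has already been handed to the ORQ. A sketch of that branch only; draining the remaining SQ and ORQ entries around it is not shown.

/*
 * Sketch only: flush handling for the one active tx WQE, per the hits.
 */
static void flush_active_tx_wqe_sketch(struct siw_qp *qp)
{
        struct siw_wqe *wqe = tx_wqe(qp);

        if (wqe->wr_status == SIW_WR_IDLE)
                return;

        siw_wqe_put_mem(wqe, tx_type(wqe));

        if (tx_type(wqe) != SIW_OP_READ_RESPONSE &&
            ((tx_type(wqe) != SIW_OP_READ &&
              tx_type(wqe) != SIW_OP_READ_LOCAL_INV) ||
             wqe->wr_status == SIW_WR_QUEUED))
                /* an in-progress READ is already tracked via the ORQ */
                siw_sqe_complete(qp, &wqe->sqe, wqe->bytes,
                                 SIW_WC_WR_FLUSH_ERR);

        wqe->wr_status = SIW_WR_IDLE;
}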
1266 struct siw_wqe *wqe = &qp->rx_untagged.wqe_active; in siw_rq_flush() local
1271 if (wqe->wr_status != SIW_WR_IDLE) { in siw_rq_flush()
1273 rx_type(wqe), wqe->wr_status); in siw_rq_flush()
1275 siw_wqe_put_mem(wqe, rx_type(wqe)); in siw_rq_flush()
1277 if (rx_type(wqe) == SIW_OP_RECEIVE) { in siw_rq_flush()
1278 siw_rqe_complete(qp, &wqe->rqe, wqe->bytes, in siw_rq_flush()
1280 } else if (rx_type(wqe) != SIW_OP_READ && in siw_rq_flush()
1281 rx_type(wqe) != SIW_OP_READ_RESPONSE && in siw_rq_flush()
1282 rx_type(wqe) != SIW_OP_WRITE) { in siw_rq_flush()
1283 siw_sqe_complete(qp, &wqe->sqe, 0, SIW_WC_WR_FLUSH_ERR); in siw_rq_flush()
1285 wqe->wr_status = SIW_WR_IDLE; in siw_rq_flush()
1287 wqe = &qp->rx_tagged.wqe_active; in siw_rq_flush()
1289 if (wqe->wr_status != SIW_WR_IDLE) { in siw_rq_flush()
1290 siw_wqe_put_mem(wqe, rx_type(wqe)); in siw_rq_flush()
1291 wqe->wr_status = SIW_WR_IDLE; in siw_rq_flush()
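
Finally, siw_rq_flush() checks both receive contexts for an in-flight WQE. Per the hits: an untagged RECEIVE gets a flush completion on the RQ, other untagged work that is neither READ, READ RESPONSE nor WRITE is completed on the SQ, and the tagged context is simply reset without a completion. Sketch below; the trailing arguments of siw_rqe_complete() continue past the listed line and are filled in as an assumption.

/*
 * Sketch only: the siw_rqe_complete() arguments beyond the listed line
 * are assumed.
 */
static void rq_flush_sketch(struct siw_qp *qp)
{
        struct siw_wqe *wqe = &qp->rx_untagged.wqe_active;

        /* untagged receive context */
        if (wqe->wr_status != SIW_WR_IDLE) {
                siw_wqe_put_mem(wqe, rx_type(wqe));

                if (rx_type(wqe) == SIW_OP_RECEIVE) {
                        siw_rqe_complete(qp, &wqe->rqe, wqe->bytes,
                                         0, SIW_WC_WR_FLUSH_ERR);
                } else if (rx_type(wqe) != SIW_OP_READ &&
                           rx_type(wqe) != SIW_OP_READ_RESPONSE &&
                           rx_type(wqe) != SIW_OP_WRITE) {
                        siw_sqe_complete(qp, &wqe->sqe, 0,
                                         SIW_WC_WR_FLUSH_ERR);
                }
                wqe->wr_status = SIW_WR_IDLE;
        }
        /* tagged receive context: reset only, no completion is generated */
        wqe = &qp->rx_tagged.wqe_active;
        if (wqe->wr_status != SIW_WR_IDLE) {
                siw_wqe_put_mem(wqe, rx_type(wqe));
                wqe->wr_status = SIW_WR_IDLE;
        }
}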