/linux-6.1.9/drivers/infiniband/hw/irdma/ |
D | uk.c |
      62  u32 wqe_idx;  in irdma_nop_1() local
      68  wqe_idx = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);  in irdma_nop_1()
      69  wqe = qp->sq_base[wqe_idx].elem;  in irdma_nop_1()
      71  qp->sq_wrtrk_array[wqe_idx].quanta = IRDMA_QP_WQE_MIN_QUANTA;  in irdma_nop_1()
      97  u32 wqe_idx;  in irdma_clr_wqes() local
     100  wqe_idx = (qp_wqe_idx + 128) % qp->sq_ring.size;  in irdma_clr_wqes()
     101  wqe = qp->sq_base[wqe_idx].elem;  in irdma_clr_wqes()
     102  if (wqe_idx)  in irdma_clr_wqes()
     152  static void irdma_qp_ring_push_db(struct irdma_qp_uk *qp, u32 wqe_idx)  in irdma_qp_ring_push_db() argument
     155  FIELD_PREP(IRDMA_WQEALLOC_WQE_DESC_INDEX, wqe_idx >> 3) | qp->qp_id);  in irdma_qp_ring_push_db()
     [all …]
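The uk.c hits show the basic irdma pattern: wqe_idx is read from the SQ ring head, used to index both the WQE array (sq_base) and the per-slot tracking array (sq_wrtrk_array), and wrapped modulo the ring size. A minimal standalone sketch of that pattern, assuming a simple ring with a modulo advance; demo_ring and ring_get_wqe_idx are illustrative names, not irdma symbols:

    /* Standalone sketch of ring-head WQE indexing; illustrative names only. */
    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 64u                 /* assumed ring depth for the sketch */

    struct demo_ring {
        uint32_t head;                    /* next slot to hand out */
        uint32_t size;                    /* number of slots in the ring */
    };

    /* Return the current head as the WQE index, then advance with wraparound,
     * mirroring the IRDMA_RING_CURRENT_HEAD + modulo pattern above. */
    static uint32_t ring_get_wqe_idx(struct demo_ring *ring)
    {
        uint32_t wqe_idx = ring->head;

        ring->head = (ring->head + 1) % ring->size;
        return wqe_idx;
    }

    int main(void)
    {
        struct demo_ring ring = { .head = 62, .size = RING_SIZE };
        uint64_t wrid_table[RING_SIZE] = { 0 };
        uint32_t wqe_idx = ring_get_wqe_idx(&ring);

        wrid_table[wqe_idx] = 0x1234;     /* per-slot bookkeeping, like sq_wrtrk_array */
        printf("posted at slot %u, next head %u\n", wqe_idx, ring.head);
        return 0;
    }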
|
D | puda.c |
      15  struct irdma_puda_buf *buf, u32 wqe_idx);
      79  static void irdma_puda_post_recvbuf(struct irdma_puda_rsrc *rsrc, u32 wqe_idx,  in irdma_puda_post_recvbuf() argument
      89  qp->qp_uk.rq_wrid_array[wqe_idx] = (uintptr_t)buf;  in irdma_puda_post_recvbuf()
      90  wqe = qp->qp_uk.rq_base[wqe_idx].elem;  in irdma_puda_post_recvbuf()
     192  u32 *wqe_idx)  in irdma_puda_get_next_send_wqe() argument
     196  *wqe_idx = IRDMA_RING_CURRENT_HEAD(qp->sq_ring);  in irdma_puda_get_next_send_wqe()
     197  if (!*wqe_idx)  in irdma_puda_get_next_send_wqe()
     203  return qp->sq_base[*wqe_idx].elem;  in irdma_puda_get_next_send_wqe()
     278  info->wqe_idx = (u32)FIELD_GET(IRDMA_CQ_WQEIDX, qword3);  in irdma_puda_poll_info()
     357  qp->rq_wrid_array[info.wqe_idx];  in irdma_puda_poll_cmpl()
     [all …]
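The puda.c hits show the two halves of the index handshake: the post path records the buffer at rq_wrid_array[wqe_idx], and the poll path extracts wqe_idx from the CQE (FIELD_GET(IRDMA_CQ_WQEIDX, ...)) to find that buffer again. A hedged sketch of the handshake; post_recvbuf and poll_cmpl here are stand-ins, not the driver functions:

    /* Sketch: producer records a buffer per wqe_idx, completion path uses the
     * index carried in the CQE to recover it.  Illustrative names only. */
    #include <stdint.h>
    #include <stdio.h>

    #define RQ_DEPTH 32u

    static uintptr_t rq_wrid_array[RQ_DEPTH];     /* slot -> buffer cookie */

    static void post_recvbuf(uint32_t wqe_idx, void *buf)
    {
        rq_wrid_array[wqe_idx] = (uintptr_t)buf;  /* remember the buffer for this RQ slot */
        /* the real driver also fills the hardware WQE at rq_base[wqe_idx] here */
    }

    static void *poll_cmpl(uint32_t cqe_wqe_idx)
    {
        /* the CQE carries the slot index (FIELD_GET(IRDMA_CQ_WQEIDX, ...) above) */
        return (void *)rq_wrid_array[cqe_wqe_idx];
    }

    int main(void)
    {
        int buffer = 42;

        post_recvbuf(7, &buffer);
        printf("completed buffer holds %d\n", *(int *)poll_cmpl(7));
        return 0;
    }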
|
D | user.h |
     393  __le64 *irdma_qp_get_next_send_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx,
     396  __le64 *irdma_qp_get_next_recv_wqe(struct irdma_qp_uk *qp, u32 *wqe_idx);
     408  u32 wqe_idx, bool post_sq);
|
D | utils.c |
     537  u32 i, pending_work, wqe_idx;  in irdma_cleanup_pending_cqp_op() local
     540  wqe_idx = IRDMA_RING_CURRENT_TAIL(cqp->sc_cqp.sq_ring);  in irdma_cleanup_pending_cqp_op()
     543  cqp->scratch_array[wqe_idx];  in irdma_cleanup_pending_cqp_op()
     546  wqe_idx = (wqe_idx + 1) % IRDMA_RING_SIZE(cqp->sc_cqp.sq_ring);  in irdma_cleanup_pending_cqp_op()
    2570  u32 wqe_idx;  in irdma_generate_flush_completions() local
    2587  wqe_idx = sq_ring->tail;  in irdma_generate_flush_completions()
    2590  cmpl->cpi.wr_id = qp->sq_wrtrk_array[wqe_idx].wrid;  in irdma_generate_flush_completions()
    2591  sw_wqe = qp->sq_base[wqe_idx].elem;  in irdma_generate_flush_completions()
    2628  wqe_idx = rq_ring->tail;  in irdma_generate_flush_completions()
    2631  cmpl->cpi.wr_id = qp->rq_wrid_array[wqe_idx];  in irdma_generate_flush_completions()
     [all …]
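irdma_cleanup_pending_cqp_op() walks the CQP SQ starting at the ring tail, visiting each pending slot and wrapping modulo the ring size. A small sketch of that walk, assuming a fixed-size scratch array; names are illustrative:

    /* Sketch of the tail-to-head walk over pending CQP slots; illustrative names. */
    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 16u

    static void cleanup_pending(uint32_t tail, uint32_t pending_work,
                                void *scratch_array[])
    {
        uint32_t wqe_idx = tail;
        uint32_t i;

        for (i = 0; i < pending_work; i++) {
            if (scratch_array[wqe_idx])                /* per-slot request context */
                printf("completing pending op in slot %u\n", wqe_idx);
            wqe_idx = (wqe_idx + 1) % RING_SIZE;       /* same wrap as the driver */
        }
    }

    int main(void)
    {
        void *scratch[RING_SIZE] = { 0 };
        int a, b;

        scratch[14] = &a;
        scratch[15] = &b;
        cleanup_pending(14, 3, scratch);               /* visits 14, 15, then wraps to 0 */
        return 0;
    }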
|
D | ctrl.c |
    1291  u32 wqe_idx;  in irdma_sc_mr_fast_register() local
    1306  wqe = irdma_qp_get_next_send_wqe(&qp->qp_uk, &wqe_idx,  in irdma_sc_mr_fast_register()
    1311  irdma_clr_wqes(&qp->qp_uk, wqe_idx);  in irdma_sc_mr_fast_register()
    1315  info->wr_id, wqe_idx,  in irdma_sc_mr_fast_register()
    1316  &qp->qp_uk.sq_wrtrk_array[wqe_idx].wrid);  in irdma_sc_mr_fast_register()
    1351  wqe_idx, post_sq);  in irdma_sc_mr_fast_register()
    3242  u32 *wqe_idx)  in irdma_sc_cqp_get_next_send_wqe_idx() argument
    3254  IRDMA_ATOMIC_RING_MOVE_HEAD(cqp->sq_ring, *wqe_idx, ret_code);  in irdma_sc_cqp_get_next_send_wqe_idx()
    3259  if (!*wqe_idx)  in irdma_sc_cqp_get_next_send_wqe_idx()
    3261  wqe = cqp->sq_base[*wqe_idx].elem;  in irdma_sc_cqp_get_next_send_wqe_idx()
    [all …]
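Both groups of hits in ctrl.c revolve around a helper that returns a WQE slot together with its index so the caller can record per-slot state (the wr_id tracking at sq_wrtrk_array[wqe_idx]) before posting. A sketch of such a get-next-WQE helper, assuming a simple head/tail fullness check rather than the driver's atomic ring move; names are illustrative:

    /* Sketch of a "get next send WQE" helper that returns both the slot and
     * its index; illustrative names, non-atomic for simplicity. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define SQ_DEPTH 8u

    struct demo_sq {
        uint64_t wqe[SQ_DEPTH];           /* stand-in for sq_base[].elem */
        uint32_t head, tail;              /* monotonically increasing indices */
    };

    static uint64_t *get_next_send_wqe_idx(struct demo_sq *sq, uint32_t *wqe_idx)
    {
        if (sq->head - sq->tail >= SQ_DEPTH)   /* ring full: nothing to hand out */
            return NULL;

        *wqe_idx = sq->head % SQ_DEPTH;
        sq->head++;                            /* the driver moves the head atomically */
        return &sq->wqe[*wqe_idx];
    }

    int main(void)
    {
        struct demo_sq sq = { .head = 0, .tail = 0 };
        uint32_t wqe_idx;
        uint64_t *wqe = get_next_send_wqe_idx(&sq, &wqe_idx);

        if (wqe)
            printf("got WQE slot %u\n", wqe_idx);
        return 0;
    }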
|
D | puda.h |
      38  u32 wqe_idx;  member
|
D | type.h |
     991  u16 wqe_idx;  member
    1522  u32 *wqe_idx);
    1531  u32 wqe_idx;  in irdma_sc_cqp_get_next_send_wqe() local
    1533  return irdma_sc_cqp_get_next_send_wqe_idx(cqp, scratch, &wqe_idx);  in irdma_sc_cqp_get_next_send_wqe()
|
D | hw.c |
     384  ctx_info->roce_info->err_rq_idx = info->wqe_idx;  in irdma_process_aeq()
     394  ctx_info->iwarp_info->err_rq_idx = info->wqe_idx;  in irdma_process_aeq()
|
/linux-6.1.9/drivers/infiniband/hw/erdma/ |
D | erdma_cq.c |
     115  u16 wqe_idx, depth;  in erdma_poll_one_cqe() local
     130  wqe_idx = be32_to_cpu(cqe->qe_idx);  in erdma_poll_one_cqe()
     146  wqe_hdr = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx,  in erdma_poll_one_cqe()
     150  wqe_idx + 1;  in erdma_poll_one_cqe()
     155  wc->wr_id = id_table[wqe_idx & (depth - 1)];  in erdma_poll_one_cqe()
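The erdma poll path reads a queue-entry index out of the CQE (byte-swapped with be32_to_cpu) and masks it with depth - 1 to index the wr_id table, which only works because the queue depth is a power of two. A minimal sketch of that lookup; SQ_DEPTH and id_table here are illustrative:

    /* Sketch of the masked wr_id lookup; assumes a power-of-two queue depth. */
    #include <stdint.h>
    #include <stdio.h>

    #define SQ_DEPTH 256u                      /* must be a power of two for the mask */

    static uint64_t id_table[SQ_DEPTH];

    static uint64_t wr_id_from_cqe(uint32_t qe_idx)
    {
        /* the driver byte-swaps the on-wire value first (be32_to_cpu) */
        return id_table[qe_idx & (SQ_DEPTH - 1)];
    }

    int main(void)
    {
        id_table[300 & (SQ_DEPTH - 1)] = 0xabcd;   /* index 300 wraps to slot 44 */
        printf("wr_id = 0x%llx\n", (unsigned long long)wr_id_from_cqe(300));
        return 0;
    }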
|
D | erdma_qp.c |
     205  const struct ib_send_wr *send_wr, u16 wqe_idx,  in fill_inline_data() argument
     212  wqe_idx += (sgl_offset >> SQEBB_SHIFT);  in fill_inline_data()
     214  data = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx, qp->attrs.sq_size,  in fill_inline_data()
     235  wqe_idx += (sgl_offset >> SQEBB_SHIFT);  in fill_inline_data()
     238  data = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx,  in fill_inline_data()
     252  u16 wqe_idx, u32 sgl_offset, __le32 *length_field)  in fill_sgl() argument
     265  wqe_idx += (sgl_offset >> SQEBB_SHIFT);  in fill_sgl()
     267  sgl = get_queue_entry(qp->kern_qp.sq_buf, wqe_idx,  in fill_sgl()
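In fill_inline_data() and fill_sgl(), a WQE that spills past one SQ entry basic block advances wqe_idx by sgl_offset >> SQEBB_SHIFT so the data lands in the block that holds the next bytes. A sketch of that index arithmetic; the 16-byte block size (DEMO_SQEBB_SHIFT = 4) is an assumption for illustration, not the erdma hardware value:

    /* Sketch of mapping a byte offset inside a multi-block WQE to the basic
     * block that holds it.  DEMO_SQEBB_SHIFT is an assumed value. */
    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_SQEBB_SHIFT 4u                         /* assumed: 16-byte basic blocks */
    #define DEMO_SQEBB_SIZE  (1u << DEMO_SQEBB_SHIFT)

    static uint16_t block_for_offset(uint16_t wqe_idx, uint32_t sgl_offset,
                                     uint32_t *in_block_off)
    {
        *in_block_off = sgl_offset & (DEMO_SQEBB_SIZE - 1);
        return (uint16_t)(wqe_idx + (sgl_offset >> DEMO_SQEBB_SHIFT));
    }

    int main(void)
    {
        uint32_t off;
        uint16_t idx = block_for_offset(10, 40, &off);  /* 40 bytes in -> 2 blocks further */

        printf("data lands in block %u at offset %u\n", (unsigned int)idx, off);
        return 0;
    }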
|
/linux-6.1.9/drivers/infiniband/hw/ocrdma/ |
D | ocrdma_verbs.c |
    1595  u32 qpn = 0, wqe_idx = 0;  in ocrdma_discard_cqes() local
    1628  wqe_idx = (le32_to_cpu(cqe->rq.buftag_qpn) >>  in ocrdma_discard_cqes()
    1631  BUG_ON(wqe_idx < 1);  in ocrdma_discard_cqes()
    1634  ocrdma_srq_toggle_bit(qp->srq, wqe_idx - 1);  in ocrdma_discard_cqes()
    2402  u32 wqe_idx)  in ocrdma_update_wc() argument
    2408  hdr = ocrdma_hwq_head_from_idx(&qp->sq, wqe_idx);  in ocrdma_update_wc()
    2410  ibwc->wr_id = qp->wqe_wr_id_tbl[wqe_idx].wrid;  in ocrdma_update_wc()
    2563  u32 wqe_idx;  in ocrdma_poll_success_scqe() local
    2574  wqe_idx = (le32_to_cpu(cqe->wq.wqeidx) &  in ocrdma_poll_success_scqe()
    2576  if (tail != wqe_idx)  in ocrdma_poll_success_scqe()
    [all …]
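ocrdma_discard_cqes() pulls a 1-based buffer tag out of the little-endian CQE word and returns the SRQ slot by toggling bit wqe_idx - 1. A sketch of that conversion; the field layout (tag in the upper 16 bits) is assumed for illustration and does not reflect the ocrdma hardware definition:

    /* Sketch: extract a 1-based buffer tag from a CQE word and toggle the
     * matching SRQ bitmap bit.  The bit layout here is assumed, not ocrdma's. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_BUFTAG_SHIFT 16u               /* assumed: tag in the upper 16 bits */

    static uint64_t srq_bitmap;                 /* one bit per SRQ slot */

    static void discard_rq_cqe(uint32_t buftag_qpn)
    {
        /* the driver converts from little-endian first (le32_to_cpu) */
        uint32_t wqe_idx = buftag_qpn >> DEMO_BUFTAG_SHIFT;

        assert(wqe_idx >= 1);                   /* tags are 1-based, as BUG_ON(wqe_idx < 1) enforces */
        srq_bitmap ^= 1ull << (wqe_idx - 1);    /* give the slot back, like ocrdma_srq_toggle_bit() */
    }

    int main(void)
    {
        discard_rq_cqe(3u << DEMO_BUFTAG_SHIFT);  /* tag 3 -> bitmap bit 2 */
        printf("bitmap = 0x%llx\n", (unsigned long long)srq_bitmap);
        return 0;
    }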
|
/linux-6.1.9/drivers/infiniband/hw/hns/ |
D | hns_roce_hw_v2.c |
     705  unsigned int wqe_idx;  in hns_roce_v2_post_send() local
     728  wqe_idx = (qp->sq.head + nreq) & (qp->sq.wqe_cnt - 1);  in hns_roce_v2_post_send()
     738  wqe = hns_roce_get_send_wqe(qp, wqe_idx);  in hns_roce_v2_post_send()
     739  qp->sq.wrid[wqe_idx] = wr->wr_id;  in hns_roce_v2_post_send()
     822  u32 wqe_idx, u32 max_sge)  in fill_rq_wqe() argument
     828  wqe = hns_roce_get_recv_wqe(hr_qp, wqe_idx);  in fill_rq_wqe()
     833  sge_list = hr_qp->rq_inl_buf.wqe_list[wqe_idx].sg_list;  in fill_rq_wqe()
     834  hr_qp->rq_inl_buf.wqe_list[wqe_idx].sge_cnt = (u32)wr->num_sge;  in fill_rq_wqe()
     849  u32 wqe_idx, nreq, max_sge;  in hns_roce_v2_post_recv() local
     879  wqe_idx = (hr_qp->rq.head + nreq) & (hr_qp->rq.wqe_cnt - 1);  in hns_roce_v2_post_recv()
     [all …]
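The hns post loops compute wqe_idx = (head + nreq) & (wqe_cnt - 1) for each request in a batch, which again assumes a power-of-two queue depth, then fill the WQE at that slot and remember wr_id there. A compact sketch of that batching arithmetic with illustrative names:

    /* Sketch of the batched post loop; WQE_CNT must be a power of two so the
     * AND-mask wraps correctly.  Illustrative names only. */
    #include <stdint.h>
    #include <stdio.h>

    #define WQE_CNT 128u

    struct demo_sq {
        uint32_t head;
        uint64_t wrid[WQE_CNT];
    };

    static void post_batch(struct demo_sq *sq, const uint64_t *wr_ids, uint32_t nr)
    {
        uint32_t nreq;

        for (nreq = 0; nreq < nr; nreq++) {
            uint32_t wqe_idx = (sq->head + nreq) & (WQE_CNT - 1);

            sq->wrid[wqe_idx] = wr_ids[nreq];   /* remember the caller's cookie per slot */
            /* the driver fills the hardware WQE at this index here */
        }
        sq->head += nr;                         /* advance once the batch is built */
    }

    int main(void)
    {
        struct demo_sq sq = { .head = 126 };
        uint64_t ids[] = { 1, 2, 3 };

        post_batch(&sq, ids, 3);                /* slots 126, 127, then wrap to 0 */
        printf("slot 0 holds wr_id %llu\n", (unsigned long long)sq.wrid[0]);
        return 0;
    }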
|
/linux-6.1.9/drivers/infiniband/hw/bnxt_re/ |
D | qplib_fp.c |
    1714  u32 wqe_idx;  in bnxt_qplib_post_send() local
    1737  swq = bnxt_qplib_get_swqe(sq, &wqe_idx);  in bnxt_qplib_post_send()
    1926  bnxt_qplib_swq_mod_start(sq, wqe_idx);  in bnxt_qplib_post_send()
    1964  u32 wqe_idx;  in bnxt_qplib_post_recv() local
    1983  swq = bnxt_qplib_get_swqe(rq, &wqe_idx);  in bnxt_qplib_post_recv()
    2012  base_hdr->wr_id[0] = cpu_to_le32(wqe_idx);  in bnxt_qplib_post_recv()
    2014  bnxt_qplib_swq_mod_start(rq, wqe_idx);  in bnxt_qplib_post_recv()
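In bnxt_re, bnxt_qplib_get_swqe() hands back a software WQE together with its index, the index is written into the hardware request (wr_id[0] = cpu_to_le32(wqe_idx)) so the completion can be matched back to the slot, and bnxt_qplib_swq_mod_start() marks the slot in flight. A rough sketch of that flow under those assumptions; demo_rq and get_swqe are illustrative, not the driver's types:

    /* Sketch: allocate a software WQE slot plus its index, stash the index in
     * the hardware request, and mark the slot in flight.  Illustrative names. */
    #include <stdint.h>
    #include <stdio.h>

    #define RQ_DEPTH 16u

    struct demo_swq {
        uint64_t wr_id;                         /* caller's cookie */
        int in_flight;
    };

    struct demo_rq {
        struct demo_swq swq[RQ_DEPTH];
        uint32_t prod;                          /* producer index */
    };

    static struct demo_swq *get_swqe(struct demo_rq *rq, uint32_t *wqe_idx)
    {
        *wqe_idx = rq->prod % RQ_DEPTH;
        rq->prod++;
        return &rq->swq[*wqe_idx];
    }

    int main(void)
    {
        struct demo_rq rq = { .prod = 0 };
        uint32_t wqe_idx;
        struct demo_swq *swq = get_swqe(&rq, &wqe_idx);
        uint32_t hw_tag = wqe_idx;              /* the driver stores cpu_to_le32(wqe_idx) here */

        swq->wr_id = 0x77;
        swq->in_flight = 1;                     /* like bnxt_qplib_swq_mod_start() */
        printf("hardware tag %u maps back to wr_id 0x%llx\n",
               hw_tag, (unsigned long long)rq.swq[hw_tag].wr_id);
        return 0;
    }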
|