
Searched refs:wqe_shift (Results 1 – 19 of 19) sorted by relevance

/linux-6.6.21/drivers/infiniband/hw/mthca/
mthca_srq.c
77 return srq->queue.direct.buf + (n << srq->wqe_shift); in get_wqe()
79 return srq->queue.page_list[(n << srq->wqe_shift) >> PAGE_SHIFT].buf + in get_wqe()
80 ((n << srq->wqe_shift) & (PAGE_SIZE - 1)); in get_wqe()
108 context->wqe_base_ds = cpu_to_be64(1 << (srq->wqe_shift - 4)); in mthca_tavor_init_srq_context()
139 context->logstride_usrpage = cpu_to_be32((srq->wqe_shift - 4) << 29); in mthca_arbel_init_srq_context()
149 mthca_buf_free(dev, srq->max << srq->wqe_shift, &srq->queue, in mthca_free_srq_buf()
169 err = mthca_buf_alloc(dev, srq->max << srq->wqe_shift, in mthca_alloc_srq_buf()
189 next->nda_op = htonl(((i + 1) << srq->wqe_shift) | 1); in mthca_alloc_srq_buf()
196 (void *) scatter < wqe + (1 << srq->wqe_shift); in mthca_alloc_srq_buf()
235 srq->wqe_shift = ilog2(ds); in mthca_alloc_srq()
[all …]
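A recurring pattern in these hits: wqe_shift is the log2 of the WQE stride, so WQE n lives at byte offset n << wqe_shift, either inside one direct allocation or split across a page list. Below is a minimal, self-contained sketch of that lookup; the struct, names, and page-size macros are illustrative stand-ins, not the kernel's own code.

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#define SK_PAGE_SHIFT 12u                 /* assumed 4 KiB pages for the sketch */
#define SK_PAGE_SIZE  (1u << SK_PAGE_SHIFT)

struct wqe_buf {
    int    is_direct;                     /* one contiguous allocation? */
    void  *direct;                        /* used when is_direct != 0 */
    void **page_list;                     /* array of page-sized chunks otherwise */
};

/* Locate WQE number n: its byte offset is simply n << wqe_shift. */
void *get_wqe_sketch(const struct wqe_buf *buf, int n, int wqe_shift)
{
    size_t off = (size_t)n << wqe_shift;

    if (buf->is_direct)
        return (char *)buf->direct + off;

    /* Paged layout: pick the page, then the offset inside that page. */
    return (char *)buf->page_list[off >> SK_PAGE_SHIFT] +
           (off & (SK_PAGE_SIZE - 1));
}

int main(void)
{
    struct wqe_buf buf = { .is_direct = 1, .direct = calloc(1, SK_PAGE_SIZE) };

    /* With 64-byte WQEs (wqe_shift = 6), WQE 3 starts 192 bytes in. */
    printf("offset of WQE 3: %td bytes\n",
           (char *)get_wqe_sketch(&buf, 3, 6) - (char *)buf.direct);
    free(buf.direct);
    return 0;
}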
mthca_qp.c
211 return qp->queue.direct.buf + (n << qp->rq.wqe_shift); in get_recv_wqe()
213 return qp->queue.page_list[(n << qp->rq.wqe_shift) >> PAGE_SHIFT].buf + in get_recv_wqe()
214 ((n << qp->rq.wqe_shift) & (PAGE_SIZE - 1)); in get_recv_wqe()
221 (n << qp->sq.wqe_shift); in get_send_wqe()
224 (n << qp->sq.wqe_shift)) >> in get_send_wqe()
226 ((qp->send_wqe_offset + (n << qp->sq.wqe_shift)) & in get_send_wqe()
617 qp_context->rq_size_stride |= qp->rq.wqe_shift - 4; in __mthca_modify_qp()
621 qp_context->sq_size_stride |= qp->sq.wqe_shift - 4; in __mthca_modify_qp()
971 1 << qp->sq.wqe_shift)); in mthca_adjust_qp_caps()
978 (min(dev->limits.max_desc_sz, 1 << qp->rq.wqe_shift) - in mthca_adjust_qp_caps()
[all …]
mthca_provider.h
211 int wqe_shift; member
237 int wqe_shift; member
mthca_cq.c
536 >> wq->wqe_shift); in mthca_poll_one()
543 wqe_index = wqe >> srq->wqe_shift; in mthca_poll_one()
550 wqe_index = wqe >> wq->wqe_shift; in mthca_poll_one()
/linux-6.6.21/drivers/infiniband/hw/mlx5/
srq.h
19 u32 wqe_shift; member
47 int wqe_shift; member
srq.c
125 mlx5_init_fbc(srq->buf.frags, srq->msrq.wqe_shift, ilog2(srq->msrq.max), in create_srq_kernel()
233 srq->msrq.wqe_shift = ilog2(desc_size); in mlx5_ib_create_srq()
252 in.wqe_shift = srq->msrq.wqe_shift - 4; in mlx5_ib_create_srq()
srq_cmd.c
16 u32 log_rq_stride = in->wqe_shift; in get_pas_size()
32 MLX5_SET(wq, wq, log_wq_stride, in->wqe_shift + 4); in set_wq()
45 MLX5_SET(srqc, srqc, log_rq_stride, in->wqe_shift); in set_srqc()
60 in->wqe_shift = MLX5_GET(wq, wq, log_wq_stride) - 4; in get_wq()
73 in->wqe_shift = MLX5_GET(srqc, srqc, log_rq_stride); in get_srqc()
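Several of the mlx5 lines above add or subtract 4 when moving between the driver's wqe_shift and a command-interface field (in.wqe_shift = srq->msrq.wqe_shift - 4 at create time, log_wq_stride = in->wqe_shift + 4 in set_wq()). Assuming the usual convention that the driver's wqe_shift is log2 of the descriptor size in bytes while the other field counts the stride in 16-byte units, the conversion is just plus or minus 4 because 16 = 2^4. A toy calculation with made-up numbers:

#include <stdio.h>

int main(void)
{
    unsigned int desc_size  = 64;              /* example descriptor size in bytes */
    unsigned int wqe_shift  = 6;               /* ilog2(desc_size), as the driver stores it */
    unsigned int log_stride = wqe_shift - 4;   /* same stride counted in 16-byte units */

    printf("%u-byte WQE -> wqe_shift %u -> log stride %u (units of 16 bytes)\n",
           desc_size, wqe_shift, log_stride);
    return 0;
}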
qp.c
197 wq->wqe_shift, buflen, in mlx5_ib_read_user_wqe_sq()
222 wq->wqe_cnt, wq->wqe_shift, in mlx5_ib_read_user_wqe_sq()
259 wq->wqe_shift, buflen, in mlx5_ib_read_user_wqe_rq()
274 size_t wqe_size = 1 << wq->wqe_shift; in mlx5_ib_read_wqe_rq()
293 srq->msrq.max, srq->msrq.wqe_shift, in mlx5_ib_read_user_wqe_srq()
306 size_t wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_read_wqe_srq()
446 qp->rq.wqe_shift = 0; in set_rq_size()
456 qp->rq.wqe_shift = ucmd->rq_wqe_shift; in set_rq_size()
457 if ((1 << qp->rq.wqe_shift) / in set_rq_size()
462 (1 << qp->rq.wqe_shift) / in set_rq_size()
[all …]
mlx5_ib.h
371 int wqe_shift; member
407 u32 wqe_shift; member
odp.c
1118 int wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_mr_responder_pfault_handler_srq()
1137 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler_rq()
/linux-6.6.21/drivers/infiniband/hw/mlx4/
srq.c
44 return mlx4_buf_offset(&srq->buf, n << srq->msrq.wqe_shift); in get_wqe()
107 srq->msrq.wqe_shift = ilog2(desc_size); in mlx4_ib_create_srq()
285 mlx4_buf_free(dev->dev, msrq->msrq.max << msrq->msrq.wqe_shift, in mlx4_ib_destroy_srq()
qp.c
181 return get_wqe(qp, qp->rq.offset + (n << qp->rq.wqe_shift)); in get_recv_wqe()
186 return get_wqe(qp, qp->sq.offset + (n << qp->sq.wqe_shift)); in get_send_wqe()
363 qp->rq.wqe_shift = ilog2(max_t(u32, wqe_size, inl_recv_sz)); in set_rq_size()
409 qp->sq.wqe_shift = ilog2(roundup_pow_of_two(s)); in set_kernel_sq_size()
415 qp->sq_spare_wqes = MLX4_IB_SQ_HEADROOM(qp->sq.wqe_shift); in set_kernel_sq_size()
421 (1 << qp->sq.wqe_shift)) - in set_kernel_sq_size()
425 qp->buf_size = (qp->rq.wqe_cnt << qp->rq.wqe_shift) + in set_kernel_sq_size()
426 (qp->sq.wqe_cnt << qp->sq.wqe_shift); in set_kernel_sq_size()
427 if (qp->rq.wqe_shift > qp->sq.wqe_shift) { in set_kernel_sq_size()
429 qp->sq.offset = qp->rq.wqe_cnt << qp->rq.wqe_shift; in set_kernel_sq_size()
[all …]
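The mlx4 set_kernel_sq_size() hits above size one shared QP buffer, with each queue contributing wqe_cnt << wqe_shift bytes and the queue with the larger stride placed first. A toy version of that arithmetic, with counts and shifts invented for illustration only:

#include <stdio.h>

int main(void)
{
    /* Illustrative sizes: 256 recv WQEs of 64 B, 512 send WQEs of 128 B. */
    unsigned int rq_wqe_cnt = 256, rq_wqe_shift = 6;
    unsigned int sq_wqe_cnt = 512, sq_wqe_shift = 7;

    size_t buf_size = ((size_t)rq_wqe_cnt << rq_wqe_shift) +
                      ((size_t)sq_wqe_cnt << sq_wqe_shift);

    printf("combined QP buffer: %zu bytes\n", buf_size);  /* 16384 + 65536 = 81920 */
    return 0;
}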
mlx4_ib.h
170 int wqe_shift; member
/linux-6.6.21/drivers/infiniband/hw/hns/
hns_roce_qp.c
477 hr_qp->rq.wqe_shift = ilog2(hr_dev->caps.max_rq_desc_sz * in set_rq_size()
637 hr_qp->sq.wqe_shift = ucmd->log_sq_stride; in set_user_sq_size()
656 hr_qp->sq.wqe_shift); in set_wqe_buf_attr()
678 hr_qp->rq.wqe_shift); in set_wqe_buf_attr()
714 hr_qp->sq.wqe_shift = ilog2(hr_dev->caps.max_sq_desc_sz); in set_kernel_sq_size()
1429 return get_wqe(hr_qp, hr_qp->rq.offset + (n << hr_qp->rq.wqe_shift)); in hns_roce_get_recv_wqe()
1434 return get_wqe(hr_qp, hr_qp->sq.offset + (n << hr_qp->sq.wqe_shift)); in hns_roce_get_send_wqe()
hns_roce_srq.c
226 srq->wqe_shift = ilog2(roundup_pow_of_two(max(HNS_ROCE_SGE_SIZE, in alloc_srq_wqe_buf()
232 srq->wqe_shift); in alloc_srq_wqe_buf()
hns_roce_device.h
336 u32 wqe_shift; /* WQE size */ member
441 u32 wqe_shift; member
hns_roce_hw_v2.c
868 return hns_roce_buf_offset(srq->buf_mtr.kmem, n << srq->wqe_shift); in get_srq_wqe_buf()
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx4/
srq.c
190 srq_context->logstride = srq->wqe_shift - 4; in mlx4_srq_alloc()
/linux-6.6.21/include/linux/mlx4/
device.h
770 int wqe_shift; member