/linux-6.1.9/drivers/infiniband/hw/mlx5/
  gsi.c
      56  wr = &gsi->outstanding_wrs[index % gsi->cap.max_send_wr];  in generate_completions()
     112  kcalloc(attr->cap.max_send_wr, sizeof(*gsi->outstanding_wrs),  in mlx5_ib_create_gsi()
     131  gsi->cq = ib_alloc_cq(pd->device, gsi, attr->cap.max_send_wr, 0,  in mlx5_ib_create_gsi()
     143  hw_init_attr.cap.max_send_wr = 0;  in mlx5_ib_create_gsi()
     208  .max_send_wr = gsi->cap.max_send_wr,  in create_gsi_ud_qp()
     366  if (gsi->outstanding_pi == gsi->outstanding_ci + gsi->cap.max_send_wr) {  in mlx5_ib_add_outstanding_wr()
     372  gsi->cap.max_send_wr];  in mlx5_ib_add_outstanding_wr()

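The gsi.c hits show a common pattern: a driver-side ring of in-flight send WRs is sized from cap.max_send_wr and walked with free-running producer/consumer indices taken modulo that depth. A minimal sketch of the same bookkeeping, using a hypothetical wr_ring struct rather than the mlx5 types:

#include <linux/errno.h>
#include <linux/types.h>

/* Hypothetical ring of in-flight send WRs, depth == cap.max_send_wr. */
struct wr_ring {
        void **slots;           /* kcalloc(max_send_wr, sizeof(*slots)) */
        u32 max_send_wr;        /* send-queue depth the QP was created with */
        u32 pi;                 /* producer index, free-running */
        u32 ci;                 /* consumer index, advanced on completion */
};

static int wr_ring_reserve(struct wr_ring *r, void *wr, u32 *index)
{
        /* Full when max_send_wr posts separate producer and consumer,
         * mirroring the outstanding_pi/outstanding_ci test listed above. */
        if (r->pi == r->ci + r->max_send_wr)
                return -ENOMEM;

        *index = r->pi++ % r->max_send_wr;
        r->slots[*index] = wr;
        return 0;
}
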
/linux-6.1.9/drivers/infiniband/ulp/iser/
  iser_verbs.c
     242  unsigned int max_send_wr, cq_size;  in iser_create_ib_conn_res()  (local)
     251  max_send_wr = ISER_QP_SIG_MAX_REQ_DTOS + 1;  in iser_create_ib_conn_res()
     253  max_send_wr = ISER_QP_MAX_REQ_DTOS + 1;  in iser_create_ib_conn_res()
     254  max_send_wr = min_t(unsigned int, max_send_wr,  in iser_create_ib_conn_res()
     257  cq_size = max_send_wr + ISER_QP_MAX_RECV_DTOS;  in iser_create_ib_conn_res()
     277  init_attr.cap.max_send_wr = max_send_wr;  in iser_create_ib_conn_res()
     280  iser_conn->max_cmds = ISER_GET_MAX_XMIT_CMDS(max_send_wr - 1);  in iser_create_ib_conn_res()
     288  ib_conn->cma_id, ib_conn->cma_id->qp, max_send_wr);  in iser_create_ib_conn_res()

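iser_verbs.c derives max_send_wr from the number of commands it wants in flight, clamps it to the device's max_qp_wr, and then sizes the completion queue to cover both directions. A hedged sketch of that arithmetic; MY_MAX_REQS and MY_MAX_RECVS are placeholders, not the iSER constants:

#include <linux/minmax.h>
#include <rdma/ib_verbs.h>

#define MY_MAX_REQS     512     /* placeholder: commands in flight */
#define MY_MAX_RECVS    512     /* placeholder: posted receive buffers */

static void size_queues(struct ib_device *ibdev,
                        unsigned int *max_send_wr, unsigned int *cq_size)
{
        /* One send WR per command plus a spare, but never more than the
         * HCA allows for a single QP. */
        *max_send_wr = MY_MAX_REQS + 1;
        *max_send_wr = min_t(unsigned int, *max_send_wr,
                             ibdev->attrs.max_qp_wr);

        /* The CQ has to absorb completions for sends and receives. */
        *cq_size = *max_send_wr + MY_MAX_RECVS;
}
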
/linux-6.1.9/drivers/infiniband/core/
  uverbs_std_types_qp.c
      68  attr->cap.max_send_wr = cap->max_send_wr;  in set_caps()
      74  cap->max_send_wr = attr->cap.max_send_wr;  in set_caps()
     167  if (cap.max_send_wr) {  in UVERBS_HANDLER()

  uverbs_marshall.c
     110  dst->max_send_wr = src->cap.max_send_wr;  in ib_copy_qp_attr_to_user()

  rw.c
     677  attr->cap.max_send_wr += factor * attr->cap.max_rdma_ctxs;  in rdma_rw_init_qp()
     683  attr->cap.max_send_wr =  in rdma_rw_init_qp()
     684  min_t(u32, attr->cap.max_send_wr, dev->attrs.max_qp_wr);  in rdma_rw_init_qp()

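rw.c shows the core padding a ULP's send queue: rdma_rw_init_qp() adds factor * max_rdma_ctxs work requests for RDMA READ/WRITE contexts and clamps the result to the device's max_qp_wr. From the ULP side that just means filling in both cap fields before creating the QP; a minimal sketch (the depths are arbitrary placeholders):

#include <rdma/ib_verbs.h>
#include <rdma/rdma_cm.h>

/* Sketch of the caller side of rdma_rw_init_qp(): the ULP requests SQ
 * space for its own sends (max_send_wr) and for R/W contexts
 * (max_rdma_ctxs); the RDMA core grows max_send_wr accordingly and
 * clamps it to dev->attrs.max_qp_wr before the QP is created. */
static int create_rw_qp(struct rdma_cm_id *cm_id, struct ib_pd *pd,
                        struct ib_cq *cq)
{
        struct ib_qp_init_attr init_attr = {
                .qp_type     = IB_QPT_RC,
                .send_cq     = cq,
                .recv_cq     = cq,
                .sq_sig_type = IB_SIGNAL_REQ_WR,
                .cap = {
                        .max_send_wr   = 64,    /* placeholder SQ depth */
                        .max_recv_wr   = 64,
                        .max_send_sge  = 1,
                        .max_recv_sge  = 1,
                        .max_rdma_ctxs = 16,    /* placeholder R/W contexts */
                },
        };

        return rdma_create_qp(cm_id, pd, &init_attr);
}
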
/linux-6.1.9/drivers/infiniband/hw/vmw_pvrdma/
  pvrdma_misc.c
     226  dst->max_send_wr = src->max_send_wr;  in pvrdma_qp_cap_to_ib()
     235  dst->max_send_wr = src->max_send_wr;  in ib_qp_cap_to_pvrdma()

  pvrdma_dev_api.h
     559  u32 max_send_wr;  (member)
     578  u32 max_send_wr;  (member)
     589  u32 max_send_wr;  (member)

  pvrdma_qp.c
     159  if (req_cap->max_send_wr > dev->dsr->caps.max_qp_wr ||  in pvrdma_set_sq_size()
     165  qp->sq.wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_send_wr));  in pvrdma_set_sq_size()
     169  req_cap->max_send_wr = qp->sq.wqe_cnt;  in pvrdma_set_sq_size()
     365  cmd->max_send_wr = init_attr->cap.max_send_wr;  in pvrdma_create_qp()
     380  cmd->max_send_wr, cmd->max_recv_wr, cmd->max_send_sge,  in pvrdma_create_qp()

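pvrdma_set_sq_size() is a typical provider-side shape: reject a request beyond the reported max_qp_wr, round the work-queue depth up to a power of two, and write the depth actually allocated back into the cap struct so the ULP sees what it really got. A sketch of that pattern with a placeholder hw_max_qp_wr limit, not the pvrdma code itself:

#include <linux/errno.h>
#include <linux/log2.h>
#include <linux/minmax.h>
#include <rdma/ib_verbs.h>

static int set_sq_size(struct ib_qp_cap *req_cap, u32 hw_max_qp_wr,
                       u32 *wqe_cnt)
{
        if (req_cap->max_send_wr > hw_max_qp_wr)
                return -EINVAL;

        /* Hardware rings are powers of two; never allocate fewer than 1. */
        *wqe_cnt = roundup_pow_of_two(max(1U, req_cap->max_send_wr));
        if (*wqe_cnt > hw_max_qp_wr)
                return -EINVAL;

        req_cap->max_send_wr = *wqe_cnt;        /* report the real depth */
        return 0;
}
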
/linux-6.1.9/net/sunrpc/xprtrdma/
  frwr_ops.c
     237  ep->re_attr.cap.max_send_wr = ep->re_max_requests * depth;  in frwr_query_device()
     238  if (ep->re_attr.cap.max_send_wr > max_qp_wr) {  in frwr_query_device()
     242  ep->re_attr.cap.max_send_wr = ep->re_max_requests * depth;  in frwr_query_device()
     244  ep->re_attr.cap.max_send_wr += RPCRDMA_BACKWARD_WRS;  in frwr_query_device()
     245  ep->re_attr.cap.max_send_wr += 1; /* for ib_drain_sq */  in frwr_query_device()

  svc_rdma_transport.c
     448  qp_attr.cap.max_send_wr = newxprt->sc_sq_depth - ctxts;  in svc_rdma_accept()
     459  qp_attr.cap.max_send_wr, qp_attr.cap.max_recv_wr);  in svc_rdma_accept()

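frwr_ops.c sizes the RPC client's send queue arithmetically: each credit costs roughly 'depth' send WRs (the Send itself plus any memory-registration WRs it may need), the credit count is shed until the product fits the device's max_qp_wr, and a few slots are reserved for backchannel requests and the drain WR posted by ib_drain_sq(). A sketch of that calculation; BACKWARD_WRS stands in for RPCRDMA_BACKWARD_WRS:

#include <linux/types.h>

#define BACKWARD_WRS    2       /* placeholder for RPCRDMA_BACKWARD_WRS */

static u32 size_send_queue(u32 *credits, u32 depth, u32 dev_max_qp_wr)
{
        /* Reserve room up front for the backchannel and the drain WR. */
        u32 budget = dev_max_qp_wr - BACKWARD_WRS - 1;
        u32 max_send_wr = *credits * depth;

        if (max_send_wr > budget) {
                *credits = budget / depth;      /* shed credits until it fits */
                max_send_wr = *credits * depth;
        }

        max_send_wr += BACKWARD_WRS;    /* backchannel requests */
        max_send_wr += 1;               /* slot consumed by ib_drain_sq() */
        return max_send_wr;
}
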
/linux-6.1.9/drivers/infiniband/ulp/rtrs/
  rtrs.c
     254  u32 max_send_wr, u32 max_recv_wr, u32 max_sge)  in create_qp()  (argument)
     260  init_attr.cap.max_send_wr = max_send_wr;  in create_qp()
     295  u32 max_send_wr, u32 max_recv_wr,  in rtrs_cq_qp_create()  (argument)
     304  err = create_qp(con, path->dev->ib_pd, max_send_wr, max_recv_wr,  in rtrs_cq_qp_create()

  rtrs-srv.c
    1642  u32 cq_num, max_send_wr, max_recv_wr, wr_limit;  in create_con()  (local)
    1664  max_send_wr = min_t(int, wr_limit,  in create_con()
    1666  max_recv_wr = max_send_wr;  in create_con()
    1672  max_send_wr =  in create_con()
    1676  max_send_wr =  in create_con()
    1688  cq_num = max_send_wr + max_recv_wr;  in create_con()
    1689  atomic_set(&con->c.sq_wr_avail, max_send_wr);  in create_con()
    1694  max_send_wr, max_recv_wr,  in create_con()

  rtrs-clt.c
    1645  u32 max_send_wr, max_recv_wr, cq_num, max_send_sge, wr_limit;  in create_con_cq_qp()  (local)
    1678  max_send_wr =  in create_con_cq_qp()
    1680  max_recv_wr = max_send_wr;  in create_con_cq_qp()
    1695  max_send_wr = min_t(int, wr_limit,  in create_con_cq_qp()
    1702  atomic_set(&con->c.sq_wr_avail, max_send_wr);  in create_con_cq_qp()
    1703  cq_num = max_send_wr + max_recv_wr;  in create_con_cq_qp()
    1715  cq_num = max_send_wr + max_recv_wr;  in create_con_cq_qp()
    1719  cq_vector, cq_num, max_send_wr,  in create_con_cq_qp()
    1723  cq_vector, cq_num, max_send_wr,  in create_con_cq_qp()

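Both rtrs sides initialise an atomic sq_wr_avail counter to max_send_wr and treat it as a pool of send-queue credits, so the ULP never posts more WRs than the SQ was created with. A generic sketch of that flow control; the struct and helpers here are hypothetical, not the rtrs API:

#include <linux/atomic.h>
#include <linux/errno.h>
#include <rdma/ib_verbs.h>

struct sq_credits {
        atomic_t avail;         /* initialised to cap.max_send_wr */
};

static int post_with_credit(struct ib_qp *qp, struct ib_send_wr *wr,
                            struct sq_credits *c)
{
        int err;

        if (atomic_dec_if_positive(&c->avail) < 0)
                return -ENOMEM;         /* SQ full from our point of view */

        err = ib_post_send(qp, wr, NULL);
        if (err)
                atomic_inc(&c->avail);  /* give the credit back */
        return err;
}

/* Called from the send completion handler: one credit per completed WR. */
static void sq_credit_put(struct sq_credits *c)
{
        atomic_inc(&c->avail);
}
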
/linux-6.1.9/include/uapi/linux/
  rds.h
     249  __u32 max_send_wr;  (member)
     265  __u32 max_send_wr;  (member)

/linux-6.1.9/include/uapi/rdma/
  qedr-abi.h
      69  __u32 max_send_wr;  (member)

  ib_user_verbs.h
     550  __u32 max_send_wr;  (member)
     579  __u32 max_send_wr;  (member)
     605  __u32 max_send_wr;  (member)
     634  __u32 max_send_wr;  (member)
     678  __u32 max_send_wr;  (member)

  ib_user_ioctl_verbs.h
     225  __u32 max_send_wr;  (member)

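These uapi structs are the ABI that carries the requested queue sizes between user space and the kernel's uverbs layer. On the user side the same field is filled in through libibverbs; a minimal userspace sketch (depths arbitrary, pd and cq assumed to come from the usual ibv_alloc_pd()/ibv_create_cq() setup):

#include <infiniband/verbs.h>

static struct ibv_qp *create_qp(struct ibv_pd *pd, struct ibv_cq *cq)
{
        struct ibv_qp_init_attr attr = {
                .send_cq = cq,
                .recv_cq = cq,
                .qp_type = IBV_QPT_RC,
                .cap = {
                        .max_send_wr  = 128,    /* requested SQ depth */
                        .max_recv_wr  = 128,
                        .max_send_sge = 1,
                        .max_recv_sge = 1,
                },
        };
        struct ibv_qp *qp = ibv_create_qp(pd, &attr);

        /* On success the provider may round the depths up; the adjusted
         * values are written back into attr.cap. */
        return qp;
}
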
/linux-6.1.9/drivers/infiniband/sw/rxe/
  rxe_qp.c
      21  if (cap->max_send_wr > rxe->attr.max_qp_wr) {  in rxe_qp_chk_cap()
      23  cap->max_send_wr, rxe->attr.max_qp_wr);  in rxe_qp_chk_cap()
     208  qp->sq.max_wr = init->cap.max_send_wr;  in rxe_qp_init_req()
     376  init->cap.max_send_wr = qp->sq.max_wr;  in rxe_qp_to_init()
     739  attr->cap.max_send_wr = qp->sq.max_wr;  in rxe_qp_to_attr()

/linux-6.1.9/drivers/infiniband/hw/qedr/
  qedr_roce_cm.c
     170  if (attrs->cap.max_send_wr > QEDR_GSI_MAX_SEND_WR) {  in qedr_check_gsi_qp_attrs()
     173  attrs->cap.max_send_wr, QEDR_GSI_MAX_SEND_WR);  in qedr_check_gsi_qp_attrs()
     282  data.input.tx_num_desc = attrs->cap.max_send_wr;  in qedr_ll2_start()
     340  qp->sq.max_wr = attrs->cap.max_send_wr;  in qedr_create_gsi_qp()

/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/steering/
  dr_send.c
      47  u32 max_send_wr;  (member)
     129  dr_qp->sq.wqe_cnt = roundup_pow_of_two(attr->max_send_wr);  in dr_create_rc_qp()
     940  init_attr.max_send_wr = QUEUE_SIZE;  in mlx5dr_send_ring_alloc()
     957  dmn->info.max_send_wr = QUEUE_SIZE;  in mlx5dr_send_ring_alloc()
     961  dmn->send_ring->signal_th = dmn->info.max_send_wr /  in mlx5dr_send_ring_alloc()

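dr_send.c derives a signalling threshold from max_send_wr so that only a fraction of send WRs generate completions. A generic sketch of that selective-signalling idea, assuming the QP was created with sq_sig_type = IB_SIGNAL_REQ_WR; the ring struct and counter here are hypothetical:

#include <linux/types.h>
#include <rdma/ib_verbs.h>

struct send_ring {
        u32 max_send_wr;
        u32 signal_th;          /* e.g. max_send_wr / 8 */
        u32 posted;             /* number of WRs posted so far */
};

static int post_batched(struct ib_qp *qp, struct ib_send_wr *wr,
                        struct send_ring *ring)
{
        /* Only every signal_th-th WR asks for a completion, so the CQ
         * is drained in batches instead of per post. */
        wr->send_flags &= ~IB_SEND_SIGNALED;
        if (++ring->posted % ring->signal_th == 0)
                wr->send_flags |= IB_SEND_SIGNALED;

        return ib_post_send(qp, wr, NULL);
}
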
/linux-6.1.9/net/rds/
  ib.c
     322  iinfo->max_send_wr = ic->i_send_ring.w_nr;  in rds_ib_conn_info_visitor()
     359  iinfo6->max_send_wr = ic->i_send_ring.w_nr;  in rds6_ib_conn_info_visitor()

/linux-6.1.9/drivers/infiniband/hw/efa/
  efa.h
     115  u32 max_send_wr;  (member)

  efa_verbs.c
     325  qp_attr->cap.max_send_wr = qp->max_send_wr;  in efa_query_qp()
     559  if (init_attr->cap.max_send_wr > dev->dev_attr.max_sq_depth) {  in efa_qp_validate_cap()
     562  init_attr->cap.max_send_wr,  in efa_qp_validate_cap()
     692  create_qp_params.sq_depth = init_attr->cap.max_send_wr;  in efa_create_qp()
     730  qp->max_send_wr = init_attr->cap.max_send_wr;  in efa_create_qp()

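efa_verbs.c validates the requested depth against the device's max_sq_depth at create time, stores it in the driver's QP object, and hands the same number back from efa_query_qp(). A sketch of that create/query round trip with placeholder types ('struct my_qp', 'max_sq_depth'):

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

struct my_qp {
        struct ib_qp ibqp;
        u32 max_send_wr;        /* depth accepted at create time */
};

static int my_qp_validate_cap(struct my_qp *qp,
                              struct ib_qp_init_attr *init_attr,
                              u32 max_sq_depth)
{
        if (init_attr->cap.max_send_wr > max_sq_depth)
                return -EINVAL;

        qp->max_send_wr = init_attr->cap.max_send_wr;
        return 0;
}

static void my_query_qp_cap(struct my_qp *qp, struct ib_qp_attr *qp_attr)
{
        /* Echo the create-time value back to ib_query_qp() callers. */
        qp_attr->cap.max_send_wr = qp->max_send_wr;
}
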
/linux-6.1.9/drivers/infiniband/hw/erdma/
  erdma_verbs.c
     275  attr->max_qp_wr = min(dev->attrs.max_send_wr, dev->attrs.max_recv_wr);  in erdma_query_device()
     382  if ((attrs->cap.max_send_wr > dev->attrs.max_send_wr) ||  in erdma_qp_validate_cap()
     387  !attrs->cap.max_send_wr || !attrs->cap.max_recv_wr) {  in erdma_qp_validate_cap()
     703  qp->attrs.sq_size = roundup_pow_of_two(attrs->cap.max_send_wr *  in erdma_create_qp()
    1308  qp_attr->cap.max_send_wr = qp->attrs.sq_size;  in erdma_query_qp()

/linux-6.1.9/drivers/infiniband/hw/hns/
  hns_roce_qp.c
     692  if (!cap->max_send_wr || cap->max_send_wr > hr_dev->caps.max_wqes ||  in set_kernel_sq_size()
     698  cnt = roundup_pow_of_two(max(cap->max_send_wr, hr_dev->caps.min_wqes));  in set_kernel_sq_size()
     711  cap->max_send_wr = cnt;  in set_kernel_sq_size()
     719  if (attr->qp_type == IB_QPT_XRC_TGT || !attr->cap.max_send_wr)  in hns_roce_qp_has_sq()