Searched refs: num_sge (results 1 – 25 of 51), sorted by relevance


/linux-3.4.99/drivers/infiniband/hw/qib/
qib_uc.c
101 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_uc_req()
282 qp->r_sge.num_sge = 0; in qib_uc_rcv()
284 while (qp->r_sge.num_sge) { in qib_uc_rcv()
286 if (--qp->r_sge.num_sge) in qib_uc_rcv()
408 while (qp->s_rdma_read_sge.num_sge) { in qib_uc_rcv()
410 if (--qp->s_rdma_read_sge.num_sge) in qib_uc_rcv()
454 qp->r_sge.num_sge = 1; in qib_uc_rcv()
456 qp->r_sge.num_sge = 0; in qib_uc_rcv()
496 while (qp->s_rdma_read_sge.num_sge) { in qib_uc_rcv()
499 if (--qp->s_rdma_read_sge.num_sge) in qib_uc_rcv()
[all …]
qib_srq.c
61 if ((unsigned) wr->num_sge > srq->rq.max_sge) { in qib_post_srq_receive()
81 wqe->num_sge = wr->num_sge; in qib_post_srq_receive()
82 for (i = 0; i < wr->num_sge; i++) in qib_post_srq_receive()
287 p->num_sge = wqe->num_sge; in qib_modify_srq()
288 for (i = 0; i < wqe->num_sge; i++) in qib_modify_srq()
qib_ruc.c
94 for (i = j = 0; i < wqe->num_sge; i++) { in qib_init_sge()
104 ss->num_sge = j; in qib_init_sge()
115 ss->num_sge = 0; in qib_init_sge()
429 sqp->s_sge.num_sge = wqe->wr.num_sge; in qib_ruc_loopback()
466 qp->r_sge.num_sge = 1; in qib_ruc_loopback()
480 sqp->s_sge.num_sge = 1; in qib_ruc_loopback()
483 qp->r_sge.num_sge = wqe->wr.num_sge; in qib_ruc_loopback()
505 qp->r_sge.num_sge = 0; in qib_ruc_loopback()
529 if (--sqp->s_sge.num_sge) in qib_ruc_loopback()
545 while (qp->r_sge.num_sge) { in qib_ruc_loopback()
[all …]
qib_ud.c
174 ssge.num_sge = swqe->wr.num_sge; in qib_ud_loopback()
189 if (--ssge.num_sge) in qib_ud_loopback()
204 while (qp->r_sge.num_sge) { in qib_ud_loopback()
206 if (--qp->r_sge.num_sge) in qib_ud_loopback()
326 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_ud_req()
566 while (qp->r_sge.num_sge) { in qib_ud_rcv()
568 if (--qp->r_sge.num_sge) in qib_ud_rcv()
qib_verbs.c
187 if (--ss->num_sge) in qib_copy_sge()
228 if (--ss->num_sge) in qib_skip_sge()
254 u8 num_sge = ss->num_sge; in qib_count_sge() local
275 if (--num_sge) in qib_count_sge()
313 if (--ss->num_sge) in qib_copy_from_sge()
355 if (wr->num_sge > qp->s_max_sge) in qib_post_one_send()
380 (wr->num_sge == 0 || in qib_post_one_send()
401 if (wr->num_sge) { in qib_post_one_send()
404 for (i = 0; i < wr->num_sge; i++) { in qib_post_one_send()
417 wqe->wr.num_sge = j; in qib_post_one_send()
[all …]
qib_rc.c
51 ss->num_sge = wqe->wr.num_sge; in restart_sge()
138 qp->s_ack_rdma_sge.num_sge = 1; in qib_make_rc_ack()
489 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_rc_req()
1012 for (i = 0; i < wqe->wr.num_sge; i++) { in qib_rc_send_complete()
1068 for (i = 0; i < wqe->wr.num_sge; i++) { in do_rc_completion()
1580 WARN_ON(qp->s_rdma_read_sge.num_sge); in qib_rc_rcv_resp()
2027 while (qp->r_sge.num_sge) { in qib_rc_rcv()
2029 if (--qp->r_sge.num_sge) in qib_rc_rcv()
2078 qp->r_sge.num_sge = 1; in qib_rc_rcv()
2080 qp->r_sge.num_sge = 0; in qib_rc_rcv()
[all …]
qib_qp.c
401 qp->r_sge.num_sge = 0; in qib_reset_qp()
409 while (qp->s_rdma_read_sge.num_sge) { in clear_mr_refs()
411 if (--qp->s_rdma_read_sge.num_sge) in clear_mr_refs()
416 while (qp->r_sge.num_sge) { in clear_mr_refs()
418 if (--qp->r_sge.num_sge) in clear_mr_refs()
427 for (i = 0; i < wqe->wr.num_sge; i++) { in clear_mr_refs()
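
Most of the qib hits above revolve around one pattern: a caller posts an ib_send_wr whose sg_list holds num_sge entries, and the driver checks that count against the QP's limit before packing the entries into its own work-queue entry, dropping zero-length SGEs along the way (qib_post_one_send, qib_init_sge). Below is a minimal sketch of that validate-and-pack step, assuming only the in-kernel struct ib_send_wr/ib_sge types; struct my_swqe and pack_send_wqe are hypothetical stand-ins for the driver's internal WQE and helper, not code from this tree.

#include <linux/kernel.h>
#include <linux/errno.h>
#include <rdma/ib_verbs.h>

struct my_swqe {                        /* hypothetical, simplified send WQE */
	struct ib_sge sg_list[8];
	u32 length;
	u8 num_sge;
};

static int pack_send_wqe(struct my_swqe *wqe, const struct ib_send_wr *wr,
			 u32 max_sge)
{
	int i, j;

	/* reject requests with more SGEs than the QP was created with */
	if (wr->num_sge < 0 || wr->num_sge > max_sge)
		return -EINVAL;
	if (wr->num_sge > ARRAY_SIZE(wqe->sg_list))
		return -EINVAL;

	wqe->length = 0;
	/* copy only non-empty SGEs, the same skip qib_init_sge() performs */
	for (i = 0, j = 0; i < wr->num_sge; i++) {
		if (wr->sg_list[i].length == 0)
			continue;
		wqe->sg_list[j++] = wr->sg_list[i];
		wqe->length += wr->sg_list[i].length;
	}
	wqe->num_sge = j;               /* entries actually kept */
	return 0;
}
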
/linux-3.4.99/drivers/infiniband/hw/ipath/
ipath_srq.c
61 if ((unsigned) wr->num_sge > srq->rq.max_sge) { in ipath_post_srq_receive()
81 wqe->num_sge = wr->num_sge; in ipath_post_srq_receive()
82 for (i = 0; i < wr->num_sge; i++) in ipath_post_srq_receive()
291 p->num_sge = wqe->num_sge; in ipath_modify_srq()
292 for (i = 0; i < wqe->num_sge; i++) in ipath_modify_srq()
ipath_ruc.c
130 for (i = j = 0; i < wqe->num_sge; i++) { in ipath_init_sge()
140 ss->num_sge = j; in ipath_init_sge()
330 sqp->s_sge.num_sge = wqe->wr.num_sge; in ipath_ruc_loopback()
372 qp->r_sge.num_sge = wqe->wr.num_sge; in ipath_ruc_loopback()
413 if (--sqp->s_sge.num_sge) in ipath_ruc_loopback()
ipath_verbs.c
188 if (--ss->num_sge) in ipath_copy_sge()
227 if (--ss->num_sge) in ipath_skip_sge()
253 u8 num_sge = ss->num_sge; in ipath_count_sge() local
274 if (--num_sge) in ipath_count_sge()
313 if (--ss->num_sge) in ipath_copy_from_sge()
360 if (wr->num_sge > qp->s_max_sge) in ipath_post_one_send()
382 (wr->num_sge == 0 || in ipath_post_one_send()
400 if (wr->num_sge) { in ipath_post_one_send()
403 for (i = 0, j = 0; i < wr->num_sge; i++) { in ipath_post_one_send()
416 wqe->wr.num_sge = j; in ipath_post_one_send()
[all …]
ipath_keys.c
227 ss->num_sge = 1; in ipath_rkey_ok()
264 ss->num_sge = 1; in ipath_rkey_ok()
ipath_ud.c
198 if (--swqe->wr.num_sge) in ipath_ud_loopback()
325 qp->s_sge.num_sge = wqe->wr.num_sge; in ipath_make_ud_req()
/linux-3.4.99/net/rds/
iw_send.c
143 send->s_wr.num_sge = 1; in rds_iw_send_init_ring()
269 send->s_wr.num_sge = 1; in rds_iw_send_cq_comp_handler()
459 send->s_wr.num_sge = 2; in rds_iw_xmit_populate_wr()
474 send->s_wr.num_sge = 1; in rds_iw_xmit_populate_wr()
687 &send->s_wr, send->s_wr.num_sge, send->s_wr.next); in rds_iw_xmit()
805 int num_sge; in rds_iw_xmit_rdma() local
858 num_sge = op->op_count; in rds_iw_xmit_rdma()
886 if (num_sge > rds_iwdev->max_sge) { in rds_iw_xmit_rdma()
887 send->s_wr.num_sge = rds_iwdev->max_sge; in rds_iw_xmit_rdma()
888 num_sge -= rds_iwdev->max_sge; in rds_iw_xmit_rdma()
[all …]
ib_send.c
669 send->s_wr.num_sge = 1; in rds_ib_xmit()
684 send->s_wr.num_sge = 2; in rds_ib_xmit()
709 &send->s_wr, send->s_wr.num_sge, send->s_wr.next); in rds_ib_xmit()
825 send->s_wr.num_sge = 1; in rds_ib_xmit_atomic()
892 int num_sge; in rds_ib_xmit_rdma() local
929 num_sge = op->op_count; in rds_ib_xmit_rdma()
942 if (num_sge > max_sge) { in rds_ib_xmit_rdma()
943 send->s_wr.num_sge = max_sge; in rds_ib_xmit_rdma()
944 num_sge -= max_sge; in rds_ib_xmit_rdma()
946 send->s_wr.num_sge = num_sge; in rds_ib_xmit_rdma()
[all …]
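
The RDS hits (iw_send.c and ib_send.c) show the complementary pattern: when an RDMA operation carries more scatter/gather entries than the device's max_sge, it is split across several work requests, each taking at most max_sge entries in num_sge. A hedged sketch of that splitting loop follows; chain_rdma_wrs is an illustrative name, the RDS-specific send-ring types are replaced by a plain ib_send_wr array, and the per-WR remote address/rkey setup is omitted.

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

/*
 * Spread 'total_sge' SGEs over at most 'nr_wrs' work requests, each limited
 * to 'max_sge' entries, and chain them via ->next so a single ib_post_send()
 * submits the whole operation -- the same num_sge bookkeeping as
 * rds_ib_xmit_rdma()/rds_iw_xmit_rdma().
 */
static int chain_rdma_wrs(struct ib_send_wr *wrs, int nr_wrs,
			  struct ib_sge *sges, int total_sge, int max_sge)
{
	int i, used = 0;

	for (i = 0; i < nr_wrs && used < total_sge; i++) {
		int remaining = total_sge - used;

		wrs[i].opcode  = IB_WR_RDMA_WRITE;
		wrs[i].sg_list = sges + used;
		wrs[i].num_sge = remaining > max_sge ? max_sge : remaining;
		used          += wrs[i].num_sge;
		/* wrs[i].wr.rdma.remote_addr/rkey would be filled in here */
		wrs[i].next    = (used < total_sge && i + 1 < nr_wrs) ?
				 &wrs[i + 1] : NULL;
	}
	return used == total_sge ? 0 : -EINVAL;   /* ran out of WRs */
}
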
/linux-3.4.99/drivers/infiniband/hw/ehca/
ehca_reqs.c
76 if (unlikely((recv_wr->num_sge < 0) || in ehca_write_rwqe()
77 (recv_wr->num_sge > ipz_rqueue->act_nr_of_sg))) { in ehca_write_rwqe()
80 recv_wr->num_sge, ipz_rqueue->act_nr_of_sg); in ehca_write_rwqe()
88 wqe_p->nr_of_data_seg = recv_wr->num_sge; in ehca_write_rwqe()
90 for (cnt_ds = 0; cnt_ds < recv_wr->num_sge; cnt_ds++) { in ehca_write_rwqe()
122 send_wr->num_sge, send_wr->send_flags, in trace_send_wr_ud()
137 for (j = 0; j < send_wr->num_sge; j++) { in trace_send_wr_ud()
166 if (unlikely((send_wr->num_sge < 0) || in ehca_write_swqe()
167 (send_wr->num_sge > qp->ipz_squeue.act_nr_of_sg))) { in ehca_write_swqe()
170 send_wr->num_sge, qp->ipz_squeue.act_nr_of_sg); in ehca_write_swqe()
[all …]
/linux-3.4.99/drivers/infiniband/hw/amso1100/
c2_qp.c
834 sizeof(struct c2_data_addr) * ib_wr->num_sge; in c2_post_send()
835 if (ib_wr->num_sge > qp->send_sgl_depth) { in c2_post_send()
844 ib_wr->num_sge, in c2_post_send()
852 (sizeof(struct c2_data_addr) * ib_wr->num_sge); in c2_post_send()
853 if (ib_wr->num_sge > qp->rdma_write_sgl_depth) { in c2_post_send()
867 ib_wr->num_sge, in c2_post_send()
877 if (ib_wr->num_sge > 1) { in c2_post_send()
963 if (ib_wr->num_sge > qp->recv_sgl_depth) { in c2_post_receive()
976 BUG_ON(ib_wr->num_sge >= 256); in c2_post_receive()
979 ib_wr->num_sge, &tot_len, &actual_sge_count); in c2_post_receive()
/linux-3.4.99/drivers/infiniband/hw/cxgb3/
iwch_qp.c
66 if (wr->num_sge > T3_MAX_SGE) in build_rdma_send()
72 for (i = 0; i < wr->num_sge; i++) { in build_rdma_send()
81 wqe->send.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_send()
82 *flit_cnt = 4 + ((wr->num_sge) << 1); in build_rdma_send()
92 if (wr->num_sge > T3_MAX_SGE) in build_rdma_write()
109 for (i = 0; i < wr->num_sge; i++) { in build_rdma_write()
121 wqe->write.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_write()
122 *flit_cnt = 5 + ((wr->num_sge) << 1); in build_rdma_write()
131 if (wr->num_sge > 1) in build_rdma_read()
254 err = iwch_sgl2pbl_map(qhp->rhp, wr->sg_list, wr->num_sge, pbl_addr, in build_rdma_recv()
[all …]
/linux-3.4.99/net/sunrpc/xprtrdma/
svc_rdma_recvfrom.c
450 read_wr.num_sge = in rdma_read_xdr()
455 read_wr.num_sge); in rdma_read_xdr()
462 (read_wr.num_sge == chl_map->ch[ch_no].count)) { in rdma_read_xdr()
506 if (read_wr.num_sge < chl_map->ch[ch_no].count) { in rdma_read_xdr()
507 chl_map->ch[ch_no].count -= read_wr.num_sge; in rdma_read_xdr()
508 chl_map->ch[ch_no].start += read_wr.num_sge; in rdma_read_xdr()
/linux-3.4.99/drivers/infiniband/hw/cxgb4/
qp.c
298 for (i = 0; i < wr->num_sge; i++) { in build_immd()
330 int num_sge, u32 *plenp) in build_isgl() argument
337 for (i = 0; i < num_sge; i++) { in build_isgl()
352 isglp->nsge = cpu_to_be16(num_sge); in build_isgl()
366 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_send()
393 if (wr->num_sge) { in build_rdma_send()
405 wr->sg_list, wr->num_sge, &plen); in build_rdma_send()
409 wr->num_sge * sizeof(struct fw_ri_sge); in build_rdma_send()
431 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_write()
436 if (wr->num_sge) { in build_rdma_write()
[all …]
/linux-3.4.99/drivers/infiniband/ulp/iser/
iser_initiator.c
146 iser_task->desc.num_sge = 2; in iser_prepare_write_cmd()
164 tx_desc->num_sge = 1; in iser_create_send_desc()
359 tx_desc->num_sge = 2; in iser_send_data_out()
423 mdesc->num_sge = 2; in iser_send_control()
iser_verbs.c
698 rx_wr.num_sge = 1; in iser_post_recvl()
721 rx_wr->num_sge = 1; in iser_post_recvm()
756 send_wr.num_sge = tx_desc->num_sge; in iser_post_send()
/linux-3.4.99/drivers/infiniband/hw/mthca/
mthca_srq.c
514 if (unlikely(wr->num_sge > srq->max_gs)) { in mthca_tavor_post_srq_recv()
521 for (i = 0; i < wr->num_sge; ++i) { in mthca_tavor_post_srq_recv()
607 if (unlikely(wr->num_sge > srq->max_gs)) { in mthca_arbel_post_srq_recv()
613 for (i = 0; i < wr->num_sge; ++i) { in mthca_arbel_post_srq_recv()
/linux-3.4.99/drivers/infiniband/ulp/ipoib/
ipoib_verbs.c
220 priv->rx_wr.num_sge = IPOIB_UD_RX_SG; in ipoib_transport_dev_init()
223 priv->rx_wr.num_sge = 1; in ipoib_transport_dev_init()
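
On the receive side, the ipoib hit simply picks how many SGEs one incoming datagram may be scattered into: IPOIB_UD_RX_SG when UD scatter/gather receive is in use, otherwise 1. A small sketch of the single-SGE fallback case is below, assuming the buffer is already DMA-mapped and the lkey comes from a registered MR (or the device's local DMA lkey); post_one_recv is an illustrative helper name, not code from the tree.

#include <rdma/ib_verbs.h>

/* Post one receive buffer described by a single SGE (num_sge == 1). */
static int post_one_recv(struct ib_qp *qp, u64 dma_addr, u32 len, u32 lkey,
			 u64 wr_id)
{
	struct ib_sge sge = {
		.addr   = dma_addr,
		.length = len,
		.lkey   = lkey,
	};
	struct ib_recv_wr wr = {
		.wr_id   = wr_id,
		.sg_list = &sge,
		.num_sge = 1,           /* one buffer per incoming message */
	};
	struct ib_recv_wr *bad_wr;

	return ib_post_recv(qp, &wr, &bad_wr);
}
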
/linux-3.4.99/drivers/infiniband/core/
uverbs_cmd.c
1956 if (user_wr->num_sge + sg_ind > cmd.sge_count) { in ib_uverbs_post_send()
1962 user_wr->num_sge * sizeof (struct ib_sge), in ib_uverbs_post_send()
1977 next->num_sge = user_wr->num_sge; in ib_uverbs_post_send()
2024 if (next->num_sge) { in ib_uverbs_post_send()
2031 next->num_sge * sizeof (struct ib_sge))) { in ib_uverbs_post_send()
2035 sg_ind += next->num_sge; in ib_uverbs_post_send()
2102 if (user_wr->num_sge + sg_ind > sge_count) { in ib_uverbs_unmarshall_recv()
2108 user_wr->num_sge * sizeof (struct ib_sge), in ib_uverbs_unmarshall_recv()
2123 next->num_sge = user_wr->num_sge; in ib_uverbs_unmarshall_recv()
2125 if (next->num_sge) { in ib_uverbs_unmarshall_recv()
[all …]
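
ib_uverbs_post_send() above is the kernel half of the userspace verbs path: it copies user_wr->num_sge ib_sge entries from the user buffer and re-validates the count before handing the request to the driver. For context, here is a hedged sketch of the userspace half, assuming libibverbs (which is not part of the kernel tree searched above); post_two_sge_send is an illustrative helper, and the MR and QP are assumed to have been created elsewhere.

#include <stdint.h>
#include <infiniband/verbs.h>

/* Gather a header and a payload buffer into one send via num_sge = 2. */
static int post_two_sge_send(struct ibv_qp *qp, struct ibv_mr *mr,
			     void *hdr, uint32_t hdr_len,
			     void *payload, uint32_t payload_len)
{
	struct ibv_sge sge[2] = {
		{ .addr = (uintptr_t)hdr,     .length = hdr_len,     .lkey = mr->lkey },
		{ .addr = (uintptr_t)payload, .length = payload_len, .lkey = mr->lkey },
	};
	struct ibv_send_wr wr = {
		.wr_id      = 1,
		.sg_list    = sge,
		.num_sge    = 2,        /* what uverbs copies and validates */
		.opcode     = IBV_WR_SEND,
		.send_flags = IBV_SEND_SIGNALED,
	};
	struct ibv_send_wr *bad_wr;

	return ibv_post_send(qp, &wr, &bad_wr);
}
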
/linux-3.4.99/drivers/infiniband/hw/mlx4/
srq.c
322 if (unlikely(wr->num_sge > srq->msrq.max_gs)) { in mlx4_ib_post_srq_recv()
340 for (i = 0; i < wr->num_sge; ++i) { in mlx4_ib_post_srq_recv()
