
Searched refs:ibmr (Results 1 – 25 of 59) sorted by relevance


/linux-6.1.9/net/rds/
ib_frmr.c
36 rds_transition_frwr_state(struct rds_ib_mr *ibmr, in rds_transition_frwr_state() argument
40 if (cmpxchg(&ibmr->u.frmr.fr_state, in rds_transition_frwr_state()
47 atomic_dec(&ibmr->ic->i_fastreg_inuse_count); in rds_transition_frwr_state()
57 struct rds_ib_mr *ibmr = NULL; in rds_ib_alloc_frmr() local
66 ibmr = rds_ib_try_reuse_ibmr(pool); in rds_ib_alloc_frmr()
67 if (ibmr) in rds_ib_alloc_frmr()
68 return ibmr; in rds_ib_alloc_frmr()
70 ibmr = kzalloc_node(sizeof(*ibmr), GFP_KERNEL, in rds_ib_alloc_frmr()
72 if (!ibmr) { in rds_ib_alloc_frmr()
77 frmr = &ibmr->u.frmr; in rds_ib_alloc_frmr()
[all …]
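
The rds_ib_alloc_frmr() fragments above follow a reuse-before-allocate order: pull a recycled MR off the per-device pool if one is available, otherwise allocate a fresh one on the right NUMA node. Below is a minimal sketch of that order only; example_pool and try_reuse_from_pool() are hypothetical stand-ins for the RDS pool helpers, and the error path is simplified.

#include <linux/slab.h>
#include <linux/err.h>

struct example_pool;                                    /* stand-in for the RDS MR pool */
struct rds_ib_mr *try_reuse_from_pool(struct example_pool *pool); /* cf. rds_ib_try_reuse_ibmr() */

static struct rds_ib_mr *example_alloc_frmr(struct example_pool *pool, int node)
{
        struct rds_ib_mr *ibmr;

        ibmr = try_reuse_from_pool(pool);               /* recycled MR, already set up */
        if (ibmr)
                return ibmr;

        ibmr = kzalloc_node(sizeof(*ibmr), GFP_KERNEL, node); /* NUMA-aware fallback */
        if (!ibmr)
                return ERR_PTR(-ENOMEM);                /* simplified error handling */
        /* ... initialize ibmr->u.frmr state before handing the MR out ... */
        return ibmr;
}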
ib_rdma.c
200 struct rds_ib_mr *ibmr = NULL; in rds_ib_reuse_mr() local
208 ibmr = llist_entry(ret, struct rds_ib_mr, llnode); in rds_ib_reuse_mr()
215 return ibmr; in rds_ib_reuse_mr()
220 struct rds_ib_mr *ibmr = trans_private; in rds_ib_sync_mr() local
221 struct rds_ib_device *rds_ibdev = ibmr->device; in rds_ib_sync_mr()
223 if (ibmr->odp) in rds_ib_sync_mr()
228 ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
229 ibmr->sg_dma_len, DMA_BIDIRECTIONAL); in rds_ib_sync_mr()
232 ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
233 ibmr->sg_dma_len, DMA_BIDIRECTIONAL); in rds_ib_sync_mr()
[all …]
/linux-6.1.9/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_mr.c
94 mr->ibmr.lkey = resp->lkey; in pvrdma_get_dma_mr()
95 mr->ibmr.rkey = resp->rkey; in pvrdma_get_dma_mr()
97 return &mr->ibmr; in pvrdma_get_dma_mr()
182 mr->ibmr.lkey = resp->lkey; in pvrdma_reg_user_mr()
183 mr->ibmr.rkey = resp->rkey; in pvrdma_reg_user_mr()
185 return &mr->ibmr; in pvrdma_reg_user_mr()
254 mr->ibmr.lkey = resp->lkey; in pvrdma_alloc_mr()
255 mr->ibmr.rkey = resp->rkey; in pvrdma_alloc_mr()
259 return &mr->ibmr; in pvrdma_alloc_mr()
277 int pvrdma_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata) in pvrdma_dereg_mr() argument
[all …]
pvrdma.h
141 struct ib_mr ibmr; member
284 static inline struct pvrdma_user_mr *to_vmr(struct ib_mr *ibmr) in to_vmr() argument
286 return container_of(ibmr, struct pvrdma_user_mr, ibmr); in to_vmr()
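
The pvrdma.h lines above show the idiom that recurs throughout these results (rdmavt mr.h, usnic_ib.h, rxe_verbs.h, erdma_verbs.h, mthca_provider.h, ocrdma.h, qedr.h): the driver embeds a struct ib_mr inside its own MR type and converts back with container_of(). A generic sketch of that pattern for a hypothetical "foo" driver:

#include <linux/container_of.h>
#include <rdma/ib_verbs.h>

/* Driver-private MR wrapping the ib_mr that is handed to the RDMA core. */
struct foo_mr {
        struct ib_mr ibmr;      /* returned to the core as &mr->ibmr */
        u32 key;                /* driver-private state (hypothetical) */
};

/* Recover the wrapper from the ib_mr pointer the core passes back in. */
static inline struct foo_mr *to_fmr(struct ib_mr *ibmr)
{
        return container_of(ibmr, struct foo_mr, ibmr);
}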
/linux-6.1.9/drivers/infiniband/hw/mlx5/
mr.c
847 mr->ibmr.lkey = mr->mmkey.key; in mlx5_ib_get_dma_mr()
848 mr->ibmr.rkey = mr->mmkey.key; in mlx5_ib_get_dma_mr()
851 return &mr->ibmr; in mlx5_ib_get_dma_mr()
896 mr->ibmr.lkey = mr->mmkey.key; in set_mr_fields()
897 mr->ibmr.rkey = mr->mmkey.key; in set_mr_fields()
898 mr->ibmr.length = length; in set_mr_fields()
899 mr->ibmr.device = &dev->ib_dev; in set_mr_fields()
900 mr->ibmr.iova = iova; in set_mr_fields()
950 mr->ibmr.pd = pd; in alloc_cacheable_mr()
981 mr->ibmr.pd = pd; in reg_create()
[all …]
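
set_mr_fields() above, like the get_dma_mr/reg_user_mr paths in pvrdma_mr.c and in the mlx4, hns and erdma blocks further down, publishes the hardware key through the embedded ib_mr before returning &mr->ibmr to the core. A hedged sketch of that step, reusing the hypothetical struct foo_mr from the earlier sketch; struct foo_dev and its ib_dev field are assumptions modeled on the mlx5 code shown here.

#include <rdma/ib_verbs.h>

struct foo_dev {
        struct ib_device ib_dev;        /* hypothetical device wrapper */
};

static void foo_set_mr_fields(struct foo_dev *dev, struct foo_mr *mr,
                              u64 iova, u64 length, u32 key)
{
        mr->ibmr.lkey = key;            /* local access key */
        mr->ibmr.rkey = key;            /* remote access key (same value here) */
        mr->ibmr.length = length;       /* bytes covered by the registration */
        mr->ibmr.iova = iova;           /* starting virtual address */
        mr->ibmr.device = &dev->ib_dev; /* assumed field layout, as in mlx5_ib */
}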
restrack.c
81 static int fill_stat_mr_entry(struct sk_buff *msg, struct ib_mr *ibmr) in fill_stat_mr_entry() argument
83 struct mlx5_ib_mr *mr = to_mmr(ibmr); in fill_stat_mr_entry()
115 static int fill_res_mr_entry_raw(struct sk_buff *msg, struct ib_mr *ibmr) in fill_res_mr_entry_raw() argument
117 struct mlx5_ib_mr *mr = to_mmr(ibmr); in fill_res_mr_entry_raw()
123 static int fill_res_mr_entry(struct sk_buff *msg, struct ib_mr *ibmr) in fill_res_mr_entry() argument
125 struct mlx5_ib_mr *mr = to_mmr(ibmr); in fill_res_mr_entry()
odp.c
137 pklm->key = cpu_to_be32(mtt->ibmr.lkey); in populate_klm()
205 mlx5_ib_dereg_mr(&mr->ibmr, NULL); in free_implicit_child_mr_work()
429 mr->ibmr.pd = imr->ibmr.pd; in implicit_get_child_mr()
430 mr->ibmr.device = &mr_to_mdev(imr)->ib_dev; in implicit_get_child_mr()
432 mr->ibmr.lkey = mr->mmkey.key; in implicit_get_child_mr()
433 mr->ibmr.rkey = mr->mmkey.key; in implicit_get_child_mr()
434 mr->ibmr.iova = idx * MLX5_IMR_MTT_SIZE; in implicit_get_child_mr()
477 mlx5_ib_dereg_mr(&mr->ibmr, NULL); in implicit_get_child_mr()
505 imr->ibmr.pd = &pd->ibpd; in mlx5_ib_alloc_implicit_mr()
506 imr->ibmr.iova = 0; in mlx5_ib_alloc_implicit_mr()
[all …]
wr.c
198 seg->log2_page_size = ilog2(mr->ibmr.page_size); in set_reg_mkey_seg()
206 seg->start_addr = cpu_to_be64(mr->ibmr.iova); in set_reg_mkey_seg()
207 seg->len = cpu_to_be64(mr->ibmr.length); in set_reg_mkey_seg()
436 data_key = pi_mr->ibmr.lkey; in set_sig_data_segment()
440 prot_key = pi_mr->ibmr.lkey; in set_sig_data_segment()
564 struct ib_sig_attrs *sig_attrs = sig_mr->ibmr.sig_attrs; in set_pi_umr_wr()
576 region_len = pi_mr->ibmr.length; in set_pi_umr_wr()
867 reg_pi_wr.mr = &pi_mr->ibmr; in handle_reg_mr_integrity()
869 reg_pi_wr.key = pi_mr->ibmr.rkey; in handle_reg_mr_integrity()
889 pa_pi_mr.ibmr.lkey = mr->ibmr.pd->local_dma_lkey; in handle_reg_mr_integrity()
[all …]
/linux-6.1.9/drivers/infiniband/sw/rdmavt/
mr.h
12 struct ib_mr ibmr; member
17 static inline struct rvt_mr *to_imr(struct ib_mr *ibmr) in to_imr() argument
19 return container_of(ibmr, struct rvt_mr, ibmr); in to_imr()
30 int rvt_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata);
33 int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
mr.c
259 mr->ibmr.lkey = mr->mr.lkey; in __rvt_alloc_mr()
260 mr->ibmr.rkey = mr->mr.lkey; in __rvt_alloc_mr()
314 ret = &mr->ibmr; in rvt_get_dma_mr()
387 return &mr->ibmr; in rvt_reg_user_mr()
509 int rvt_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata) in rvt_dereg_mr() argument
511 struct rvt_mr *mr = to_imr(ibmr); in rvt_dereg_mr()
547 return &mr->ibmr; in rvt_alloc_mr()
557 static int rvt_set_page(struct ib_mr *ibmr, u64 addr) in rvt_set_page() argument
559 struct rvt_mr *mr = to_imr(ibmr); in rvt_set_page()
588 int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, in rvt_map_mr_sg() argument
[all …]
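
rvt_map_mr_sg() and rvt_set_page() above split scatterlist mapping between the core and the driver: ib_sg_to_pages() walks the DMA-mapped pages and calls a driver set_page callback once per page; the cxgb4 and rxe blocks below show the same callback shape. A sketch under the same hypothetical "foo" driver, assuming foo_mr is extended with a pages[] array plus npages/max_pages counters (all hypothetical):

static int foo_set_page(struct ib_mr *ibmr, u64 addr)
{
        struct foo_mr *mr = to_fmr(ibmr);

        if (mr->npages >= mr->max_pages)        /* page list is full */
                return -ENOMEM;
        mr->pages[mr->npages++] = addr;         /* record one page address */
        return 0;
}

static int foo_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
                         int sg_nents, unsigned int *sg_offset)
{
        struct foo_mr *mr = to_fmr(ibmr);

        mr->npages = 0;
        /* The core iterates the SG list and invokes foo_set_page() per page. */
        return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, foo_set_page);
}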
trace_mr.h
144 TP_PROTO(struct ib_mr *ibmr, int sg_nents, unsigned int *sg_offset),
145 TP_ARGS(ibmr, sg_nents, sg_offset),
147 RDI_DEV_ENTRY(ib_to_rvt(to_imr(ibmr)->mr.pd->device))
156 RDI_DEV_ASSIGN(ib_to_rvt(to_imr(ibmr)->mr.pd->device));
157 __entry->ibmr_iova = ibmr->iova;
158 __entry->iova = to_imr(ibmr)->mr.iova;
159 __entry->user_base = to_imr(ibmr)->mr.user_base;
160 __entry->ibmr_length = to_imr(ibmr)->mr.length;
/linux-6.1.9/drivers/infiniband/hw/mlx4/
mr.c
76 mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key; in mlx4_ib_get_dma_mr()
79 return &mr->ibmr; in mlx4_ib_get_dma_mr()
441 mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key; in mlx4_ib_reg_user_mr()
442 mr->ibmr.page_size = 1U << shift; in mlx4_ib_reg_user_mr()
444 return &mr->ibmr; in mlx4_ib_reg_user_mr()
587 struct ib_device *device = mr->ibmr.device; in mlx4_free_priv_pages()
596 int mlx4_ib_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata) in mlx4_ib_dereg_mr() argument
598 struct mlx4_ib_mr *mr = to_mmr(ibmr); in mlx4_ib_dereg_mr()
603 ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr); in mlx4_ib_dereg_mr()
673 mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key; in mlx4_ib_alloc_mr()
[all …]
/linux-6.1.9/drivers/infiniband/hw/usnic/
usnic_ib.h
69 struct ib_mr ibmr; member
125 struct usnic_ib_mr *to_umr(struct ib_mr *ibmr) in to_umr() argument
127 return container_of(ibmr, struct usnic_ib_mr, ibmr); in to_umr()
/linux-6.1.9/drivers/infiniband/hw/hns/
hns_roce_mr.c
210 mr->ibmr.rkey = mr->ibmr.lkey = mr->key; in hns_roce_get_dma_mr()
212 return &mr->ibmr; in hns_roce_get_dma_mr()
251 mr->ibmr.rkey = mr->ibmr.lkey = mr->key; in hns_roce_reg_user_mr()
253 return &mr->ibmr; in hns_roce_reg_user_mr()
264 struct ib_mr *hns_roce_rereg_user_mr(struct ib_mr *ibmr, int flags, u64 start, in hns_roce_rereg_user_mr() argument
269 struct hns_roce_dev *hr_dev = to_hr_dev(ibmr->device); in hns_roce_rereg_user_mr()
271 struct hns_roce_mr *mr = to_hr_mr(ibmr); in hns_roce_rereg_user_mr()
338 int hns_roce_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata) in hns_roce_dereg_mr() argument
340 struct hns_roce_dev *hr_dev = to_hr_dev(ibmr->device); in hns_roce_dereg_mr()
341 struct hns_roce_mr *mr = to_hr_mr(ibmr); in hns_roce_dereg_mr()
[all …]
/linux-6.1.9/net/sunrpc/xprtrdma/
frwr_ops.c
292 struct ib_mr *ibmr; in frwr_map() local
318 ibmr = mr->mr_ibmr; in frwr_map()
319 n = ib_map_mr_sg(ibmr, mr->mr_sg, dma_nents, NULL, PAGE_SIZE); in frwr_map()
323 ibmr->iova &= 0x00000000ffffffff; in frwr_map()
324 ibmr->iova |= ((u64)be32_to_cpu(xid)) << 32; in frwr_map()
325 key = (u8)(ibmr->rkey & 0x000000FF); in frwr_map()
326 ib_update_fast_reg_key(ibmr, ++key); in frwr_map()
329 reg_wr->mr = ibmr; in frwr_map()
330 reg_wr->key = ibmr->rkey; in frwr_map()
335 mr->mr_handle = ibmr->rkey; in frwr_map()
[all …]
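
frwr_map() above refreshes the MR on every registration: it builds the page list from the scatterlist, folds the RPC XID into the upper 32 bits of the iova, and bumps the low byte of the rkey with ib_update_fast_reg_key() so a handle from a previous use can no longer match. A condensed sketch of those steps only, assuming the caller has already DMA-mapped the scatterlist and owns the ib_reg_wr; the function name is illustrative, not the xprtrdma API.

#include <rdma/ib_verbs.h>

static int example_frwr_prepare(struct ib_mr *ibmr, struct scatterlist *sg,
                                int dma_nents, __be32 xid,
                                struct ib_reg_wr *reg_wr)
{
        u8 key;
        int n;

        /* Build the MR's page list from the DMA-mapped scatterlist. */
        n = ib_map_mr_sg(ibmr, sg, dma_nents, NULL, PAGE_SIZE);
        if (n < dma_nents)
                return n < 0 ? n : -EINVAL;

        /* Tag the registration with the RPC XID in the upper half of the iova. */
        ibmr->iova &= 0x00000000ffffffff;
        ibmr->iova |= ((u64)be32_to_cpu(xid)) << 32;

        /* Rotate the low byte of the key so stale rkeys stop matching. */
        key = (u8)(ibmr->rkey & 0x000000FF);
        ib_update_fast_reg_key(ibmr, ++key);

        reg_wr->mr = ibmr;
        reg_wr->key = ibmr->rkey;
        return 0;
}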
/linux-6.1.9/drivers/infiniband/sw/rxe/
rxe_mr.c
35 if (iova < mr->ibmr.iova || length > mr->ibmr.length || in mr_check_range()
36 iova > mr->ibmr.iova + mr->ibmr.length - length) in mr_check_range()
61 mr->lkey = mr->ibmr.lkey = lkey; in rxe_mr_init()
62 mr->rkey = mr->ibmr.rkey = rkey; in rxe_mr_init()
214 size_t offset = iova - mr->ibmr.iova + mr->offset; in lookup_iova()
581 if (unlikely(qp->ibqp.pd != mr->ibmr.pd)) { in rxe_reg_fast_mr()
597 mr->ibmr.iova = wqe->wr.wr.reg.mr->iova; in rxe_reg_fast_mr()
603 int rxe_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata) in rxe_dereg_mr() argument
605 struct rxe_mr *mr = to_rmr(ibmr); in rxe_dereg_mr()
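
mr_check_range() above validates an incoming access against the region recorded in the embedded ib_mr. Restating the two fragments as one condition, with a hypothetical name and an illustrative return code: an access of [iova, iova + length) must lie entirely inside [mr->ibmr.iova, mr->ibmr.iova + mr->ibmr.length).

static int example_mr_check_range(struct rxe_mr *mr, u64 iova, size_t length)
{
        if (iova < mr->ibmr.iova || length > mr->ibmr.length ||
            iova > mr->ibmr.iova + mr->ibmr.length - length)
                return -EINVAL;         /* outside the registered range */
        return 0;
}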
rxe_verbs.c
904 mr->ibmr.pd = ibpd; in rxe_get_dma_mr()
909 return &mr->ibmr; in rxe_get_dma_mr()
931 mr->ibmr.pd = ibpd; in rxe_reg_user_mr()
939 return &mr->ibmr; in rxe_reg_user_mr()
965 mr->ibmr.pd = ibpd; in rxe_alloc_mr()
973 return &mr->ibmr; in rxe_alloc_mr()
981 static int rxe_set_page(struct ib_mr *ibmr, u64 addr) in rxe_set_page() argument
983 struct rxe_mr *mr = to_rmr(ibmr); in rxe_set_page()
994 buf->size = ibmr->page_size; in rxe_set_page()
1000 static int rxe_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, in rxe_map_mr_sg() argument
[all …]
rxe_mw.c
117 if (unlikely(wqe->wr.wr.mw.length > mr->ibmr.length)) { in rxe_check_bind_mw()
123 if (unlikely((wqe->wr.wr.mw.addr < mr->ibmr.iova) || in rxe_check_bind_mw()
125 (mr->ibmr.iova + mr->ibmr.length)))) { in rxe_check_bind_mw()
rxe_verbs.h
300 struct ib_mr ibmr; member
450 return mr ? container_of(mr, struct rxe_mr, ibmr) : NULL; in to_rmr()
465 return to_rpd(mr->ibmr.pd); in mr_pd()
/linux-6.1.9/drivers/infiniband/hw/erdma/
erdma_verbs.c
110 struct erdma_pd *pd = to_epd(mr->ibmr.pd); in regmr_cmd()
117 FIELD_PREP(ERDMA_CMD_MR_KEY_MASK, mr->ibmr.lkey & 0xFF) | in regmr_cmd()
118 FIELD_PREP(ERDMA_CMD_MR_MPT_IDX_MASK, mr->ibmr.lkey >> 8); in regmr_cmd()
786 mr->ibmr.lkey = stag; in erdma_get_dma_mr()
787 mr->ibmr.rkey = stag; in erdma_get_dma_mr()
788 mr->ibmr.pd = ibpd; in erdma_get_dma_mr()
794 return &mr->ibmr; in erdma_get_dma_mr()
798 mr->ibmr.lkey >> 8); in erdma_get_dma_mr()
830 mr->ibmr.lkey = stag; in erdma_ib_alloc_mr()
831 mr->ibmr.rkey = stag; in erdma_ib_alloc_mr()
[all …]
erdma_verbs.h
102 struct ib_mr ibmr; member
270 static inline struct erdma_mr *to_emr(struct ib_mr *ibmr) in to_emr() argument
272 return container_of(ibmr, struct erdma_mr, ibmr); in to_emr()
317 int erdma_dereg_mr(struct ib_mr *ibmr, struct ib_udata *data);
330 int erdma_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
/linux-6.1.9/drivers/infiniband/hw/mthca/
mthca_provider.h
74 struct ib_mr ibmr; member
286 static inline struct mthca_mr *to_mmr(struct ib_mr *ibmr) in to_mmr() argument
288 return container_of(ibmr, struct mthca_mr, ibmr); in to_mmr()
/linux-6.1.9/drivers/infiniband/hw/cxgb4/
mem.c
384 mhp->ibmr.rkey = mhp->ibmr.lkey = stag; in finish_mem_reg()
385 mhp->ibmr.length = mhp->attr.len; in finish_mem_reg()
386 mhp->ibmr.page_size = 1U << (mhp->attr.page_size + 12); in finish_mem_reg()
478 return &mhp->ibmr; in c4iw_get_dma_mr()
582 return &mhp->ibmr; in c4iw_reg_user_mr()
653 mhp->ibmr.rkey = mhp->ibmr.lkey = stag; in c4iw_alloc_mr()
660 return &(mhp->ibmr); in c4iw_alloc_mr()
678 static int c4iw_set_page(struct ib_mr *ibmr, u64 addr) in c4iw_set_page() argument
680 struct c4iw_mr *mhp = to_c4iw_mr(ibmr); in c4iw_set_page()
690 int c4iw_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents, in c4iw_map_mr_sg() argument
[all …]
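
finish_mem_reg() above stores the page size as a log2 offset from 4 KiB (attr.page_size == 0 means 4096-byte pages), so publishing it in the ib_mr is a single shift. A small illustrative helper, not part of the cxgb4 code:

static inline u32 example_c4iw_page_size_bytes(u8 encoded_page_size)
{
        /* encoded 0 -> 4 KiB, 1 -> 8 KiB, 2 -> 16 KiB, ... */
        return 1U << (encoded_page_size + 12);
}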
/linux-6.1.9/drivers/infiniband/hw/ocrdma/
ocrdma.h
192 struct ib_mr ibmr; member
481 static inline struct ocrdma_mr *get_ocrdma_mr(struct ib_mr *ibmr) in get_ocrdma_mr() argument
483 return container_of(ibmr, struct ocrdma_mr, ibmr); in get_ocrdma_mr()
/linux-6.1.9/drivers/infiniband/hw/qedr/
qedr.h
485 struct ib_mr ibmr; member
604 static inline struct qedr_mr *get_qedr_mr(struct ib_mr *ibmr) in get_qedr_mr() argument
606 return container_of(ibmr, struct qedr_mr, ibmr); in get_qedr_mr()
