
Searched refs:uar (Results 1 – 25 of 51) sorted by relevance

/linux-6.6.21/drivers/net/ethernet/mellanox/mlx4/
pd.c
145 int mlx4_uar_alloc(struct mlx4_dev *dev, struct mlx4_uar *uar) in mlx4_uar_alloc() argument
149 uar->index = mlx4_bitmap_alloc(&mlx4_priv(dev)->uar_table.bitmap); in mlx4_uar_alloc()
150 if (uar->index == -1) in mlx4_uar_alloc()
154 offset = uar->index % ((int)pci_resource_len(dev->persist->pdev, in mlx4_uar_alloc()
158 offset = uar->index; in mlx4_uar_alloc()
159 uar->pfn = (pci_resource_start(dev->persist->pdev, 2) >> PAGE_SHIFT) in mlx4_uar_alloc()
161 uar->map = NULL; in mlx4_uar_alloc()
166 void mlx4_uar_free(struct mlx4_dev *dev, struct mlx4_uar *uar) in mlx4_uar_free() argument
168 mlx4_bitmap_free(&mlx4_priv(dev)->uar_table.bitmap, uar->index, MLX4_USE_RR); in mlx4_uar_free()
175 struct mlx4_uar *uar; in mlx4_bf_alloc() local
[all …]
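
The pd.c hits above show the pattern shared by the older mlx4 and mthca HCA drivers: a UAR is just an index drawn from a driver-owned bitmap, and the doorbell page it names sits at that index inside PCI BAR 2, so its page frame number is the BAR-2 start shifted down by PAGE_SHIFT plus the index. Below is a minimal sketch of that arithmetic; my_uar, my_uar_alloc and my_bitmap_alloc are hypothetical stand-ins for the driver's private structures, and the slave-offset handling visible at lines 154–158 is deliberately left out.

```c
#include <linux/pci.h>
#include <linux/mm.h>
#include <linux/errno.h>

/* Hypothetical stand-in for the driver's private UAR bookkeeping. */
struct my_uar {
	int		index;	/* slot handed out by the driver's bitmap   */
	unsigned long	pfn;	/* page frame of the doorbell page in BAR 2 */
	void __iomem	*map;	/* kernel mapping, created lazily           */
};

int my_bitmap_alloc(void);	/* hypothetical: returns a free slot or -1  */

int my_uar_alloc(struct pci_dev *pdev, struct my_uar *uar)
{
	uar->index = my_bitmap_alloc();
	if (uar->index == -1)
		return -ENOMEM;

	/* One doorbell page per UAR index, laid out linearly in BAR 2. */
	uar->pfn = (pci_resource_start(pdev, 2) >> PAGE_SHIFT) + uar->index;
	uar->map = NULL;	/* mapped only if the kernel itself needs it */
	return 0;
}
```
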
cq.c
343 struct mlx4_mtt *mtt, struct mlx4_uar *uar, u64 db_rec, in mlx4_cq_alloc() argument
383 mlx4_to_hw_uar_index(dev, uar->index)); in mlx4_cq_alloc()
412 cq->uar = uar; in mlx4_cq_alloc()
/linux-6.6.21/drivers/infiniband/hw/mthca/
mthca_uar.c
38 int mthca_uar_alloc(struct mthca_dev *dev, struct mthca_uar *uar) in mthca_uar_alloc() argument
40 uar->index = mthca_alloc(&dev->uar_table.alloc); in mthca_uar_alloc()
41 if (uar->index == -1) in mthca_uar_alloc()
44 uar->pfn = (pci_resource_start(dev->pdev, 2) >> PAGE_SHIFT) + uar->index; in mthca_uar_alloc()
49 void mthca_uar_free(struct mthca_dev *dev, struct mthca_uar *uar) in mthca_uar_free() argument
51 mthca_free(&dev->uar_table.alloc, uar->index); in mthca_uar_free()
mthca_provider.c
303 err = mthca_uar_alloc(to_mdev(ibdev), &context->uar); in mthca_alloc_ucontext()
310 mthca_uar_free(to_mdev(ibdev), &context->uar); in mthca_alloc_ucontext()
315 mthca_cleanup_user_db_tab(to_mdev(ibdev), &context->uar, context->db_tab); in mthca_alloc_ucontext()
316 mthca_uar_free(to_mdev(ibdev), &context->uar); in mthca_alloc_ucontext()
327 mthca_cleanup_user_db_tab(to_mdev(context->device), &to_mucontext(context)->uar, in mthca_dealloc_ucontext()
329 mthca_uar_free(to_mdev(context->device), &to_mucontext(context)->uar); in mthca_dealloc_ucontext()
341 to_mucontext(context)->uar.pfn, in mthca_mmap_uar()
408 err = mthca_map_user_db(to_mdev(ibsrq->device), &context->uar, in mthca_create_srq()
423 mthca_unmap_user_db(to_mdev(ibsrq->device), &context->uar, in mthca_create_srq()
446 mthca_unmap_user_db(to_mdev(srq->device), &context->uar, in mthca_destroy_srq()
[all …]
mthca_memfree.h
165 int mthca_map_user_db(struct mthca_dev *dev, struct mthca_uar *uar,
167 void mthca_unmap_user_db(struct mthca_dev *dev, struct mthca_uar *uar,
170 void mthca_cleanup_user_db_tab(struct mthca_dev *dev, struct mthca_uar *uar,
mthca_memfree.c
439 static u64 mthca_uarc_virt(struct mthca_dev *dev, struct mthca_uar *uar, int page) in mthca_uarc_virt() argument
442 uar->index * dev->uar_table.uarc_size + in mthca_uarc_virt()
446 int mthca_map_user_db(struct mthca_dev *dev, struct mthca_uar *uar, in mthca_map_user_db() argument
491 mthca_uarc_virt(dev, uar, i)); in mthca_map_user_db()
507 void mthca_unmap_user_db(struct mthca_dev *dev, struct mthca_uar *uar, in mthca_unmap_user_db() argument
549 void mthca_cleanup_user_db_tab(struct mthca_dev *dev, struct mthca_uar *uar, in mthca_cleanup_user_db_tab() argument
559 mthca_UNMAP_ICM(dev, mthca_uarc_virt(dev, uar, i), 1); in mthca_cleanup_user_db_tab()
mthca_srq.c
54 __be32 uar; member
113 context->uar = cpu_to_be32(ucontext->uar.index); in mthca_tavor_init_srq_context()
115 context->uar = cpu_to_be32(dev->driver_uar.index); in mthca_tavor_init_srq_context()
141 context->logstride_usrpage |= cpu_to_be32(ucontext->uar.index); in mthca_arbel_init_srq_context()
/linux-6.6.21/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_doorbell.c
83 int pvrdma_uar_alloc(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar) in pvrdma_uar_alloc() argument
108 uar->index = obj; in pvrdma_uar_alloc()
109 uar->pfn = (pci_resource_start(dev->pdev, PVRDMA_PCI_RESOURCE_UAR) >> in pvrdma_uar_alloc()
110 PAGE_SHIFT) + uar->index; in pvrdma_uar_alloc()
115 void pvrdma_uar_free(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar) in pvrdma_uar_free() argument
121 obj = uar->index & (tbl->max - 1); in pvrdma_uar_free()
pvrdma_verbs.c
329 ret = pvrdma_uar_alloc(vdev, &context->uar); in pvrdma_alloc_ucontext()
335 cmd->pfn = context->uar.pfn; in pvrdma_alloc_ucontext()
337 cmd->pfn64 = context->uar.pfn; in pvrdma_alloc_ucontext()
353 pvrdma_uar_free(vdev, &context->uar); in pvrdma_alloc_ucontext()
361 pvrdma_uar_free(vdev, &context->uar); in pvrdma_alloc_ucontext()
385 pvrdma_uar_free(to_vdev(ibcontext->device), &context->uar); in pvrdma_dealloc_ucontext()
413 if (io_remap_pfn_range(vma, start, context->uar.pfn, size, in pvrdma_mmap()
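 
The pvrdma_verbs.c hits show the other half of the story: the pfn stored at alloc time (line 109 in pvrdma_doorbell.c) is exactly what the mmap handler feeds to io_remap_pfn_range() at line 413 so userspace can write doorbells directly. A hedged sketch of such an mmap path follows; my_ucontext and my_mmap_uar are hypothetical, real drivers also validate vma->vm_pgoff, and mapping the page uncached is the typical choice for device doorbell memory rather than a claim about pvrdma specifically.

```c
#include <linux/mm.h>
#include <linux/errno.h>

/* Hypothetical ucontext that remembers the UAR pfn computed at alloc time. */
struct my_ucontext {
	unsigned long uar_pfn;
};

static int my_mmap_uar(struct my_ucontext *ctx, struct vm_area_struct *vma)
{
	unsigned long size = vma->vm_end - vma->vm_start;

	if (size != PAGE_SIZE)		/* exactly one doorbell page */
		return -EINVAL;

	/* Doorbell registers are device memory; keep the mapping uncached. */
	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);

	return io_remap_pfn_range(vma, vma->vm_start, ctx->uar_pfn,
				  size, vma->vm_page_prot);
}
```
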
pvrdma.h
90 struct pvrdma_uar_map *uar; member
123 struct pvrdma_uar_map uar; member
535 int pvrdma_uar_alloc(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar);
536 void pvrdma_uar_free(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar);
pvrdma_cq.c
203 cq->uar = &context->uar; in pvrdma_create_cq()
/linux-6.6.21/drivers/infiniband/hw/hns/
hns_roce_pd.c
85 int hns_roce_uar_alloc(struct hns_roce_dev *hr_dev, struct hns_roce_uar *uar) in hns_roce_uar_alloc() argument
97 uar->logic_idx = (unsigned long)id; in hns_roce_uar_alloc()
99 if (uar->logic_idx > 0 && hr_dev->caps.phy_num_uars > 1) in hns_roce_uar_alloc()
100 uar->index = (uar->logic_idx - 1) % in hns_roce_uar_alloc()
103 uar->index = 0; in hns_roce_uar_alloc()
105 uar->pfn = ((pci_resource_start(hr_dev->pci_dev, 2)) >> PAGE_SHIFT); in hns_roce_uar_alloc()
/linux-6.6.21/drivers/vdpa/mlx5/core/
resources.c
260 res->uar = mlx5_get_uars_page(mdev); in mlx5_vdpa_alloc_resources()
261 if (IS_ERR(res->uar)) { in mlx5_vdpa_alloc_resources()
262 err = PTR_ERR(res->uar); in mlx5_vdpa_alloc_resources()
302 mlx5_put_uars_page(mdev, res->uar); in mlx5_vdpa_alloc_resources()
320 mlx5_put_uars_page(mvdev->mdev, res->uar); in mlx5_vdpa_free_resources()
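
The vdpa hits switch to the mlx5 model: rather than allocating a private index, a consumer borrows a shared UARs page from the mlx5 core with mlx5_get_uars_page(), which returns an ERR_PTR on failure, and hands it back with mlx5_put_uars_page() on teardown; the fpga, steering and vfio results further down repeat the same pairing. A small sketch of that acquire/release pattern, with my_res as a hypothetical resource holder:

```c
#include <linux/err.h>
#include <linux/mlx5/driver.h>

struct my_res {
	struct mlx5_uars_page *uar;
};

static int my_alloc_uar(struct mlx5_core_dev *mdev, struct my_res *res)
{
	res->uar = mlx5_get_uars_page(mdev);	/* shared doorbell page     */
	if (IS_ERR(res->uar))
		return PTR_ERR(res->uar);	/* propagate -ENOMEM etc.   */
	return 0;
}

static void my_free_uar(struct mlx5_core_dev *mdev, struct my_res *res)
{
	mlx5_put_uars_page(mdev, res->uar);	/* drop the reference       */
}
```
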
/linux-6.6.21/drivers/infiniband/hw/efa/
efa_admin_cmds_defs.h
148 u16 uar; member
485 u16 uar; member
843 u16 uar; member
853 u16 uar; member
efa_com_cmd.c
33 create_qp_cmd.uar = params->uarn; in efa_com_create_qp()
165 create_cmd.uar = params->uarn; in efa_com_create_cq()
719 result->uarn = resp.uar; in efa_com_alloc_uar()
733 cmd.uar = params->uarn; in efa_com_dealloc_uar()
743 cmd.uar, err); in efa_com_dealloc_uar()
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c
138 mlx5_write64(wqe, conn->fdev->conn_res.uar->map + MLX5_BF_OFFSET); in mlx5_fpga_conn_notify_hw()
362 conn->fdev->conn_res.uar->map, conn->cq.wq.cc); in mlx5_fpga_conn_arm_cq()
457 MLX5_SET(cqc, cqc, uar_page, fdev->conn_res.uar->index); in mlx5_fpga_conn_create_cq()
478 conn->cq.mcq.uar = fdev->conn_res.uar; in mlx5_fpga_conn_create_cq()
562 MLX5_SET(qpc, qpc, uar_page, fdev->conn_res.uar->index); in mlx5_fpga_conn_create_qp()
959 fdev->conn_res.uar = mlx5_get_uars_page(fdev->mdev); in mlx5_fpga_conn_device_init()
960 if (IS_ERR(fdev->conn_res.uar)) { in mlx5_fpga_conn_device_init()
961 err = PTR_ERR(fdev->conn_res.uar); in mlx5_fpga_conn_device_init()
966 fdev->conn_res.uar->index); in mlx5_fpga_conn_device_init()
988 mlx5_put_uars_page(fdev->mdev, fdev->conn_res.uar); in mlx5_fpga_conn_device_init()
[all …]
core.h
58 struct mlx5_uars_page *uar; member
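
The fpga results illustrate how a borrowed UARs page is actually consumed: its index is written into the CQ/QP hardware context (the uar_page field) at create time, and its kernel mapping is used later to ring doorbells, here through the BlueFlame offset. A rough sketch of both uses, assuming the cqc buffer and the 8-byte control segment are prepared elsewhere; MLX5_SET, mlx5_write64 and MLX5_BF_OFFSET are the real mlx5 definitions, the my_* helpers are hypothetical.

```c
#include <linux/mlx5/driver.h>
#include <linux/mlx5/mlx5_ifc.h>

/* Record which UAR page this CQ's doorbell belongs to (create time). */
static void my_fill_cq_context(void *cqc, struct mlx5_uars_page *uar)
{
	MLX5_SET(cqc, cqc, uar_page, uar->index);
}

/* Ring the doorbell later by writing 8 bytes at the BlueFlame offset. */
static void my_ring_doorbell(struct mlx5_uars_page *uar, __be32 *ctrl)
{
	mlx5_write64(ctrl, uar->map + MLX5_BF_OFFSET);
}
```
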
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_send.c
55 struct mlx5_uars_page *uar; member
309 MLX5_SET(qpc, qpc, uar_page, attr->uar->index); in dr_create_rc_qp()
334 dr_qp->uar = attr->uar; in dr_create_rc_qp()
369 mlx5_write64(ctrl, dr_qp->uar->map + MLX5_BF_OFFSET); in dr_cmd_notify_hw()
1059 struct mlx5_uars_page *uar, in dr_create_cq() argument
1109 MLX5_SET(cqc, cqc, uar_page, uar->index); in dr_create_cq()
1136 cq->mcq.uar = uar; in dr_create_cq()
1232 dmn->send_ring->cq = dr_create_cq(dmn->mdev, dmn->uar, cq_size); in mlx5dr_send_ring_alloc()
1241 init_attr.uar = dmn->uar; in mlx5dr_send_ring_alloc()
dr_domain.c
179 dmn->uar = mlx5_get_uars_page(dmn->mdev); in dr_domain_init_resources()
180 if (IS_ERR(dmn->uar)) { in dr_domain_init_resources()
182 ret = PTR_ERR(dmn->uar); in dr_domain_init_resources()
211 mlx5_put_uars_page(dmn->mdev, dmn->uar); in dr_domain_init_resources()
223 mlx5_put_uars_page(dmn->mdev, dmn->uar); in dr_domain_uninit_resources()
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/
cq.c
137 cq->uar = dev->priv.uar; in mlx5_create_cq()
uar.c
48 *uarn = MLX5_GET(alloc_uar_out, out, uar); in mlx5_cmd_alloc_uar()
57 MLX5_SET(dealloc_uar_in, in, uar, uarn); in mlx5_cmd_free_uar()
/linux-6.6.21/drivers/infiniband/hw/mlx5/
cmd.c
229 *uarn = MLX5_GET(alloc_uar_out, out, uar); in mlx5_cmd_uar_alloc()
238 MLX5_SET(dealloc_uar_in, in, uar, uarn); in mlx5_cmd_uar_dealloc()
/linux-6.6.21/drivers/vfio/pci/mlx5/
cmd.c
990 MLX5_SET(cqc, cqc, uar_page, tracker->uar->index); in mlx5vf_create_cq()
1002 mlx5_cq_arm(&cq->mcq, MLX5_CQ_DB_REQ_NOT, tracker->uar->map, in mlx5vf_create_cq()
1063 MLX5_SET(qpc, qpc, uar_page, tracker->uar->index); in mlx5vf_create_rc_qp()
1340 mlx5_put_uars_page(mdev, tracker->uar); in _mlx5vf_free_page_tracker_resources()
1389 tracker->uar = mlx5_get_uars_page(mdev); in mlx5vf_start_page_tracker()
1390 if (IS_ERR(tracker->uar)) { in mlx5vf_start_page_tracker()
1391 err = PTR_ERR(tracker->uar); in mlx5vf_start_page_tracker()
1465 mlx5_put_uars_page(mdev, tracker->uar); in mlx5vf_start_page_tracker()
1600 mlx5_cq_arm(&cq->mcq, MLX5_CQ_DB_REQ_NOT, tracker->uar->map, in mlx5vf_tracker_read_and_clear()
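
The vfio page-tracker hits use the same borrowed UARs page once more, this time to arm its completion queue: mlx5_cq_arm() takes the UAR's kernel mapping and issues both the doorbell-record update and the UAR doorbell write. A minimal sketch, assuming the CQ and UAR objects are already set up; my_arm_cq is a hypothetical wrapper.

```c
#include <linux/mlx5/driver.h>
#include <linux/mlx5/cq.h>

/* Ask for a notification on the next completion via the UAR doorbell. */
static void my_arm_cq(struct mlx5_core_cq *mcq, struct mlx5_uars_page *uar,
		      u32 cons_index)
{
	mlx5_cq_arm(mcq, MLX5_CQ_DB_REQ_NOT, uar->map, cons_index);
}
```
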
/linux-6.6.21/drivers/infiniband/hw/mlx4/
cq.c
182 struct mlx4_uar *uar; in mlx4_ib_create_cq() local
222 uar = &context->uar; in mlx4_ib_create_cq()
240 uar = &dev->priv_uar; in mlx4_ib_create_cq()
247 err = mlx4_cq_alloc(dev->dev, entries, &cq->buf.mtt, uar, cq->db.dma, in mlx4_ib_create_cq()
/linux-6.6.21/include/linux/mlx5/
cq.h
44 struct mlx5_uars_page *uar; member
