/linux-6.6.21/drivers/gpu/drm/tegra/

gem.c
    56: drm_gem_object_put(&obj->gem);   in tegra_bo_put()
    63: struct drm_gem_object *gem = &obj->gem;   in tegra_bo_pin()  [local]
    79: if (gem->import_attach) {   in tegra_bo_pin()
    80: struct dma_buf *buf = gem->import_attach->dmabuf;   in tegra_bo_pin()
    97: map->size = gem->size;   in tegra_bo_pin()
    117: err = sg_alloc_table_from_pages(map->sgt, obj->pages, obj->num_pages, 0, gem->size,   in tegra_bo_pin()
    127: err = dma_get_sgtable(dev, map->sgt, obj->vaddr, obj->iova, gem->size);   in tegra_bo_pin()
    149: map->size = gem->size;   in tegra_bo_pin()
    185: } else if (obj->gem.import_attach) {   in tegra_bo_mmap()
    186: ret = dma_buf_vmap_unlocked(obj->gem.import_attach->dmabuf, &map);   in tegra_bo_mmap()
    [all …]

gem.h
    36: struct drm_gem_object gem;   [member]
    52: static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)   in to_tegra_bo()  [argument]
    54: return container_of(gem, struct tegra_bo, gem);   in to_tegra_bo()
    69: void tegra_bo_free_object(struct drm_gem_object *gem);
    75: int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
    78: struct dma_buf *tegra_gem_prime_export(struct drm_gem_object *gem,
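to_tegra_bo() in gem.h is the usual container_of() upcast: struct tegra_bo embeds a struct drm_gem_object as its gem member, and a pointer to that member is converted back into the enclosing object. A minimal, self-contained illustration of the same pattern in plain C (my_bo and gem_object are made-up stand-ins, and container_of is defined locally rather than taken from <linux/container_of.h>):

#include <stddef.h>
#include <stdio.h>

/* Same definition the kernel uses: back up from the member to the container. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct gem_object {                     /* stand-in for struct drm_gem_object */
        size_t size;
};

struct my_bo {                          /* stand-in for struct tegra_bo */
        struct gem_object gem;          /* base object embedded as a member */
        void *vaddr;
};

static struct my_bo *to_my_bo(struct gem_object *gem)
{
        return container_of(gem, struct my_bo, gem);
}

int main(void)
{
        struct my_bo bo = { .gem = { .size = 4096 }, .vaddr = NULL };
        struct gem_object *gem = &bo.gem;   /* what core code would hand back */

        printf("recovered bo: %p (expected %p)\n",
               (void *)to_my_bo(gem), (void *)&bo);
        return 0;
}

nouveau_gem.h and lsdc_ttm.h below use the identical construction, only with a deeper member path (bo.base, tbo.base).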
fb.c
    120: fb->obj[i] = &planes[i]->gem;   in tegra_fb_alloc()
    139: struct drm_gem_object *gem;   in tegra_fb_create()  [local]
    149: gem = drm_gem_object_lookup(file, cmd->handles[i]);   in tegra_fb_create()
    150: if (!gem) {   in tegra_fb_create()
    160: if (gem->size < size) {   in tegra_fb_create()
    165: planes[i] = to_tegra_bo(gem);   in tegra_fb_create()
    178: drm_gem_object_put(&planes[i]->gem);   in tegra_fb_create()

drm.c
    251: refs[num_refs++] = &obj->gem;   in tegra_drm_submit()
    258: if (offset & 3 || offset > obj->gem.size) {   in tegra_drm_submit()
    281: refs[num_refs++] = &obj->gem;   in tegra_drm_submit()
    289: reloc->cmdbuf.offset >= obj->gem.size) {   in tegra_drm_submit()
    295: refs[num_refs++] = &obj->gem;   in tegra_drm_submit()
    297: if (reloc->target.offset >= obj->gem.size) {   in tegra_drm_submit()
    367: struct drm_gem_object *gem;   in tegra_gem_mmap()  [local]
    370: gem = drm_gem_object_lookup(file, args->handle);   in tegra_gem_mmap()
    371: if (!gem)   in tegra_gem_mmap()
    374: bo = to_tegra_bo(gem);   in tegra_gem_mmap()
    [all …]
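Both tegra_fb_create() (fb.c above) and tegra_gem_mmap() resolve a userspace handle with drm_gem_object_lookup(), which takes a reference that must later be dropped with drm_gem_object_put(). A hedged, kernel-style sketch of that lookup/put pattern; my_driver_query_size() is a hypothetical helper, not a Tegra function:

#include <linux/errno.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>

/* Hypothetical helper: report the size backing a userspace handle. */
static int my_driver_query_size(struct drm_file *file, u32 handle, u64 *size_out)
{
        struct drm_gem_object *gem;

        /* Takes a reference on success; returns NULL for a stale handle. */
        gem = drm_gem_object_lookup(file, handle);
        if (!gem)
                return -ENOENT;

        *size_out = gem->size;

        /* Drop the reference taken by the lookup. */
        drm_gem_object_put(gem);
        return 0;
}

tegra_fb_create() applies the same discipline per plane, dropping the acquired references on its error path (line 178 above).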
fbdev.c
    33: err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);   in tegra_fb_mmap()
    37: return __tegra_gem_mmap(&bo->gem, vma);   in tegra_fb_mmap()
    102: drm_gem_object_put(&bo->gem);   in tegra_fbdev_probe()
    111: drm_gem_object_put(&bo->gem);   in tegra_fbdev_probe()

/linux-6.6.21/drivers/gpu/drm/

drm_gem_ttm_helper.c
    26: const struct drm_gem_object *gem)   in drm_gem_ttm_print_info()  [argument]
    42: const struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem);   in drm_gem_ttm_print_info()
    65: int drm_gem_ttm_vmap(struct drm_gem_object *gem,   in drm_gem_ttm_vmap()  [argument]
    68: struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem);   in drm_gem_ttm_vmap()
    82: void drm_gem_ttm_vunmap(struct drm_gem_object *gem,   in drm_gem_ttm_vunmap()  [argument]
    85: struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem);   in drm_gem_ttm_vunmap()
    99: int drm_gem_ttm_mmap(struct drm_gem_object *gem,   in drm_gem_ttm_mmap()  [argument]
    102: struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem);   in drm_gem_ttm_mmap()
    113: drm_gem_object_put(gem);   in drm_gem_ttm_mmap()
    138: struct drm_gem_object *gem;   in drm_gem_ttm_dumb_map_offset()  [local]
    [all …]
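The drm_gem_ttm_* functions in this file are generic callbacks: each converts the GEM object to its ttm_buffer_object with drm_gem_ttm_of_gem() and defers to TTM. A hedged sketch of how a TTM-based driver might plug them into its struct drm_gem_object_funcs; my_bo_free and my_gem_object_funcs are hypothetical names, not taken from any of the files listed here:

#include <drm/drm_gem.h>
#include <drm/drm_gem_ttm_helper.h>
#include <drm/ttm/ttm_bo.h>

/* Hypothetical .free callback: TTM-backed drivers typically just drop the
 * final ttm_buffer_object reference and let TTM's destroy path clean up. */
static void my_bo_free(struct drm_gem_object *gem)
{
        ttm_bo_put(drm_gem_ttm_of_gem(gem));
}

/* Sketch: wiring the shared TTM helpers into a driver's GEM object ops. */
static const struct drm_gem_object_funcs my_gem_object_funcs = {
        .free       = my_bo_free,
        .print_info = drm_gem_ttm_print_info, /* adds TTM placement details to debug output */
        .vmap       = drm_gem_ttm_vmap,       /* kernel mapping, backed by ttm_bo_vmap() */
        .vunmap     = drm_gem_ttm_vunmap,
        .mmap       = drm_gem_ttm_mmap,       /* userspace mapping; drops the extra ref, as line 113 shows */
};

drm_gem_vram_helper.c below does essentially this, installing drm_gem_vram_object_funcs when the driver has not provided its own (lines 212-213).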
drm_gem_vram_helper.c
    192: struct drm_gem_object *gem;   in drm_gem_vram_create()  [local]
    201: gem = dev->driver->gem_create_object(dev, size);   in drm_gem_vram_create()
    202: if (IS_ERR(gem))   in drm_gem_vram_create()
    203: return ERR_CAST(gem);   in drm_gem_vram_create()
    204: gbo = drm_gem_vram_of_gem(gem);   in drm_gem_vram_create()
    209: gem = &gbo->bo.base;   in drm_gem_vram_create()
    212: if (!gem->funcs)   in drm_gem_vram_create()
    213: gem->funcs = &drm_gem_vram_object_funcs;   in drm_gem_vram_create()
    215: ret = drm_gem_object_init(dev, gem, size);   in drm_gem_vram_create()
    582: static void drm_gem_vram_object_free(struct drm_gem_object *gem)   in drm_gem_vram_object_free()  [argument]
    [all …]

drm_gpuva_mgr.c
    819: struct drm_gem_object *obj = va->gem.obj;   in drm_gpuva_link()
    826: list_add_tail(&va->gem.entry, &obj->gpuva.list);   in drm_gpuva_link()
    843: struct drm_gem_object *obj = va->gem.obj;   in drm_gpuva_unlink()
    850: list_del_init(&va->gem.entry);   in drm_gpuva_unlink()
    1036: op.map.gem.obj = obj;   in op_map_cb()
    1037: op.map.gem.offset = offset;   in op_map_cb()
    1087: struct drm_gem_object *obj = va->gem.obj;   in __drm_gpuva_sm_map()
    1088: u64 offset = va->gem.offset;   in __drm_gpuva_sm_map()
    1092: bool merge = !!va->gem.obj;   in __drm_gpuva_sm_map()
    1116: .gem.obj = obj,   in __drm_gpuva_sm_map()
    [all …]

drm_fb_dma_helper.c
    49: struct drm_gem_object *gem;   in drm_fb_dma_get_gem_obj()  [local]
    51: gem = drm_gem_fb_get_obj(fb, plane);   in drm_fb_dma_get_gem_obj()
    52: if (!gem)   in drm_fb_dma_get_gem_obj()
    55: return to_drm_gem_dma_obj(gem);   in drm_fb_dma_get_gem_obj()
/linux-6.6.21/drivers/gpu/drm/i915/

Makefile
    152: gem-y += \
    153: gem/i915_gem_busy.o \
    154: gem/i915_gem_clflush.o \
    155: gem/i915_gem_context.o \
    156: gem/i915_gem_create.o \
    157: gem/i915_gem_dmabuf.o \
    158: gem/i915_gem_domain.o \
    159: gem/i915_gem_execbuffer.o \
    160: gem/i915_gem_internal.o \
    161: gem/i915_gem_object.o \
    [all …]

i915_sysfs.c
    79: spin_lock(&i915->gem.contexts.lock);   in i915_l3_read()
    84: spin_unlock(&i915->gem.contexts.lock);   in i915_l3_read()
    112: spin_lock(&i915->gem.contexts.lock);   in i915_l3_write()
    125: list_for_each_entry(ctx, &i915->gem.contexts.list, link)   in i915_l3_write()
    128: spin_unlock(&i915->gem.contexts.lock);   in i915_l3_write()

/linux-6.6.21/Documentation/devicetree/bindings/net/

cdns,macb.yaml
    23: - cdns,zynq-gem # Xilinx Zynq-7xxx SoC
    24: - cdns,zynqmp-gem # Xilinx Zynq Ultrascale+ MPSoC
    25: - const: cdns,gem # Generic
    30: - xlnx,versal-gem # Xilinx Versal
    31: - xlnx,zynq-gem # Xilinx Zynq-7xxx SoC
    32: - xlnx,zynqmp-gem # Xilinx Zynq Ultrascale+ MPSoC
    33: - const: cdns,gem # Generic
    50: - atmel,sama5d29-gem # GEM XL IP (10/100) on Atmel sama5d29 SoCs
    51: - atmel,sama5d2-gem # GEM IP (10/100) on Atmel sama5d2 SoCs
    52: - atmel,sama5d3-gem # Gigabit IP on Atmel sama5d3 SoCs
    [all …]
/linux-6.6.21/drivers/gpu/drm/nouveau/

nouveau_gem.c
    77: nouveau_gem_object_del(struct drm_gem_object *gem)   in nouveau_gem_object_del()  [argument]
    79: struct nouveau_bo *nvbo = nouveau_gem_object(gem);   in nouveau_gem_object_del()
    90: if (gem->import_attach)   in nouveau_gem_object_del()
    91: drm_prime_gem_destroy(gem, nvbo->bo.sg);   in nouveau_gem_object_del()
    100: nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv)   in nouveau_gem_object_open()  [argument]
    103: struct nouveau_bo *nvbo = nouveau_gem_object(gem);   in nouveau_gem_object_open()
    185: nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv)   in nouveau_gem_object_close()  [argument]
    188: struct nouveau_bo *nvbo = nouveau_gem_object(gem);   in nouveau_gem_object_close()
    296: nouveau_gem_info(struct drm_file *file_priv, struct drm_gem_object *gem,   in nouveau_gem_info()  [argument]
    300: struct nouveau_bo *nvbo = nouveau_gem_object(gem);   in nouveau_gem_info()
    [all …]

nouveau_gem.h
    11: nouveau_gem_object(struct drm_gem_object *gem)   in nouveau_gem_object()  [argument]
    13: return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL;   in nouveau_gem_object()

nouveau_ttm.c
    192: drm->gem.vram_available >> PAGE_SHIFT);   in nouveau_ttm_init_vram()
    198: drm->gem.vram_available >> PAGE_SHIFT);   in nouveau_ttm_init_vram()
    221: unsigned long size_pages = drm->gem.gart_available >> PAGE_SHIFT;   in nouveau_ttm_init_gtt()
    313: drm->gem.vram_available = drm->client.device.info.ram_user;   in nouveau_ttm_init()
    329: drm->gem.gart_available = drm->client.vmm.vmm.limit;   in nouveau_ttm_init()
    331: drm->gem.gart_available = drm->agp.size;   in nouveau_ttm_init()
    343: NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));   in nouveau_ttm_init()
    344: NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));   in nouveau_ttm_init()
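nouveau_ttm_init() reports the VRAM and GART sizes with plain shifts: >> PAGE_SHIFT converts bytes to pages and >> 20 converts bytes to MiB. A small standalone demonstration of that arithmetic (the 4 KiB page size and the 8 GiB figure are illustrative assumptions, not values from the driver):

#include <stdio.h>

/* 4 KiB pages assumed here; the kernel uses the architecture's real value. */
#define PAGE_SHIFT 12

int main(void)
{
        unsigned long long vram_available = 8ULL << 30;  /* pretend 8 GiB of VRAM */

        printf("pages: %llu\n", vram_available >> PAGE_SHIFT);  /* 2097152 */
        printf("MiB:   %llu\n", vram_available >> 20);          /* 8192 */
        return 0;
}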
/linux-6.6.21/drivers/net/ethernet/sun/

sungem.c
    117: static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)   in __sungem_phy_read()
    145: struct gem *gp = netdev_priv(dev);   in _sungem_phy_read()
    149: static inline u16 sungem_phy_read(struct gem *gp, int reg)   in sungem_phy_read()
    154: static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val)   in __sungem_phy_write()
    178: struct gem *gp = netdev_priv(dev);   in _sungem_phy_write()
    182: static inline void sungem_phy_write(struct gem *gp, int reg, u16 val)   in sungem_phy_write()
    187: static inline void gem_enable_ints(struct gem *gp)   in gem_enable_ints()
    193: static inline void gem_disable_ints(struct gem *gp)   in gem_disable_ints()
    200: static void gem_get_cell(struct gem *gp)   in gem_get_cell()
    214: static void gem_put_cell(struct gem *gp)   in gem_put_cell()
    [all …]

/linux-6.6.21/include/drm/

drm_gem_ttm_helper.h
    18: const struct drm_gem_object *gem);
    19: int drm_gem_ttm_vmap(struct drm_gem_object *gem,
    21: void drm_gem_ttm_vunmap(struct drm_gem_object *gem,
    23: int drm_gem_ttm_mmap(struct drm_gem_object *gem,

drm_gpuva_mgr.h
    113: } gem;   [member]
    160: va->gem.obj = obj;   in drm_gpuva_init()
    161: va->gem.offset = offset;   in drm_gpuva_init()
    393: } gem;   [member]
    609: op->gem.obj, op->gem.offset);   in drm_gpuva_init_from_op()
/linux-6.6.21/drivers/gpu/drm/loongson/

lsdc_gem.c
    176: mutex_lock(&ldev->gem.mutex);   in lsdc_gem_object_create()
    177: list_add_tail(&lbo->list, &ldev->gem.objects);   in lsdc_gem_object_create()
    178: mutex_unlock(&ldev->gem.mutex);   in lsdc_gem_object_create()
    278: mutex_init(&ldev->gem.mutex);   in lsdc_gem_init()
    279: INIT_LIST_HEAD(&ldev->gem.objects);   in lsdc_gem_init()
    290: mutex_lock(&ldev->gem.mutex);   in lsdc_show_buffer_object()
    294: list_for_each_entry(lbo, &ldev->gem.objects, list) {   in lsdc_show_buffer_object()
    305: mutex_unlock(&ldev->gem.mutex);   in lsdc_show_buffer_object()
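lsdc_gem_object_create() puts every buffer object on a device-wide list under ldev->gem.mutex so lsdc_show_buffer_object() can walk it later; qxl_gem.c and radeon_prime.c below keep the same kind of list. A kernel-style sketch of that bookkeeping pattern, using hypothetical my_device/my_bo types rather than the Loongson structures:

#include <linux/list.h>
#include <linux/mutex.h>

struct my_device {
        struct mutex gem_mutex;        /* protects gem_objects */
        struct list_head gem_objects;  /* every live my_bo on this device */
};

struct my_bo {
        struct list_head node;         /* linked into my_device.gem_objects */
        unsigned long size;
};

static void my_gem_init(struct my_device *mdev)
{
        mutex_init(&mdev->gem_mutex);
        INIT_LIST_HEAD(&mdev->gem_objects);
}

static void my_gem_track(struct my_device *mdev, struct my_bo *bo)
{
        mutex_lock(&mdev->gem_mutex);
        list_add_tail(&bo->node, &mdev->gem_objects);
        mutex_unlock(&mdev->gem_mutex);
}

/* Example consumer, e.g. for a debugfs dump: walk the list under the lock. */
static unsigned long my_gem_total_size(struct my_device *mdev)
{
        struct my_bo *bo;
        unsigned long total = 0;

        mutex_lock(&mdev->gem_mutex);
        list_for_each_entry(bo, &mdev->gem_objects, node)
                total += bo->size;
        mutex_unlock(&mdev->gem_mutex);

        return total;
}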
lsdc_ttm.h
    47: static inline struct ttm_buffer_object *to_ttm_bo(struct drm_gem_object *gem)   in to_ttm_bo()  [argument]
    49: return container_of(gem, struct ttm_buffer_object, base);   in to_ttm_bo()
    57: static inline struct lsdc_bo *gem_to_lsdc_bo(struct drm_gem_object *gem)   in gem_to_lsdc_bo()  [argument]
    59: return container_of(gem, struct lsdc_bo, tbo.base);   in gem_to_lsdc_bo()

/linux-6.6.21/drivers/gpu/drm/qxl/

qxl_gem.c
    68: mutex_lock(&qdev->gem.mutex);   in qxl_gem_object_create()
    69: list_add_tail(&qbo->list, &qdev->gem.objects);   in qxl_gem_object_create()
    70: mutex_unlock(&qdev->gem.mutex);   in qxl_gem_object_create()
    125: INIT_LIST_HEAD(&qdev->gem.objects);   in qxl_gem_init()

/linux-6.6.21/drivers/gpu/drm/radeon/

radeon_prime.c
    63: mutex_lock(&rdev->gem.mutex);   in radeon_gem_prime_import_sg_table()
    64: list_add_tail(&bo->list, &rdev->gem.objects);   in radeon_gem_prime_import_sg_table()
    65: mutex_unlock(&rdev->gem.mutex);   in radeon_gem_prime_import_sg_table()
/linux-6.6.21/drivers/gpu/drm/renesas/rcar-du/

rcar_du_vsp.c
    266: struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, i);   in rcar_du_vsp_map_fb()  [local]
    269: if (gem->sgt) {   in rcar_du_vsp_map_fb()
    280: ret = sg_alloc_table(sgt, gem->sgt->orig_nents,   in rcar_du_vsp_map_fb()
    285: src = gem->sgt->sgl;   in rcar_du_vsp_map_fb()
    287: for (j = 0; j < gem->sgt->orig_nents; ++j) {   in rcar_du_vsp_map_fb()
    294: ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr,   in rcar_du_vsp_map_fb()
    295: gem->dma_addr, gem->base.size);   in rcar_du_vsp_map_fb()

/linux-6.6.21/drivers/gpu/drm/renesas/shmobile/

shmob_drm_plane.c
    43: struct drm_gem_dma_object *gem;   in shmob_drm_plane_compute_base()  [local]
    47: gem = drm_fb_dma_get_gem_obj(fb, 0);   in shmob_drm_plane_compute_base()
    48: splane->dma[0] = gem->dma_addr + fb->offsets[0]   in shmob_drm_plane_compute_base()
    53: gem = drm_fb_dma_get_gem_obj(fb, 1);   in shmob_drm_plane_compute_base()
    54: splane->dma[1] = gem->dma_addr + fb->offsets[1]   in shmob_drm_plane_compute_base()
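shmob_drm_plane_compute_base() starts each plane's scanout address from the GEM DMA object's dma_addr plus the framebuffer's per-plane offset; lines 48 and 54 are truncated above, so the x/y terms in this sketch are the conventional DRM formula (y times pitch plus x times cpp), not a verbatim copy of the driver:

#include <linux/types.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>

/* Conventional per-plane scanout address: object base + plane offset
 * + vertical offset in lines + horizontal offset in bytes.
 * Chroma subsampling of secondary planes is ignored in this sketch. */
static dma_addr_t plane_scanout_addr(struct drm_framebuffer *fb,
                                     unsigned int plane,
                                     unsigned int x, unsigned int y)
{
        struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, plane);

        return gem->dma_addr + fb->offsets[plane]
             + y * fb->pitches[plane]
             + x * fb->format->cpp[plane];
}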
/linux-6.6.21/Documentation/devicetree/bindings/interconnect/

qcom,rpmh.yaml
    33: - qcom,sc7180-gem-noc
    45: - qcom,sc8180x-gem-noc
    78: - qcom,sm8150-gem-noc
    87: - qcom,sm8250-gem-noc
    97: - qcom,sm8350-gem-noc