/linux-3.4.99/drivers/gpu/drm/radeon/
D | radeon_object.c
    42  static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
    49  void radeon_bo_clear_va(struct radeon_bo *bo)  in radeon_bo_clear_va() argument
    53  list_for_each_entry_safe(bo_va, tmp, &bo->va, bo_list) {  in radeon_bo_clear_va()
    65  struct radeon_bo *bo;  in radeon_ttm_bo_destroy() local
    67  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
    68  mutex_lock(&bo->rdev->gem.mutex);  in radeon_ttm_bo_destroy()
    69  list_del_init(&bo->list);  in radeon_ttm_bo_destroy()
    70  mutex_unlock(&bo->rdev->gem.mutex);  in radeon_ttm_bo_destroy()
    71  radeon_bo_clear_surface_reg(bo);  in radeon_ttm_bo_destroy()
    72  radeon_bo_clear_va(bo);  in radeon_ttm_bo_destroy()
    [all …]
D | radeon_semaphore.c
    36  struct radeon_semaphore_bo *bo;  in radeon_semaphore_add_bo() local
    43  bo = kmalloc(sizeof(struct radeon_semaphore_bo), GFP_KERNEL);  in radeon_semaphore_add_bo()
    44  if (bo == NULL) {  in radeon_semaphore_add_bo()
    47  INIT_LIST_HEAD(&bo->free);  in radeon_semaphore_add_bo()
    48  INIT_LIST_HEAD(&bo->list);  in radeon_semaphore_add_bo()
    49  bo->nused = 0;  in radeon_semaphore_add_bo()
    51  r = radeon_ib_get(rdev, 0, &bo->ib, RADEON_SEMAPHORE_BO_SIZE);  in radeon_semaphore_add_bo()
    54  kfree(bo);  in radeon_semaphore_add_bo()
    58  gpu_addr += bo->ib->sa_bo.offset;  in radeon_semaphore_add_bo()
    60  cpu_ptr += (bo->ib->sa_bo.offset >> 2);  in radeon_semaphore_add_bo()
    [all …]
D | radeon_object.h
    55  int radeon_bo_reserve(struct radeon_bo *bo, bool no_wait);
    57  static inline void radeon_bo_unreserve(struct radeon_bo *bo)  in radeon_bo_unreserve() argument
    59  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
    71  static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo)  in radeon_bo_gpu_offset() argument
    73  return bo->tbo.offset;  in radeon_bo_gpu_offset()
    76  static inline unsigned long radeon_bo_size(struct radeon_bo *bo)  in radeon_bo_size() argument
    78  return bo->tbo.num_pages << PAGE_SHIFT;  in radeon_bo_size()
    81  static inline bool radeon_bo_is_reserved(struct radeon_bo *bo)  in radeon_bo_is_reserved() argument
    83  return !!atomic_read(&bo->tbo.reserved);  in radeon_bo_is_reserved()
    86  static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo)  in radeon_bo_ngpu_pages() argument
    [all …]
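The radeon_object.h hits are the thin inline layer radeon wraps around the TTM object embedded in struct radeon_bo. A minimal usage sketch, assuming only the helpers indexed above and the radeon driver headers; the wrapper example_query_bo() is hypothetical, not a kernel function:

/* kernel context assumed: built inside drivers/gpu/drm/radeon with radeon.h included */
static int example_query_bo(struct radeon_bo *bo, u64 *gpu_addr, unsigned long *bytes)
{
        int r;

        /* radeon_bo_reserve() may sleep when no_wait is false */
        r = radeon_bo_reserve(bo, false);
        if (r)
                return r;

        *gpu_addr = radeon_bo_gpu_offset(bo);   /* tbo.offset, meaningful while the BO is placed */
        *bytes    = radeon_bo_size(bo);         /* tbo.num_pages << PAGE_SHIFT */

        radeon_bo_unreserve(bo);                /* wraps ttm_bo_unreserve(&bo->tbo) */
        return 0;
}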
D | radeon_sa.c
    40  sa_manager->bo = NULL;  in radeon_sa_bo_manager_init()
    46  RADEON_GEM_DOMAIN_CPU, &sa_manager->bo);  in radeon_sa_bo_manager_init()
    66  radeon_bo_unref(&sa_manager->bo);  in radeon_sa_bo_manager_fini()
    75  if (sa_manager->bo == NULL) {  in radeon_sa_bo_manager_start()
    81  r = radeon_bo_reserve(sa_manager->bo, false);  in radeon_sa_bo_manager_start()
    86  r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr);  in radeon_sa_bo_manager_start()
    88  radeon_bo_unreserve(sa_manager->bo);  in radeon_sa_bo_manager_start()
    92  r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr);  in radeon_sa_bo_manager_start()
    93  radeon_bo_unreserve(sa_manager->bo);  in radeon_sa_bo_manager_start()
    102  if (sa_manager->bo == NULL) {  in radeon_sa_bo_manager_suspend()
    [all …]
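radeon_sa.c brings the suballocator's backing BO up in a fixed order: reserve, pin into its domain, kmap, unreserve. A hedged sketch of that order, using only the calls visible above; example_start() is illustrative, not part of the driver:

/* kernel context assumed, as for the radeon_object.h sketch above */
static int example_start(struct radeon_bo *bo, u32 domain, u64 *gpu_addr, void **cpu_ptr)
{
        int r;

        r = radeon_bo_reserve(bo, false);
        if (r)
                return r;

        r = radeon_bo_pin(bo, domain, gpu_addr);   /* make the BO resident, report its GPU address */
        if (r) {
                radeon_bo_unreserve(bo);
                return r;
        }

        r = radeon_bo_kmap(bo, cpu_ptr);           /* CPU mapping the suballocator writes through */
        radeon_bo_unreserve(bo);                   /* unreserve whether or not kmap succeeded */
        return r;
}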
D | radeon_ttm.c
    173  static void radeon_evict_flags(struct ttm_buffer_object *bo,  in radeon_evict_flags() argument
    179  if (!radeon_ttm_bo_is_radeon_bo(bo)) {  in radeon_evict_flags()
    188  rbo = container_of(bo, struct radeon_bo, tbo);  in radeon_evict_flags()
    189  switch (bo->mem.mem_type) {  in radeon_evict_flags()
    203  static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp)  in radeon_verify_access() argument
    208  static void radeon_move_null(struct ttm_buffer_object *bo,  in radeon_move_null() argument
    211  struct ttm_mem_reg *old_mem = &bo->mem;  in radeon_move_null()
    218  static int radeon_move_blit(struct ttm_buffer_object *bo,  in radeon_move_blit() argument
    228  rdev = radeon_get_rdev(bo->bdev);  in radeon_move_blit()
    299  r = ttm_bo_move_accel_cleanup(bo, (void *)fence, NULL,  in radeon_move_blit()
    [all …]
D | radeon_trace.h
    16  TP_PROTO(struct radeon_bo *bo),
    17  TP_ARGS(bo),
    19  __field(struct radeon_bo *, bo)
    24  __entry->bo = bo;
    25  __entry->pages = bo->tbo.num_pages;
    27  TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
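These radeon_trace.h fragments all belong to one TRACE_EVENT() definition. A reconstruction sketch of how such a tracepoint fits together; the event name (radeon_bo_create) and the lines not shown in the index are assumed rather than quoted from the file:

TRACE_EVENT(radeon_bo_create,
        TP_PROTO(struct radeon_bo *bo),
        TP_ARGS(bo),

        TP_STRUCT__entry(
                __field(struct radeon_bo *, bo)     /* pointer logged as-is */
                __field(u32, pages)
        ),

        TP_fast_assign(
                __entry->bo = bo;
                __entry->pages = bo->tbo.num_pages;
        ),

        TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
);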
D | radeon_gart.c
    340  if (rdev->vm_manager.sa_manager.bo == NULL)  in radeon_vm_manager_fini()
    350  if (rdev->vm_manager.sa_manager.bo == NULL) {  in radeon_vm_manager_start()
    438  return radeon_vm_bo_update_pte(rdev, vm, rdev->ib_pool.sa_manager.bo,  in radeon_vm_bind()
    439  &rdev->ib_pool.sa_manager.bo->tbo.mem);  in radeon_vm_bind()
    445  struct radeon_bo *bo,  in radeon_vm_bo_add() argument
    451  uint64_t size = radeon_bo_size(bo), last_offset = 0;  in radeon_vm_bo_add()
    459  bo_va->bo = bo;  in radeon_vm_bo_add()
    505  bo, (unsigned)bo_va->soffset, tmp->bo,  in radeon_vm_bo_add()
    515  list_add_tail(&bo_va->bo_list, &bo->va);  in radeon_vm_bo_add()
    551  struct radeon_bo *bo,  in radeon_vm_bo_update_pte() argument
    [all …]
/linux-3.4.99/drivers/gpu/drm/ttm/
D | ttm_bo.c
    48  static int ttm_bo_setup_vm(struct ttm_buffer_object *bo);
    84  static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo,  in ttm_bo_mem_space_debug() argument
    90  bo, bo->mem.num_pages, bo->mem.size >> 10,  in ttm_bo_mem_space_debug()
    91  bo->mem.size >> 20);  in ttm_bo_mem_space_debug()
    99  ttm_mem_type_debug(bo->bdev, mem_type);  in ttm_bo_mem_space_debug()
    137  struct ttm_buffer_object *bo =  in ttm_bo_release_list() local
    139  struct ttm_bo_device *bdev = bo->bdev;  in ttm_bo_release_list()
    140  size_t acc_size = bo->acc_size;  in ttm_bo_release_list()
    142  BUG_ON(atomic_read(&bo->list_kref.refcount));  in ttm_bo_release_list()
    143  BUG_ON(atomic_read(&bo->kref.refcount));  in ttm_bo_release_list()
    [all …]
D | ttm_bo_vm.c
    49  struct ttm_buffer_object *bo;  in ttm_bo_vm_lookup_rb() local
    53  bo = rb_entry(cur, struct ttm_buffer_object, vm_rb);  in ttm_bo_vm_lookup_rb()
    54  cur_offset = bo->vm_node->start;  in ttm_bo_vm_lookup_rb()
    57  best_bo = bo;  in ttm_bo_vm_lookup_rb()
    76  struct ttm_buffer_object *bo = (struct ttm_buffer_object *)  in ttm_bo_vm_fault() local
    78  struct ttm_bo_device *bdev = bo->bdev;  in ttm_bo_vm_fault()
    89  &bdev->man[bo->mem.mem_type];  in ttm_bo_vm_fault()
    97  ret = ttm_bo_reserve(bo, true, true, false, 0);  in ttm_bo_vm_fault()
    105  ret = bdev->driver->fault_reserve_notify(bo);  in ttm_bo_vm_fault()
    126  if (test_bit(TTM_BO_PRIV_FLAG_MOVING, &bo->priv_flags)) {  in ttm_bo_vm_fault()
    [all …]
D | ttm_execbuf_util.c
    40  struct ttm_buffer_object *bo = entry->bo;  in ttm_eu_backoff_reservation_locked() local
    45  ttm_bo_add_to_lru(bo);  in ttm_eu_backoff_reservation_locked()
    50  atomic_set(&bo->reserved, 0);  in ttm_eu_backoff_reservation_locked()
    51  wake_up_all(&bo->event_queue);  in ttm_eu_backoff_reservation_locked()
    60  struct ttm_buffer_object *bo = entry->bo;  in ttm_eu_del_from_lru_locked() local
    65  entry->put_count = ttm_bo_del_from_lru(bo);  in ttm_eu_del_from_lru_locked()
    76  struct ttm_buffer_object *bo = entry->bo;  in ttm_eu_list_ref_sub() local
    79  ttm_bo_list_ref_sub(bo, entry->put_count, true);  in ttm_eu_list_ref_sub()
    86  struct ttm_buffer_object *bo)  in ttm_eu_wait_unreserved_locked() argument
    88  struct ttm_bo_global *glob = bo->glob;  in ttm_eu_wait_unreserved_locked()
    [all …]
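ttm_execbuf_util.c holds the helpers drivers use to reserve a whole validation list before command submission and then either fence or back off every buffer. A hedged caller-side sketch; the 3.4-era signatures (plain list_head arguments, a driver-owned sync object) and the submit_on_hw() stub are assumptions, not quotes from the tree:

#include <drm/ttm/ttm_execbuf_util.h>

static int submit_on_hw(void);             /* stand-in for the driver's validate + emit step */

static int example_submit(struct list_head *validate_list, void *fence)
{
        int ret;

        ret = ttm_eu_reserve_buffers(validate_list);       /* reserve every BO on the list */
        if (ret)
                return ret;

        ret = submit_on_hw();
        if (ret) {
                ttm_eu_backoff_reservation(validate_list); /* put BOs back on the LRU, wake waiters */
                return ret;
        }

        ttm_eu_fence_buffer_objects(validate_list, fence); /* attach the fence and unreserve */
        return 0;
}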
D | ttm_bo_util.c
    40  void ttm_bo_free_old_node(struct ttm_buffer_object *bo)  in ttm_bo_free_old_node() argument
    42  ttm_bo_mem_put(bo, &bo->mem);  in ttm_bo_free_old_node()
    45  int ttm_bo_move_ttm(struct ttm_buffer_object *bo,  in ttm_bo_move_ttm() argument
    49  struct ttm_tt *ttm = bo->ttm;  in ttm_bo_move_ttm()
    50  struct ttm_mem_reg *old_mem = &bo->mem;  in ttm_bo_move_ttm()
    55  ttm_bo_free_old_node(bo);  in ttm_bo_move_ttm()
    100  struct ttm_buffer_object *bo;  in ttm_mem_io_evict() local
    105  bo = list_first_entry(&man->io_reserve_lru,  in ttm_mem_io_evict()
    108  list_del_init(&bo->io_reserve_lru);  in ttm_mem_io_evict()
    109  ttm_bo_unmap_virtual_locked(bo);  in ttm_mem_io_evict()
    [all …]
/linux-3.4.99/drivers/gpu/drm/vmwgfx/
D | vmwgfx_dmabuf.c
    56  struct ttm_buffer_object *bo = &buf->base;  in vmw_dmabuf_to_placement() local
    65  ret = ttm_bo_reserve(bo, interruptible, false, false, 0);  in vmw_dmabuf_to_placement()
    69  ret = ttm_bo_validate(bo, placement, interruptible, false, false);  in vmw_dmabuf_to_placement()
    71  ttm_bo_unreserve(bo);  in vmw_dmabuf_to_placement()
    99  struct ttm_buffer_object *bo = &buf->base;  in vmw_dmabuf_to_vram_or_gmr() local
    110  ret = ttm_bo_reserve(bo, interruptible, false, false, 0);  in vmw_dmabuf_to_vram_or_gmr()
    126  ret = ttm_bo_validate(bo, placement, interruptible, false, false);  in vmw_dmabuf_to_vram_or_gmr()
    141  ret = ttm_bo_validate(bo, placement, interruptible, false, false);  in vmw_dmabuf_to_vram_or_gmr()
    144  ttm_bo_unreserve(bo);  in vmw_dmabuf_to_vram_or_gmr()
    202  struct ttm_buffer_object *bo = &buf->base;  in vmw_dmabuf_to_start_of_vram() local
    [all …]
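All of the vmwgfx helpers indexed above follow the same reserve -> validate -> unreserve shape, with ttm_bo_validate() doing the actual migration until the buffer satisfies the requested placement. A sketch with the argument layout copied from those calls; example_move_to() itself is hypothetical:

#include <drm/ttm/ttm_bo_api.h>

static int example_move_to(struct ttm_buffer_object *bo,
                           struct ttm_placement *placement,
                           bool interruptible)
{
        int ret;

        ret = ttm_bo_reserve(bo, interruptible, false, false, 0);
        if (unlikely(ret != 0))
                return ret;

        /* migrate/validate the BO until it matches "placement" */
        ret = ttm_bo_validate(bo, placement, interruptible, false, false);

        ttm_bo_unreserve(bo);
        return ret;
}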
D | vmwgfx_execbuf.c
    73  struct ttm_buffer_object *bo,  in vmw_bo_to_validate_list() argument
    80  val_node = vmw_dmabuf_validate_node(bo, sw_context->cur_val_buf);  in vmw_bo_to_validate_list()
    91  val_buf->bo = ttm_bo_reference(bo);  in vmw_bo_to_validate_list()
    446  struct ttm_buffer_object *bo;  in vmw_translate_guest_ptr() local
    456  bo = &vmw_bo->base;  in vmw_translate_guest_ptr()
    468  ret = vmw_bo_to_validate_list(sw_context, bo, DRM_VMW_FENCE_FLAG_EXEC,  in vmw_translate_guest_ptr()
    553  struct ttm_buffer_object *bo;  in vmw_cmd_dma() local
    569  bo = &vmw_bo->base;  in vmw_cmd_dma()
    588  vmw_kms_cursor_snoop(srf, sw_context->tfile, bo, header);  in vmw_cmd_dma()
    867  struct ttm_buffer_object *bo;  in vmw_apply_relocations() local
    [all …]
/linux-3.4.99/drivers/gpu/drm/nouveau/
D | nouveau_bo.c
    43  nouveau_bo_del_ttm(struct ttm_buffer_object *bo)  in nouveau_bo_del_ttm() argument
    45  struct drm_nouveau_private *dev_priv = nouveau_bdev(bo->bdev);  in nouveau_bo_del_ttm()
    47  struct nouveau_bo *nvbo = nouveau_bo(bo);  in nouveau_bo_del_ttm()
    50  DRM_ERROR("bo %p still attached to GEM object\n", bo);  in nouveau_bo_del_ttm()
    60  struct drm_nouveau_private *dev_priv = nouveau_bdev(nvbo->bo.bdev);  in nouveau_bo_fixup_align()
    107  nvbo->bo.bdev = &dev_priv->ttm.bdev;  in nouveau_bo_new()
    116  nvbo->bo.mem.num_pages = size >> PAGE_SHIFT;  in nouveau_bo_new()
    122  ret = ttm_bo_init(&dev_priv->ttm.bdev, &nvbo->bo, size,  in nouveau_bo_new()
    151  struct drm_nouveau_private *dev_priv = nouveau_bdev(nvbo->bo.bdev);  in set_placement_range()
    156  nvbo->bo.mem.num_pages < vram_pages / 4) {  in set_placement_range()
    [all …]
D | nouveau_gem.c
    45  struct ttm_buffer_object *bo = &nvbo->bo;  in nouveau_gem_object_del() local
    56  ttm_bo_unref(&bo);  in nouveau_gem_object_del()
    73  ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0);  in nouveau_gem_object_open()
    95  ttm_bo_unreserve(&nvbo->bo);  in nouveau_gem_object_open()
    110  ret = ttm_bo_reserve(&nvbo->bo, false, false, false, 0);  in nouveau_gem_object_close()
    121  ttm_bo_unreserve(&nvbo->bo);  in nouveau_gem_object_close()
    156  nvbo->gem = drm_gem_object_alloc(dev, nvbo->bo.mem.size);  in nouveau_gem_new()
    162  nvbo->bo.persistent_swap_storage = nvbo->gem->filp;  in nouveau_gem_new()
    175  if (nvbo->bo.mem.mem_type == TTM_PL_TT)  in nouveau_gem_info()
    180  rep->offset = nvbo->bo.offset;  in nouveau_gem_info()
    [all …]
D | nouveau_fence.c
    299  nouveau_bo_wr32(dev_priv->fence.bo, i / 4, 0);  in semaphore_alloc()
    328  nouveau_bo_wr32(dev_priv->fence.bo, sema->mem->start / 4, 1);  in semaphore_work()
    519  struct ttm_mem_reg *mem = &dev_priv->fence.bo->bo.mem;  in nouveau_fence_channel_init()
    535  ret = nouveau_bo_vma_add(dev_priv->fence.bo, chan->vm,  in nouveau_fence_channel_init()
    563  nouveau_bo_vma_del(dev_priv->fence.bo, &chan->fence.vma);  in nouveau_fence_channel_fini()
    576  0, 0, &dev_priv->fence.bo);  in nouveau_fence_init()
    580  ret = nouveau_bo_pin(dev_priv->fence.bo, TTM_PL_FLAG_VRAM);  in nouveau_fence_init()
    584  ret = nouveau_bo_map(dev_priv->fence.bo);  in nouveau_fence_init()
    589  dev_priv->fence.bo->bo.mem.size);  in nouveau_fence_init()
    598  nouveau_bo_unmap(dev_priv->fence.bo);  in nouveau_fence_init()
    [all …]
D | nouveau_display.c
    390  ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);  in nouveau_page_flip_reserve()
    394  ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);  in nouveau_page_flip_reserve()
    401  ttm_bo_unreserve(&new_bo->bo);  in nouveau_page_flip_reserve()
    413  ttm_bo_unreserve(&new_bo->bo);  in nouveau_page_flip_unreserve()
    416  ttm_bo_unreserve(&old_bo->bo);  in nouveau_page_flip_unreserve()
    439  ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);  in nouveau_page_flip_emit()
    500  new_bo->bo.offset };  in nouveau_crtc_page_flip()
    503  chan = nouveau_fence_channel(new_bo->bo.sync_obj);  in nouveau_crtc_page_flip()
    582  struct nouveau_bo *bo;  in nouveau_display_dumb_create() local
    589  ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);  in nouveau_display_dumb_create()
    [all …]
D | nv50_evo.c
    157  u64 pushbuf = evo->pushbuf_bo->bo.offset;  in nv50_evo_channel_init()
    228  if (disp->crtc[i].sem.bo) {  in nv50_evo_destroy()
    229  nouveau_bo_unmap(disp->crtc[i].sem.bo);  in nv50_evo_destroy()
    230  nouveau_bo_ref(NULL, &disp->crtc[i].sem.bo);  in nv50_evo_destroy()
    336  0, 0x0000, &dispc->sem.bo);  in nv50_evo_create()
    338  ret = nouveau_bo_pin(dispc->sem.bo, TTM_PL_FLAG_VRAM);  in nv50_evo_create()
    340  ret = nouveau_bo_map(dispc->sem.bo);  in nv50_evo_create()
    342  nouveau_bo_ref(NULL, &dispc->sem.bo);  in nv50_evo_create()
    343  offset = dispc->sem.bo->bo.offset;  in nv50_evo_create()
    374  nouveau_bo_wr32(dispc->sem.bo, j / 4, 0x74b1e000);  in nv50_evo_create()
/linux-3.4.99/include/drm/ttm/
D | ttm_bo_driver.h
    205  struct ttm_buffer_object *bo,
    381  void(*evict_flags) (struct ttm_buffer_object *bo,
    396  int (*move) (struct ttm_buffer_object *bo,
    413  int (*verify_access) (struct ttm_buffer_object *bo,
    435  void (*move_notify)(struct ttm_buffer_object *bo,
    439  int (*fault_reserve_notify)(struct ttm_buffer_object *bo);
    444  void (*swap_notify) (struct ttm_buffer_object *bo);
    722  extern int ttm_bo_mem_space(struct ttm_buffer_object *bo,
    728  extern void ttm_bo_mem_put(struct ttm_buffer_object *bo,
    730  extern void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
    [all …]
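ttm_bo_driver.h declares the per-driver callback table; the radeon_evict_flags and radeon_verify_access hits under radeon_ttm.c above are one driver's implementations of two of these hooks. A partial sketch of wiring such a table, limited to the members visible in this index; the my_* handlers are placeholders whose prototypes are omitted, and a real table also fills the ttm_tt and io_mem callbacks:

#include <drm/ttm/ttm_bo_driver.h>

static struct ttm_bo_driver example_bo_driver = {
        .evict_flags          = my_evict_flags,           /* choose a fallback placement on eviction */
        .move                 = my_bo_move,               /* copy or blit between placements */
        .verify_access        = my_verify_access,         /* mmap permission check against the opener */
        .move_notify          = my_move_notify,           /* driver bookkeeping when a BO changes placement */
        .fault_reserve_notify = my_fault_reserve_notify,  /* called from ttm_bo_vm_fault() before mapping */
        .swap_notify          = my_swap_notify,           /* BO content is about to be swapped out */
};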
D | ttm_bo_api.h
    299  struct ttm_buffer_object *bo;  member
    311  ttm_bo_reference(struct ttm_buffer_object *bo)  in ttm_bo_reference() argument
    313  kref_get(&bo->kref);  in ttm_bo_reference()
    314  return bo;  in ttm_bo_reference()
    331  extern int ttm_bo_wait(struct ttm_buffer_object *bo, bool lazy,
    350  extern int ttm_bo_validate(struct ttm_buffer_object *bo,
    362  extern void ttm_bo_unref(struct ttm_buffer_object **bo);
    374  extern void ttm_bo_list_ref_sub(struct ttm_buffer_object *bo, int count,
    387  extern void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
    399  extern int ttm_bo_del_from_lru(struct ttm_buffer_object *bo);
    [all …]
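ttm_bo_api.h pairs ttm_bo_reference(), which bumps bo->kref and returns the same pointer, with ttm_bo_unref(), which drops a reference and clears the caller's pointer. A small sketch of that pairing; example_hold() is illustrative, not a kernel function:

#include <drm/ttm/ttm_bo_api.h>

static void example_hold(struct ttm_buffer_object *bo)
{
        struct ttm_buffer_object *ref = ttm_bo_reference(bo);  /* kref_get(&bo->kref) */

        /* ... "ref" stays valid here even if the original holder drops its reference ... */

        ttm_bo_unref(&ref);                                     /* drop it; ref is set to NULL */
}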
/linux-3.4.99/crypto/
D | aes_generic.c
    1300  #define f_rn(bo, bi, n, k) do { \  argument
    1301  bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \
    1307  #define f_nround(bo, bi, k) do {\  argument
    1308  f_rn(bo, bi, 0, k); \
    1309  f_rn(bo, bi, 1, k); \
    1310  f_rn(bo, bi, 2, k); \
    1311  f_rn(bo, bi, 3, k); \
    1315  #define f_rl(bo, bi, n, k) do { \  argument
    1316  bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \
    1322  #define f_lround(bo, bi, k) do {\  argument
    [all …]
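In aes_generic.c the identifier bo is just a macro argument: f_rn() writes one output word of a forward AES round, built from four T-table lookups (one per byte of the rotated input columns) XORed with a round-key word, and f_nround() applies it to all four columns. An illustrative rewrite of that column computation as a function; the rotation and byte-selection pattern is inferred from line 1301 plus the usual T-table construction, not quoted verbatim:

#include <linux/types.h>

/* byte i of a 32-bit word, matching the role of the kernel's byte(x, i) helper */
#define BYTE_OF(x, i)  (((x) >> (8 * (i))) & 0xff)

static inline u32 aes_fwd_column(const u32 ft[4][256], const u32 bi[4],
                                 unsigned int n, u32 round_key_word)
{
        return ft[0][BYTE_OF(bi[n],           0)] ^
               ft[1][BYTE_OF(bi[(n + 1) & 3], 1)] ^
               ft[2][BYTE_OF(bi[(n + 2) & 3], 2)] ^
               ft[3][BYTE_OF(bi[(n + 3) & 3], 3)] ^
               round_key_word;
}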
/linux-3.4.99/net/can/
D | bcm.c
    164  struct bcm_sock *bo = bcm_sk(sk);  in bcm_proc_show() local
    169  seq_printf(m, " / bo %pK", bo);  in bcm_proc_show()
    170  seq_printf(m, " / dropped %lu", bo->dropped_usr_msgs);  in bcm_proc_show()
    171  seq_printf(m, " / bound %s", bcm_proc_getifname(ifname, bo->ifindex));  in bcm_proc_show()
    174  list_for_each_entry(op, &bo->rx_ops, list) {  in bcm_proc_show()
    205  list_for_each_entry(op, &bo->tx_ops, list) {  in bcm_proc_show()
    337  struct bcm_sock *bo = bcm_sk(sk);  in bcm_send_to_user() local
    341  bo->dropped_usr_msgs++;  in bcm_send_to_user()
    828  struct bcm_sock *bo = bcm_sk(sk);  in bcm_tx_setup() local
    842  op = bcm_find_op(&bo->tx_ops, msg_head->can_id, ifindex);  in bcm_tx_setup()
    [all …]
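Here bo is unrelated to GPU buffer objects: it is the per-socket state of the CAN broadcast manager (bcm_sk(sk)), and bcm_tx_setup() above is what services a TX_SETUP message written to a BCM socket from userspace. A userspace sketch of the message that path handles, assuming <linux/can.h> and <linux/can/bcm.h>; error handling is trimmed:

#include <string.h>
#include <unistd.h>
#include <net/if.h>
#include <sys/socket.h>
#include <linux/can.h>
#include <linux/can/bcm.h>

struct tx_msg {
        struct bcm_msg_head head;
        struct can_frame frame;
};

int example_bcm_cyclic_tx(void)
{
        struct sockaddr_can addr = { .can_family = AF_CAN };
        struct tx_msg msg;
        int s = socket(PF_CAN, SOCK_DGRAM, CAN_BCM);

        addr.can_ifindex = if_nametoindex("can0");
        connect(s, (struct sockaddr *)&addr, sizeof(addr));

        memset(&msg, 0, sizeof(msg));
        msg.head.opcode  = TX_SETUP;                    /* create/update a cyclic TX job */
        msg.head.flags   = SETTIMER | STARTTIMER;
        msg.head.count   = 0;                           /* 0: repeat forever at ival2 */
        msg.head.ival2.tv_usec = 100000;                /* 100 ms period */
        msg.head.can_id  = 0x123;
        msg.head.nframes = 1;
        msg.frame.can_id  = 0x123;
        msg.frame.can_dlc = 2;
        msg.frame.data[0] = 0xde;
        msg.frame.data[1] = 0xad;

        write(s, &msg, sizeof(msg));                    /* handled by bcm_tx_setup() in bcm.c */
        return s;
}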
/linux-3.4.99/drivers/staging/rtl8192u/ieee80211/
D | aes.c
    127  #define f_rn(bo, bi, n, k) \  argument
    128  bo[n] = ft_tab[0][byte(bi[n],0)] ^ \
    133  #define i_rn(bo, bi, n, k) \  argument
    134  bo[n] = it_tab[0][byte(bi[n],0)] ^ \
    145  #define f_rl(bo, bi, n, k) \  argument
    146  bo[n] = fl_tab[0][byte(bi[n],0)] ^ \
    151  #define i_rl(bo, bi, n, k) \  argument
    152  bo[n] = il_tab[0][byte(bi[n],0)] ^ \
    329  #define f_nround(bo, bi, k) \  argument
    330  f_rn(bo, bi, 0, k); \
    [all …]
/linux-3.4.99/drivers/staging/omapdrm/
D | omap_fbdev.c
    38  struct drm_gem_object *bo;  member
    89  omap_gem_roll(fbdev->bo, fbi->var.yoffset * npages);  in pan_worker()
    183  fbdev->bo = omap_gem_new(dev, gsize, OMAP_BO_SCANOUT | OMAP_BO_WC);  in omap_fbdev_create()
    184  if (!fbdev->bo) {  in omap_fbdev_create()
    190  fb = omap_framebuffer_init(dev, &mode_cmd, &fbdev->bo);  in omap_fbdev_create()
    196  drm_gem_object_unreference(fbdev->bo);  in omap_fbdev_create()
    209  ret = omap_gem_get_paddr(fbdev->bo, &paddr, true);  in omap_fbdev_create()
    248  fbi->screen_base = omap_gem_vaddr(fbdev->bo);  in omap_fbdev_create()
    249  fbi->screen_size = fbdev->bo->size;  in omap_fbdev_create()
    251  fbi->fix.smem_len = fbdev->bo->size;  in omap_fbdev_create()
D | omap_fb.c
    78  struct drm_gem_object *bo;  member
    98  omap_fb->planes[0].bo, handle);  in omap_framebuffer_create_handle()
    112  if (plane->bo)  in omap_framebuffer_destroy()
    113  drm_gem_object_unreference_unlocked(plane->bo);  in omap_framebuffer_destroy()
    180  void (*unpin)(void *arg, struct drm_gem_object *bo))  in omap_framebuffer_replace() argument
    196  unpin(arg, pa->bo);  in omap_framebuffer_replace()
    201  ret = omap_gem_get_paddr(pb->bo, &pb->paddr, true);  in omap_framebuffer_replace()
    209  unpin(arg, pb->bo);  in omap_framebuffer_replace()
    223  return omap_fb->planes[p].bo;  in omap_framebuffer_bo()
    293  omap_gem_describe(plane->bo, m);  in omap_framebuffer_describe()
    [all …]