Lines Matching refs:mob (drivers/gpu/drm/vmwgfx/vmwgfx_mob.c)
86 struct vmw_mob *mob);
87 static void vmw_mob_pt_setup(struct vmw_mob *mob,
123 struct vmw_mob *mob; in vmw_setup_otable_base() local
134 mob = vmw_mob_create(otable->size >> PAGE_SHIFT); in vmw_setup_otable_base()
135 if (unlikely(mob == NULL)) { in vmw_setup_otable_base()
141 mob->pt_level = VMW_MOBFMT_PTDEPTH_0; in vmw_setup_otable_base()
142 mob->pt_root_page = vmw_piter_dma_addr(&iter); in vmw_setup_otable_base()
144 ret = vmw_mob_pt_populate(dev_priv, mob); in vmw_setup_otable_base()
148 vmw_mob_pt_setup(mob, iter, otable->size >> PAGE_SHIFT); in vmw_setup_otable_base()
149 mob->pt_level += VMW_MOBFMT_PTDEPTH_1 - SVGA3D_MOBFMT_PT_1; in vmw_setup_otable_base()
162 cmd->body.baseAddress = mob->pt_root_page >> PAGE_SHIFT; in vmw_setup_otable_base()
165 cmd->body.ptDepth = mob->pt_level; in vmw_setup_otable_base()
172 BUG_ON(mob->pt_level == VMW_MOBFMT_PTDEPTH_2); in vmw_setup_otable_base()
175 otable->page_table = mob; in vmw_setup_otable_base()
181 vmw_mob_destroy(mob); in vmw_setup_otable_base()
391 struct vmw_mob *mob = kzalloc(sizeof(*mob), GFP_KERNEL); in vmw_mob_create() local
393 if (unlikely(!mob)) in vmw_mob_create()
396 mob->num_pages = vmw_mob_calculate_pt_pages(data_pages); in vmw_mob_create()
398 return mob; in vmw_mob_create()
412 struct vmw_mob *mob) in vmw_mob_pt_populate() argument
414 BUG_ON(mob->pt_bo != NULL); in vmw_mob_pt_populate()
416 return vmw_bo_create_and_populate(dev_priv, mob->num_pages * PAGE_SIZE, &mob->pt_bo); in vmw_mob_pt_populate()
492 static void vmw_mob_pt_setup(struct vmw_mob *mob, in vmw_mob_pt_setup() argument
497 struct ttm_buffer_object *bo = mob->pt_bo; in vmw_mob_pt_setup()
511 mob->pt_level = 0; in vmw_mob_pt_setup()
513 ++mob->pt_level; in vmw_mob_pt_setup()
514 BUG_ON(mob->pt_level > 2); in vmw_mob_pt_setup()
522 mob->pt_root_page = vmw_piter_dma_addr(&save_pt_iter); in vmw_mob_pt_setup()
531 void vmw_mob_destroy(struct vmw_mob *mob) in vmw_mob_destroy() argument
533 if (mob->pt_bo) { in vmw_mob_destroy()
534 vmw_bo_unpin_unlocked(mob->pt_bo); in vmw_mob_destroy()
535 ttm_bo_put(mob->pt_bo); in vmw_mob_destroy()
536 mob->pt_bo = NULL; in vmw_mob_destroy()
538 kfree(mob); in vmw_mob_destroy()
548 struct vmw_mob *mob) in vmw_mob_unbind() argument
555 struct ttm_buffer_object *bo = mob->pt_bo; in vmw_mob_unbind()
569 cmd->body.mobid = mob->id; in vmw_mob_unbind()
596 struct vmw_mob *mob, in vmw_mob_bind() argument
609 mob->id = mob_id; in vmw_mob_bind()
615 mob->pt_level = VMW_MOBFMT_PTDEPTH_0; in vmw_mob_bind()
616 mob->pt_root_page = vmw_piter_dma_addr(&data_iter); in vmw_mob_bind()
617 } else if (unlikely(mob->pt_bo == NULL)) { in vmw_mob_bind()
618 ret = vmw_mob_pt_populate(dev_priv, mob); in vmw_mob_bind()
622 vmw_mob_pt_setup(mob, data_iter, num_data_pages); in vmw_mob_bind()
624 mob->pt_level += VMW_MOBFMT_PTDEPTH_1 - SVGA3D_MOBFMT_PT_1; in vmw_mob_bind()
636 cmd->body.ptDepth = mob->pt_level; in vmw_mob_bind()
637 cmd->body.base = mob->pt_root_page >> PAGE_SHIFT; in vmw_mob_bind()
647 vmw_bo_unpin_unlocked(mob->pt_bo); in vmw_mob_bind()
648 ttm_bo_put(mob->pt_bo); in vmw_mob_bind()
649 mob->pt_bo = NULL; in vmw_mob_bind()
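
Read in order, the matched lines trace the MOB (memory object) page-table lifecycle: vmw_mob_create() sizes a MOB from its number of data pages, vmw_mob_pt_populate() allocates the page-table buffer object, vmw_mob_pt_setup() fills it and records pt_root_page and pt_level, the bind/unbind paths emit the corresponding device commands, and vmw_mob_destroy() releases the page-table BO. The fragment below is a minimal sketch of that setup path, reconstructed only from the call sites listed above; it assumes the vmwgfx driver context (struct vmw_private, struct vmw_piter, the VMW_MOBFMT_* and SVGA3D_MOBFMT_* constants) and uses a hypothetical wrapper name, example_mob_setup(), so it illustrates the call order rather than compiling on its own.

/*
 * Minimal sketch of the setup path shown in the vmw_setup_otable_base()
 * lines above, assembled only from the listed call sites.  It assumes
 * the vmwgfx driver headers for struct vmw_private, struct vmw_piter,
 * struct vmw_mob and the VMW_MOBFMT_* and SVGA3D_MOBFMT_* constants,
 * and is not a standalone translation unit.
 */
static int example_mob_setup(struct vmw_private *dev_priv,
			     struct vmw_piter iter,
			     unsigned long num_data_pages)
{
	struct vmw_mob *mob;
	int ret;

	/* Line 134: size the MOB by its number of backing data pages. */
	mob = vmw_mob_create(num_data_pages);
	if (unlikely(mob == NULL))
		return -ENOMEM;

	/*
	 * The branch predicate is not part of the listing; the listed
	 * lines only show that a single data page is described directly
	 * (lines 141-142) while larger objects get a page table.
	 */
	if (num_data_pages == 1) {
		mob->pt_level = VMW_MOBFMT_PTDEPTH_0;
		mob->pt_root_page = vmw_piter_dma_addr(&iter);
	} else {
		/* Lines 144-149: allocate and fill the page-table BO. */
		ret = vmw_mob_pt_populate(dev_priv, mob);
		if (unlikely(ret != 0))
			goto out_no_populate;

		vmw_mob_pt_setup(mob, iter, num_data_pages);
		mob->pt_level += VMW_MOBFMT_PTDEPTH_1 - SVGA3D_MOBFMT_PT_1;
	}

	/*
	 * Lines 162-165: the real code now emits the device command,
	 * filling cmd->body.baseAddress from mob->pt_root_page >> PAGE_SHIFT
	 * and cmd->body.ptDepth from mob->pt_level.
	 */

	return 0;

out_no_populate:
	/* Line 181: tear the MOB down on the error path. */
	vmw_mob_destroy(mob);
	return ret;
}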