/linux-5.19.10/drivers/gpu/drm/vmwgfx/
vmwgfx_page_dirty.c:
     81  static void vmw_bo_dirty_scan_pagetable(struct vmw_buffer_object *vbo)  (argument)
     83          struct vmw_bo_dirty *dirty = vbo->dirty;
     84          pgoff_t offset = drm_vma_node_start(&vbo->base.base.vma_node);
     85          struct address_space *mapping = vbo->base.bdev->dev_mapping;
    119  static void vmw_bo_dirty_scan_mkwrite(struct vmw_buffer_object *vbo)  (argument)
    121          struct vmw_bo_dirty *dirty = vbo->dirty;
    122          unsigned long offset = drm_vma_node_start(&vbo->base.base.vma_node);
    123          struct address_space *mapping = vbo->base.bdev->dev_mapping;
    129          num_marked = wp_shared_mapping_range(vbo->base.bdev->dev_mapping,
    163  void vmw_bo_dirty_scan(struct vmw_buffer_object *vbo)  (argument)
    [all …]

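In vmwgfx a vbo is a vmw_buffer_object, the driver's wrapper around a TTM buffer object. The fragments above are its CPU dirty tracking: the scan write-protects the BO's range in the shared userspace mapping so that the next CPU write faults and can be recorded. Below is a minimal sketch of the mkwrite-style scan at lines 119-129, assuming dirty->start/dirty->end bound the tracked page range; the change_count bookkeeping is an assumption about how the driver decides when to switch tracking methods, not something shown in the excerpt.

    /* Sketch: write-protect the BO's pages in its shared mapping.  The
     * return value of wp_shared_mapping_range() (the mm helper called at
     * line 129) is the number of PTEs that were still writable, i.e. the
     * number of pages written since the previous scan. */
    static void dirty_scan_mkwrite_sketch(struct vmw_buffer_object *vbo)
    {
            struct vmw_bo_dirty *dirty = vbo->dirty;
            unsigned long offset = drm_vma_node_start(&vbo->base.base.vma_node);
            struct address_space *mapping = vbo->base.bdev->dev_mapping;
            pgoff_t num_marked;

            num_marked = wp_shared_mapping_range(mapping, dirty->start + offset,
                                                 dirty->end - dirty->start);
            if (num_marked == 0)
                    dirty->change_count++;  /* assumed heuristic: BO has gone quiet */
            else
                    dirty->change_count = 0;
    }
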
vmwgfx_validation.c:
    176          struct vmw_buffer_object *vbo)  (argument of vmw_validation_find_bo_dup)
    186          if (!vmwgfx_ht_find_item(ctx->ht, (unsigned long) vbo, &hash))
    192                  if (entry->base.bo == &vbo->base) {
    257          struct vmw_buffer_object *vbo,  (argument of vmw_validation_add_bo)
    263          bo_node = vmw_validation_find_bo_dup(ctx, vbo);
    279                  bo_node->hash.key = (unsigned long) vbo;
    288          val_buf->bo = ttm_bo_get_unless_zero(&vbo->base);
    414          struct vmw_buffer_object *vbo,  (argument of vmw_validation_res_switch_backup)
    425          val->new_backup = vbo;
    455          struct vmw_buffer_object *vbo = res->backup;  (local in vmw_validation_res_reserve)
    [all …]

vmwgfx_gem.c:
     68          struct vmw_buffer_object *vbo = vmw_buffer_object(bo);  (local in vmw_gem_pin_private)
     75          vmw_bo_pin_reserved(vbo, do_pin);
    127          struct vmw_buffer_object *vbo = vmw_buffer_object(bo);  (local in vmw_gem_destroy)
    129          WARN_ON(vbo->dirty);
    130          WARN_ON(!RB_EMPTY_ROOT(&vbo->res_tree));
    131          vmw_bo_unmap(vbo);
    132          drm_gem_object_release(&vbo->base.base);
    133          kfree(vbo);
    170          struct vmw_buffer_object *vbo;  (local in vmw_gem_object_create_ioctl)
    175                                  req->size, &handle, &vbo);
    [all …]

vmwgfx_bo.c:
    296  void vmw_bo_pin_reserved(struct vmw_buffer_object *vbo, bool pin)  (argument)
    301          struct ttm_buffer_object *bo = &vbo->base;
    344  void *vmw_bo_map_and_cache(struct vmw_buffer_object *vbo)  (argument)
    346          struct ttm_buffer_object *bo = &vbo->base;
    351          virtual = ttm_kmap_obj_virtual(&vbo->map, &not_used);
    355          ret = ttm_bo_kmap(bo, 0, bo->resource->num_pages, &vbo->map);
    359          return ttm_kmap_obj_virtual(&vbo->map, &not_used);
    371  void vmw_bo_unmap(struct vmw_buffer_object *vbo)  (argument)
    373          if (vbo->map.bo == NULL)
    376          ttm_bo_kunmap(&vbo->map);
    [all …]

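vmw_bo_map_and_cache() (lines 344-359) is a lazily created, cached kernel mapping: the first call kmaps the whole BO into vbo->map and every later call returns the cached pointer; vmw_bo_unmap() (lines 371-376) is the explicit teardown, which is why vmw_gem_destroy() in vmwgfx_gem.c above unmaps before kfree(). A sketch of the pattern assembled from the calls shown, with error logging trimmed; vbo->map is assumed to be a struct ttm_bo_kmap_obj, as the calls imply.

    /* Return a kernel virtual address for the BO, creating and caching the
     * mapping on first use.  The caller must hold the BO reserved. */
    void *map_and_cache_sketch(struct vmw_buffer_object *vbo)
    {
            struct ttm_buffer_object *bo = &vbo->base;
            bool not_used;
            void *virtual;
            int ret;

            /* Fast path: an earlier call already mapped the BO. */
            virtual = ttm_kmap_obj_virtual(&vbo->map, &not_used);
            if (virtual)
                    return virtual;

            /* Slow path: map every page and cache the result in vbo->map. */
            ret = ttm_bo_kmap(bo, 0, bo->resource->num_pages, &vbo->map);
            if (ret)
                    return NULL;

            return ttm_kmap_obj_virtual(&vbo->map, &not_used);
    }
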
vmwgfx_resource.c:
    776  void vmw_resource_unbind_list(struct vmw_buffer_object *vbo)  (argument)
    779                  .bo = &vbo->base,
    783          dma_resv_assert_held(vbo->base.base.resv);
    784          while (!RB_EMPTY_ROOT(&vbo->res_tree)) {
    785                  struct rb_node *node = vbo->res_tree.rb_node;
    797          (void) ttm_bo_wait(&vbo->base, false, false);
    995          struct vmw_buffer_object *vbo = NULL;  (local in vmw_resource_pin)
    998                  vbo = res->backup;
   1000                  ret = ttm_bo_reserve(&vbo->base, interruptible, false, NULL);
   1003                  if (!vbo->base.pin_count) {
    [all …]

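Lines 995-1003 show the reserve-then-pin idiom on a resource's backup BO: TTM requires the reservation lock to be held before pin state may change, and only the first pinner actually pins. A sketch of the idiom under that reading, reusing vmw_bo_pin_reserved() from vmwgfx_bo.c above; the real vmw_resource_pin() additionally validates placement and handles busy buffers.

    static int pin_backup_sketch(struct vmw_resource *res, bool interruptible)
    {
            struct vmw_buffer_object *vbo = res->backup;
            int ret;

            /* Pin state may only change while the BO is reserved. */
            ret = ttm_bo_reserve(&vbo->base, interruptible, false, NULL);
            if (ret)
                    return ret;

            /* First pinner wins; later callers just see pin_count > 0. */
            if (!vbo->base.pin_count)
                    vmw_bo_pin_reserved(vbo, true);

            ttm_bo_unreserve(&vbo->base);
            return 0;
    }
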
vmwgfx_drv.h:
    849  extern void vmw_resource_unbind_list(struct vmw_buffer_object *vbo);
    854  int vmw_resources_clean(struct vmw_buffer_object *vbo, pgoff_t start,
    924  extern void *vmw_bo_map_and_cache(struct vmw_buffer_object *vbo);
    925  extern void vmw_bo_unmap(struct vmw_buffer_object *vbo);
    937  static inline void vmw_bo_prio_adjust(struct vmw_buffer_object *vbo)  (argument)
    939          int i = ARRAY_SIZE(vbo->res_prios);
    942                  if (vbo->res_prios[i]) {
    943                          vbo->base.priority = i;
    948          vbo->base.priority = 3;
    960  static inline void vmw_bo_prio_add(struct vmw_buffer_object *vbo, int prio)  (argument)
    [all …]

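vmw_bo_prio_adjust() (lines 937-948) derives the BO's TTM eviction priority from per-priority counters of the resources bound to it: the highest priority with a nonzero count wins, and 3 is the fallback when nothing is bound (line 948). A reconstruction that fills in the descending scan the excerpt elides; the while (i--) form is an assumption consistent with the surrounding fragments.

    static inline void prio_adjust_sketch(struct vmw_buffer_object *vbo)
    {
            int i = ARRAY_SIZE(vbo->res_prios);

            /* Scan the counters from highest priority down; first hit wins. */
            while (i--) {
                    if (vbo->res_prios[i]) {
                            vbo->base.priority = i;
                            return;
                    }
            }

            /* No bound resources: fall back to the default priority. */
            vbo->base.priority = 3;
    }
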
vmwgfx_validation.h:
    176          struct vmw_buffer_object *vbo,
    196          struct vmw_buffer_object *vbo,

vmwgfx_drv.c:
    392          struct vmw_buffer_object *vbo;  (local in vmw_dummy_query_bo_create)
    404                            &vmw_bo_bo_free, &vbo);
    408          ret = ttm_bo_reserve(&vbo->base, false, true, NULL);
    410          vmw_bo_pin_reserved(vbo, true);
    412          ret = ttm_bo_kmap(&vbo->base, 0, 1, &map);
    420          vmw_bo_pin_reserved(vbo, false);
    421          ttm_bo_unreserve(&vbo->base);
    425          vmw_bo_unreference(&vbo);
    427          dev_priv->dummy_query_bo = vbo;

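vmw_dummy_query_bo_create() (lines 392-427) strings the BO primitives together: create, reserve (no_wait, since the object is brand new), pin, kmap a single page, initialize it, then unwind in reverse order. A sketch of that sequence assembled from the calls shown; the initialization contents and exact unwind order are assumptions inferred from the line numbers (the real driver writes SVGA query-state markers, not zeroes).

    static int dummy_query_init_sketch(struct vmw_buffer_object *vbo)
    {
            struct ttm_bo_kmap_obj map;
            bool is_iomem;
            int ret;

            /* no_wait reserve: a freshly created BO cannot be contended. */
            ret = ttm_bo_reserve(&vbo->base, false, true, NULL);
            if (ret)
                    return ret;

            vmw_bo_pin_reserved(vbo, true);         /* keep it resident */

            ret = ttm_bo_kmap(&vbo->base, 0, 1, &map);      /* map page 0 */
            if (!ret) {
                    memset(ttm_kmap_obj_virtual(&map, &is_iomem), 0, PAGE_SIZE);
                    ttm_bo_kunmap(&map);
            }

            vmw_bo_pin_reserved(vbo, false);
            ttm_bo_unreserve(&vbo->base);
            return ret;
    }
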
vmwgfx_fb.c:
    187          struct vmw_buffer_object *vbo = par->vmw_bo;  (local in vmw_fb_dirty_flush)
    198          (void) ttm_bo_reserve(&vbo->base, false, false, NULL);
    199          virtual = vmw_bo_map_and_cache(vbo);
    255          ttm_bo_unreserve(&vbo->base);

vmwgfx_execbuf.c:
     68          struct vmw_buffer_object *vbo;  (member)
   1189          reloc->vbo = vmw_bo;  (in vmw_translate_mob_ptr)
   1243          reloc->vbo = vmw_bo;  (in vmw_translate_guest_ptr)
   1720          struct vmw_buffer_object *vbo;  (local in vmw_cmd_res_switch_backup)
   1728          ret = vmw_translate_mob_ptr(dev_priv, sw_context, buf_id, &vbo);
   1732          vmw_validation_res_switch_backup(sw_context->ctx, info, vbo,
   3771          bo = &reloc->vbo->base;  (in vmw_apply_relocations)

/linux-5.19.10/fs/ntfs3/
dir.c:
    321  const struct INDEX_HDR *hdr, u64 vbo, u64 pos,  (argument of ntfs_read_hdr)
    343          if (vbo + off < pos)
    349          ctx->pos = vbo + off;
    368          u64 vbo;  (local in ntfs_readdir)
    436          vbo = (u64)bit << index_bits;
    437          if (vbo >= i_size) {
    451          vbo = (u64)bit << index_bits;
    452          if (vbo >= i_size) {
    464                             vbo + sbi->record_size, pos, name, ctx);
    494          u64 vbo = 0;  (local in ntfs_dir_count)
    [all …]

attrib.c:
   1065          u64 vbo;  (local in attr_data_read_resident)
   1076          vbo = page->index << PAGE_SHIFT;
   1078          if (vbo < data_size) {
   1081                  u32 use = data_size - vbo;
   1086                  memcpy(kaddr, data + vbo, use);
   1101          u64 vbo;  (local in attr_data_write_resident)
   1115          vbo = page->index << PAGE_SHIFT;
   1117          if (vbo < data_size) {
   1120                  u32 use = data_size - vbo;
   1124                  memcpy(data + vbo, kaddr, use);
    [all …]

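In ntfs3, vbo consistently means "virtual byte offset": a byte position inside a file or attribute value, as opposed to lbo, the logical byte offset on the volume. Lines 1065-1086 are the resident-data read path, where the offset comes from the page index and the copy is clamped to the bytes the MFT record actually holds. A compact sketch of that clamping, assuming data/data_size describe the resident attribute value; the (u64) cast is not in the excerpt and is added defensively for 32-bit builds.

    /* Fill one page cache page of a resident attribute: copy whatever
     * in-record bytes overlap the page and zero the remainder. */
    static void read_resident_page_sketch(struct page *page, const u8 *data,
                                          u64 data_size)
    {
            u64 vbo = (u64)page->index << PAGE_SHIFT;  /* virtual byte offset */
            char *kaddr = kmap_atomic(page);

            if (vbo < data_size) {
                    u32 use = min_t(u64, data_size - vbo, PAGE_SIZE);

                    memcpy(kaddr, data + vbo, use);
                    memset(kaddr + use, 0, PAGE_SIZE - use);
            } else {
                    memset(kaddr, 0, PAGE_SIZE);
            }
            kunmap_atomic(kaddr);
    }
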
file.c:
    131          loff_t vbo = (loff_t)vcn << bits;  (local in ntfs_extend_initialized_size)
    132          loff_t to = vbo + ((loff_t)clen << bits);
    140          if (vbo < pos) {
    141                  pos = vbo;
    193  static int ntfs_zero_range(struct inode *inode, u64 vbo, u64 vbo_to)  (argument)
    198          pgoff_t idx = vbo >> PAGE_SHIFT;
    199          u32 z_start = vbo & (PAGE_SIZE - 1);
    282          u64 vbo = (u64)vcn << sbi->cluster_bits;  (local in ntfs_sparse_cluster)
    286          loff_t vbo_clst = vbo & sbi->cluster_mask_inv;
    287          loff_t end = ntfs_up_cluster(sbi, vbo + bytes);
    [all …]

fsntfs.c:
    742          u64 vbo;  (local in ntfs_clear_mft_tail)
    754          vbo = (u64)from * rs;
    755          for (; from < to; from++, vbo += rs) {
    758                  err = ntfs_get_bh(sbi, run, vbo, rs, &nb);
   1085  u64 vbo, const void *buf, size_t bytes, int sync)  (argument of ntfs_sb_write_run)
   1089          u32 off = vbo & sbi->cluster_mask;
   1090          CLST lcn, clen, vcn = vbo >> cluster_bits, vcn_next;
   1133  const struct runs_tree *run, u64 vbo)  (argument of ntfs_bread_run)
   1140          if (!run_lookup_entry(run, vbo >> cluster_bits, &lcn, NULL, NULL))
   1143          lbo = ((u64)lcn << cluster_bits) + (vbo & sbi->cluster_mask);
    [all …]

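Lines 1140-1143 show the core vbo-to-lbo translation that most of these call sites build on: split the virtual byte offset into a virtual cluster number plus an intra-cluster remainder, resolve the cluster through the attribute's run list, and recombine. A sketch of that mapping using run_lookup_entry() with the signature visible above; the error code is chosen for the sketch.

    /* Translate a virtual byte offset inside an attribute into the logical
     * byte offset on the volume:
     *   vcn = vbo >> cluster_bits            which virtual cluster
     *   lcn = run_lookup_entry(run, vcn)     where that cluster lives
     *   lbo = (lcn << cluster_bits) + (vbo & cluster_mask)
     */
    static int vbo_to_lbo_sketch(struct ntfs_sb_info *sbi,
                                 const struct runs_tree *run, u64 vbo, u64 *lbo)
    {
            CLST lcn;

            if (!run_lookup_entry(run, vbo >> sbi->cluster_bits, &lcn, NULL, NULL))
                    return -ENOENT; /* unmapped: hole or corrupt run list */

            *lbo = ((u64)lcn << sbi->cluster_bits) + (vbo & sbi->cluster_mask);
            return 0;
    }
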
frecord.c:
    911          u64 vbo;  (local in ni_ins_attr_ext)
    942          vbo = is_mft_data ? ((u64)svcn << sbi->cluster_bits) : 0;
    958                       vbo <= ((u64)mi->rno << sbi->record_bits))) {
   1003          if (is_mft_data && vbo <= ((u64)rno << sbi->record_bits)) {
   1843  __u64 vbo, __u64 len)  (argument of ni_fiemap)
   1851          CLST vcn = vbo >> cluster_bits;
   1893          end = vbo + len;
   1900          while (vbo < end) {
   1941                  vbo = (u64)vcn << cluster_bits;
   1961                  vbo = (u64)vcn << cluster_bits;
    [all …]

fslog.c:
    890  static inline void *alloc_rsttbl_from_idx(struct RESTART_TABLE **tbl, u32 vbo)  (argument)
    899          if (vbo >= bytes) {
    904                  u32 bytes2idx = vbo - bytes;
    916          e = Add2Ptr(rt, vbo);
    928                  if (off == vbo) {
    950                  if (off == vbo) {
   1047          u32 vbo = (lsn << log->seq_num_bits) >> (log->seq_num_bits - 3);  (local in lsn_to_vbo)
   1049          return vbo;
   1111          u32 vbo;  (member)
   1118  static int read_log_page(struct ntfs_log *log, u32 vbo,  (argument of read_log_page)
    [all …]

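Line 1047 packs the whole LSN-to-offset conversion into one expression. An LSN keeps the $LogFile byte offset divided by 8 in its low (64 - seq_num_bits) bits and the wrap sequence number above them; shifting left by seq_num_bits discards the sequence number, and shifting back right by seq_num_bits - 3 multiplies the remainder by 8. A spelled-out equivalent, assuming 0 < seq_num_bits < 64:

    static inline u32 lsn_to_vbo_sketch(const struct ntfs_log *log, u64 lsn)
    {
            /* Low bits of the LSN hold (file offset / 8). */
            u64 off8 = lsn & ((1ull << (64 - log->seq_num_bits)) - 1);

            /* Same value as (lsn << seq_num_bits) >> (seq_num_bits - 3). */
            return (u32)(off8 << 3);
    }
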
index.c:
    208          size_t data_size, valid_size, vbo, off = bit >> 3;  (local in bmp_buf_get)
    263          vbo = off & ~(size_t)sbi->block_mask;
    265          bbuf->new_valid = vbo + blocksize;
    271          if (vbo >= valid_size) {
    273          } else if (vbo + blocksize > valid_size) {
    375          size_t vbo = from >> 3;  (local in scan_nres_bitmap)
    376          sector_t blk = (vbo & sbi->cluster_mask) >> sb->s_blocksize_bits;
    377          sector_t vblock = vbo >> sb->s_blocksize_bits;
    390          vcn = vbo >> sbi->cluster_bits;
    422          vbo = (u64)vblock << sb->s_blocksize_bits;
    [all …]

inode.c:
    518  static noinline int ntfs_get_block_vbo(struct inode *inode, u64 vbo,  (argument)
    539          if (!create && vbo >= ni->i_valid) {
    544          if (vbo >= inode->i_size) {
    560          vcn = vbo >> cluster_bits;
    561          off = vbo & sbi->cluster_mask;
    598          if (vbo >= valid)
    605          if (vbo >= valid)
    608          if (vbo + bytes > valid) {
    609                  ni->i_valid = vbo + bytes;
    612          } else if (vbo >= valid) {
    [all …]

bitmap.c:
    513          size_t wpos, wbit, iw, vbo;  (local in wnd_rescan)
    522          vbo = 0;
    533                                  vbo * 8 - prev_tail,
    548                  u32 off = vbo & sbi->cluster_mask;
    550                  if (!run_lookup_entry(&wnd->run, vbo >> cluster_bits,
    576                  wbit = vbo * 8;
    620                  vbo += blocksize;
    687          size_t vbo;  (local in wnd_map)
    695          vbo = (u64)iw << sb->s_blocksize_bits;
    697          if (!run_lookup_entry(&wnd->run, vbo >> sbi->cluster_bits, &lcn, &clen,
    [all …]

record.c:
    121          u64 vbo = (u64)mi->rno << sbi->record_bits;  (local in mi_read)
    133          err = ntfs_read_bh(sbi, run, vbo, &rec->rhdr, bpr, &mi->nb);
    152                            vbo >> sbi->cluster_bits);
    162          err = ntfs_read_bh(sbi, run, vbo, &rec->rhdr, bpr, &mi->nb);
    349          u64 vbo = (u64)rno << sbi->record_bits;  (local in mi_format_new)
    388          err = ntfs_get_bh(sbi, &ni->file.run, vbo, sbi->record_size,

ntfs_fs.h:
    442  int attr_collapse_range(struct ntfs_inode *ni, u64 vbo, u64 bytes);
    443  int attr_punch_hole(struct ntfs_inode *ni, u64 vbo, u64 bytes, u32 *frame_size);
    553  __u64 vbo, __u64 len);
    606  u64 vbo, const void *buf, size_t bytes, int sync);
    608  const struct runs_tree *run, u64 vbo);
    610  u64 vbo, void *buf, u32 bytes, struct ntfs_buffers *nb);
    611  int ntfs_read_bh(struct ntfs_sb_info *sbi, const struct runs_tree *run, u64 vbo,
    614  int ntfs_get_bh(struct ntfs_sb_info *sbi, const struct runs_tree *run, u64 vbo,
    619  struct page **pages, u32 nr_pages, u64 vbo, u32 bytes,
    623  u64 vbo, u64 *lbo, u64 *bytes);

/linux-5.19.10/drivers/gpu/drm/imx/
ipuv3-plane.c:
    376          unsigned long eba, ubo, vbo, old_ubo, old_vbo, alpha_eba;  (local in ipu_plane_atomic_check)
    465                  vbo = drm_plane_state_to_vbo(new_state);
    467                  if (vbo & 0x7 || vbo > 0xfffff8)
    472                          if (vbo != old_vbo)
    585          unsigned long eba, ubo, vbo;  (local in ipu_plane_atomic_update)
    706                  vbo = drm_plane_state_to_vbo(new_state);
    710                          swap(ubo, vbo);
    713                                     fb->pitches[1], ubo, vbo);
    716                  "phy = %lu %lu %lu, x = %d, y = %d", eba, ubo, vbo,

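In the i.MX IPUv3 driver, vbo is the V-plane base offset of a planar YUV framebuffer: the distance in bytes from the Y plane's base address (EBA) to the start of the V plane, with ubo its U-plane counterpart; the swap(ubo, vbo) at line 710 evidently serves YVU layouts, where the plane order is reversed. Line 467 encodes the IDMAC's hardware limits on that offset, restated below as a sketch:

    /* An IPUv3 U/V plane offset must be 8-byte aligned and fit the IDMAC
     * offset field; 0xfffff8 is the largest encodable value, per the
     * check at line 467. */
    static bool ipu_uv_offset_ok_sketch(unsigned long offset)
    {
            return !(offset & 0x7) && offset <= 0xfffff8;
    }
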
/linux-5.19.10/drivers/gpu/drm/vc4/
vc4_validate.c:
    899          struct drm_gem_cma_object *vbo =  (local in validate_gl_shader_rec)
    908                            to_vc4_bo(&vbo->base)->write_seqno);
    913          if (vbo->base.size < offset ||
    914              vbo->base.size - offset < attr_size) {
    916                    offset, attr_size, vbo->base.size);
    921          max_index = ((vbo->base.size - offset - attr_size) /
    931          *(uint32_t *)(pkt_v + o) = vbo->paddr + offset;

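In vc4, finally, vbo is a vertex buffer object handed in by userspace, so the kernel must validate it before the GPU dereferences it: lines 913-921 check that a single attribute record fits inside the BO and derive the largest vertex index the shader may safely use. A sketch of that computation; the stride parameter and its zero handling are assumptions, since the excerpt does not show the divisor.

    /* Largest vertex index that keeps every fetch of this attribute inside
     * the BO: index * stride + offset + attr_size must not exceed size. */
    static int max_vertex_index_sketch(size_t bo_size, uint32_t offset,
                                       uint32_t attr_size, uint32_t stride,
                                       uint32_t *max_index)
    {
            if (bo_size < offset || bo_size - offset < attr_size)
                    return -EINVAL;         /* even vertex 0 would overflow */

            /* With stride 0 every vertex reads the same bytes. */
            *max_index = stride ? (bo_size - offset - attr_size) / stride : ~0u;
            return 0;
    }
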