
Searched refs:vm_end (Results 1 – 25 of 223) sorted by relevance


/linux-6.6.21/mm/
nommu.c
103 return vma->vm_end - vma->vm_start; in kobjsize()
433 BUG_ON(last->vm_end <= last->vm_start); in validate_nommu_regions()
434 BUG_ON(last->vm_top < last->vm_end); in validate_nommu_regions()
440 BUG_ON(region->vm_end <= region->vm_start); in validate_nommu_regions()
441 BUG_ON(region->vm_top < region->vm_end); in validate_nommu_regions()
586 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in delete_vma_from_mm()
682 if (vma->vm_end != end) in find_vma_exact()
901 vma->vm_region->vm_top = vma->vm_region->vm_end; in do_mmap_shared_file()
937 vma->vm_region->vm_top = vma->vm_region->vm_end; in do_mmap_private()
970 region->vm_end = region->vm_start + len; in do_mmap_private()
[all …]
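Taken together, the nommu.c hits above rest on one invariant: a VMA (or nommu region) covers the half-open byte range [vm_start, vm_end), so vm_end must stay strictly above vm_start and the mapping's size in bytes is vm_end - vm_start. A minimal sketch of that convention (the helper name is illustrative, not a kernel API):

    /* Assumes <linux/mm.h>; mirrors kobjsize() and the BUG_ON() checks above. */
    static inline unsigned long vma_len_bytes(const struct vm_area_struct *vma)
    {
            WARN_ON_ONCE(vma->vm_end <= vma->vm_start);  /* same invariant as validate_nommu_regions() */
            return vma->vm_end - vma->vm_start;          /* vm_end is exclusive */
    }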
mmap.c
299 if (VM_WARN_ON_ONCE_MM(vma->vm_end != vmi_end, mm)) in validate_mm()
376 unsigned long vm_end = min(end, vma->vm_end); in count_vma_pages_range() local
378 nr_pages += PHYS_PFN(vm_end - vm_start); in count_vma_pages_range()
400 vma_iter_config(&vmi, vma->vm_start, vma->vm_end); in vma_link()
466 uprobe_munmap(vp->vma, vp->vma->vm_start, vp->vma->vm_end); in vma_prepare()
470 vp->adj_next->vm_end); in vma_prepare()
556 vp->remove->vm_end); in vma_complete()
564 WARN_ON_ONCE(vp->vma->vm_end < vp->remove->vm_end); in vma_complete()
639 if (next && (vma != next) && (end == next->vm_end)) { in vma_expand()
654 VM_WARN_ON(vma->vm_start < start || vma->vm_end > end); in vma_expand()
[all …]
msync.c
90 fend = fstart + (min(end, vma->vm_end) - start) - 1; in SYSCALL_DEFINE3()
91 start = vma->vm_end; in SYSCALL_DEFINE3()
107 vma = find_vma(mm, vma->vm_end); in SYSCALL_DEFINE3()
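The msync() loop above walks the address space by re-looking up the VMA that starts at or after the current vma->vm_end, clamping each pass to min(end, vma->vm_end). A hedged sketch of that walk, assuming the caller holds mmap_read_lock() (the helper name is illustrative):

    /* Visit every VMA overlapping [start, end), advancing via vm_end the way
     * SYSCALL_DEFINE3(msync) does above. Caller holds mmap_read_lock(mm). */
    static void visit_vmas_in_range(struct mm_struct *mm,
                                    unsigned long start, unsigned long end)
    {
            struct vm_area_struct *vma = find_vma(mm, start);

            while (vma && vma->vm_start < end) {
                    unsigned long stop = min(end, vma->vm_end);  /* clamp to this VMA */

                    /* ... operate on [max(start, vma->vm_start), stop) ... */
                    start = stop;
                    vma = find_vma(mm, vma->vm_end);  /* next VMA above this one */
            }
    }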
madvise.c
169 if (end != vma->vm_end) { in madvise_update_vma()
780 if (range.start >= vma->vm_end) in madvise_free_single_vma()
782 range.end = min(vma->vm_end, end_addr); in madvise_free_single_vma()
887 if (end > vma->vm_end) { in madvise_dontneed_free()
900 end = vma->vm_end; in madvise_dontneed_free()
931 if (!vma || start >= vma->vm_end) { in madvise_populate()
937 tmp_end = min_t(unsigned long, end, vma->vm_end); in madvise_populate()
1265 tmp = vma->vm_end; in madvise_walk_vmas()
1274 if (prev && start < prev->vm_end) in madvise_walk_vmas()
1275 start = prev->vm_end; in madvise_walk_vmas()
[all …]
mremap.c
615 if (!err && vma->vm_end != old_addr + old_len) in move_vma()
681 if (vma->vm_end > old_addr + old_len) in move_vma()
682 account_end = vma->vm_end; in move_vma()
710 vma->vm_end == (old_addr + old_len)) in move_vma()
775 if (old_len > vma->vm_end - addr) in vma_to_resize()
889 unsigned long end = vma->vm_end + delta; in vma_expandable()
891 if (end < vma->vm_end) /* overflow */ in vma_expandable()
893 if (find_vma_intersection(vma->vm_mm, vma->vm_end, end)) in vma_expandable()
1031 if (old_len == vma->vm_end - addr) { in SYSCALL_DEFINE5()
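vma_expandable() above shows the two conditions for growing a mapping in place: the new end must not wrap around the address space, and nothing else may be mapped between the old and new end. A hedged sketch of the same test (locking and accounting omitted; the function name is illustrative):

    /* Can this VMA grow by delta bytes without moving? Mirrors vma_expandable(). */
    static bool can_grow_in_place(struct vm_area_struct *vma, unsigned long delta)
    {
            unsigned long new_end = vma->vm_end + delta;

            if (new_end < vma->vm_end)  /* overflow past the top of the address space */
                    return false;
            return !find_vma_intersection(vma->vm_mm, vma->vm_end, new_end);
    }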
internal.h
649 if (address < vma->vm_start || address >= vma->vm_end) in vma_pgoff_address()
689 if (address < vma->vm_start || address > vma->vm_end) in vma_address_end()
690 address = vma->vm_end; in vma_address_end()
1111 vma->vm_end, vmi->mas.index, vmi->mas.last); in vma_iter_store()
1116 vmi->mas.last, vma->vm_start, vma->vm_start, vma->vm_end, in vma_iter_store()
1125 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store()
1136 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
pagewalk.c
506 next = min(end, vma->vm_end); in walk_page_range()
507 vma = find_vma(mm, vma->vm_end); in walk_page_range()
577 if (start < vma->vm_start || end > vma->vm_end) in walk_page_range_vma()
600 return __walk_page_range(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
665 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_mapping()
mlock.c
443 if (end != vma->vm_end) { in mlock_fixup()
511 tmp = vma->vm_end; in apply_vma_lock_flags()
552 if (end < vma->vm_end) { in count_mm_mlocked_page_nr()
556 count += vma->vm_end - vma->vm_start; in count_mm_mlocked_page_nr()
697 mlock_fixup(&vmi, vma, &prev, vma->vm_start, vma->vm_end, in apply_mlockall_flags()
/linux-6.6.21/mm/damon/
vaddr-test.h
28 mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1); in __link_vmas()
72 (struct vm_area_struct) {.vm_start = 10, .vm_end = 20}, in damon_test_three_regions_in_vmas()
73 (struct vm_area_struct) {.vm_start = 20, .vm_end = 25}, in damon_test_three_regions_in_vmas()
74 (struct vm_area_struct) {.vm_start = 200, .vm_end = 210}, in damon_test_three_regions_in_vmas()
75 (struct vm_area_struct) {.vm_start = 210, .vm_end = 220}, in damon_test_three_regions_in_vmas()
76 (struct vm_area_struct) {.vm_start = 300, .vm_end = 305}, in damon_test_three_regions_in_vmas()
77 (struct vm_area_struct) {.vm_start = 307, .vm_end = 330}, in damon_test_three_regions_in_vmas()
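Note the recurring vm_end - 1 in the vma_iter helpers from internal.h above and in this test: the maple tree stores inclusive ranges, so the exclusive vm_end is converted to the last byte the VMA covers (the trace events under include/trace/events/ below record vm_end - 1 for the same reason). A hedged sketch of that conversion, assuming <linux/maple_tree.h>:

    /* Store a VMA under its address range the way __link_vmas()/vma_iter_store()
     * do above: the tree's range is inclusive, the VMA's [vm_start, vm_end) is not. */
    static void store_vma(struct ma_state *mas, struct vm_area_struct *vma)
    {
            mas_set_range(mas, vma->vm_start, vma->vm_end - 1);  /* last byte, inclusive */
            mas_store(mas, vma);
    }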
/linux-6.6.21/fs/proc/
task_nommu.c
35 size += region->vm_end - region->vm_start; in task_mem()
37 size = vma->vm_end - vma->vm_start; in task_mem()
46 slack = region->vm_end - vma->vm_end; in task_mem()
89 vsize += vma->vm_end - vma->vm_start; in task_vsize()
109 size += region->vm_end - region->vm_start; in task_statm()
150 vma->vm_end, in nommu_vma_show()
/linux-6.6.21/include/trace/events/
mmap.h
80 __field(unsigned long, vm_end)
87 __entry->vm_end = vma->vm_end - 1;
93 (unsigned long) __entry->vm_end
fs_dax.h
17 __field(unsigned long, vm_end)
30 __entry->vm_end = vmf->vma->vm_end;
47 __entry->vm_end,
/linux-6.6.21/drivers/gpu/drm/
drm_vm.c
78 if (efi_range_is_wc(vma->vm_start, vma->vm_end - in drm_io_prot()
240 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_shm_close()
395 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_open_locked()
421 vma->vm_start, vma->vm_end - vma->vm_start); in drm_vm_close_locked()
465 unsigned long length = vma->vm_end - vma->vm_start; in drm_mmap_dma()
470 vma->vm_start, vma->vm_end, vma->vm_pgoff); in drm_mmap_dma()
532 vma->vm_start, vma->vm_end, vma->vm_pgoff); in drm_mmap_locked()
559 if (map->size < vma->vm_end - vma->vm_start) in drm_mmap_locked()
600 vma->vm_end - vma->vm_start, in drm_mmap_locked()
606 vma->vm_start, vma->vm_end, (unsigned long long)(map->offset + offset)); in drm_mmap_locked()
[all …]
/linux-6.6.21/scripts/coccinelle/api/
vma_pages.cocci
22 * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
32 - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
44 (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
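The semantic patch above rewrites the open-coded page count into the vma_pages() helper from <linux/mm.h>; the two forms below are equivalent, and the second is what the rule suggests:

    static unsigned long npages_open_coded(struct vm_area_struct *vma)
    {
            return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;  /* form the patch flags */
    }

    static unsigned long npages_helper(struct vm_area_struct *vma)
    {
            return vma_pages(vma);  /* preferred replacement */
    }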
/linux-6.6.21/drivers/char/
mspec.c
76 unsigned long vm_end; /* Original (unsplit) end. */ member
114 last_index = (vdata->vm_end - vdata->vm_start) >> PAGE_SHIFT; in mspec_close()
203 vdata->vm_end = vma->vm_end; in mspec_mmap()
/linux-6.6.21/arch/x86/um/
mem_32.c
18 gate_vma.vm_end = FIXADDR_USER_END; in gate_vma_init()
49 return (addr >= vma->vm_start) && (addr < vma->vm_end); in in_gate_area()
/linux-6.6.21/drivers/media/v4l2-core/
videobuf-vmalloc.c
56 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_open()
68 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_close()
233 pages = PAGE_ALIGN(vma->vm_end - vma->vm_start); in __videobuf_mmap_mapper()
254 map, q, vma->vm_start, vma->vm_end, in __videobuf_mmap_mapper()
videobuf-dma-contig.c
67 map, map->count, vma->vm_start, vma->vm_end); in videobuf_vm_open()
79 map, map->count, vma->vm_start, vma->vm_end); in videobuf_vm_close()
177 if ((untagged_baddr + mem->size) > vma->vm_end) in videobuf_dma_contig_user_get()
321 map, q, vma->vm_start, vma->vm_end, in __videobuf_mmap_mapper()
/linux-6.6.21/drivers/media/common/videobuf2/
videobuf2-memops.c
96 vma->vm_end); in vb2_common_vm_open()
114 vma->vm_end); in vb2_common_vm_close()
/linux-6.6.21/drivers/soc/qcom/
rmtfs_mem.c
136 if (vma->vm_end - vma->vm_start > rmtfs_mem->size) { in qcom_rmtfs_mem_mmap()
139 vma->vm_end, vma->vm_start, in qcom_rmtfs_mem_mmap()
140 (vma->vm_end - vma->vm_start), &rmtfs_mem->size); in qcom_rmtfs_mem_mmap()
148 vma->vm_end - vma->vm_start, in qcom_rmtfs_mem_mmap()
/linux-6.6.21/drivers/dax/
device.c
35 if (vma->vm_start & mask || vma->vm_end & mask) { in check_vma()
38 current->comm, func, vma->vm_start, vma->vm_end, in check_vma()
162 (pmd_addr + PMD_SIZE) > vmf->vma->vm_end) in __dev_dax_pmd_fault()
207 (pud_addr + PUD_SIZE) > vmf->vma->vm_end) in __dev_dax_pud_fault()
240 vmf->vma->vm_start, vmf->vma->vm_end, order); in dev_dax_huge_fault()
/linux-6.6.21/kernel/bpf/
task_iter.c
513 info->prev_vm_end = curr_vma->vm_end; in task_vma_seq_get_next()
559 curr_vma = find_vma(curr_mm, curr_vma->vm_end); in task_vma_seq_get_next()
572 curr_vma->vm_end == info->prev_vm_end) in task_vma_seq_get_next()
573 curr_vma = find_vma(curr_mm, curr_vma->vm_end); in task_vma_seq_get_next()
673 info->prev_vm_end = info->vma->vm_end; in task_vma_seq_stop()
806 if (vma && vma->vm_start <= start && vma->vm_end > start) { in BPF_CALL_5()
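The BPF_CALL_5() check above, like in_gate_area() in arch/x86/um/ earlier, is the standard containment test: because vm_end is exclusive, an address belongs to a VMA only when vm_start <= addr and addr < vm_end. A one-line sketch (helper name illustrative):

    /* True if addr falls inside vma; vm_end is one past the last mapped byte. */
    static inline bool vma_contains(const struct vm_area_struct *vma, unsigned long addr)
    {
            return addr >= vma->vm_start && addr < vma->vm_end;
    }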
/linux-6.6.21/arch/powerpc/include/asm/
fb.h
13 vma->vm_end - vma->vm_start, in fb_pgprotect()
/linux-6.6.21/arch/powerpc/kvm/
book3s_hv_uvmem.c
416 ret = ksm_madvise(vma, vma->vm_start, vma->vm_end, in kvmppc_memslot_page_merge()
423 start = vma->vm_end; in kvmppc_memslot_page_merge()
424 } while (end > vma->vm_end); in kvmppc_memslot_page_merge()
628 if (!vma || addr >= vma->vm_end) { in kvmppc_uvmem_drop_pages()
812 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_uv_migrate_mem_slot()
973 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_h_svm_page_in()
1073 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_h_svm_page_out()
/linux-6.6.21/drivers/sbus/char/
flash.c
66 if (vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)) > size) in flash_mmap()
67 size = vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)); in flash_mmap()
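Most of the driver hits on this page follow the same mmap() validation pattern seen in flash_mmap() and qcom_rmtfs_mem_mmap(): the requested window, vm_end - vm_start offset by vm_pgoff, must fit inside the backing resource before anything is remapped. A hedged sketch of that length check (names are illustrative, error paths trimmed):

    /* Reject mmap() requests that extend past the device's backing resource. */
    static int check_mmap_span(struct vm_area_struct *vma, resource_size_t res_size)
    {
            unsigned long span = vma->vm_end - vma->vm_start;
            unsigned long off  = vma->vm_pgoff << PAGE_SHIFT;

            if (off > res_size || span > res_size - off)
                    return -EINVAL;  /* window runs past the end of the resource */
            return 0;                /* caller goes on to remap_pfn_range() etc. */
    }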
