/linux-6.1.9/arch/hexagon/kernel/ |
D | vdso.c |
    52   unsigned long vdso_base;  in arch_setup_additional_pages() local
    59   vdso_base = STACK_TOP;  in arch_setup_additional_pages()
    61   vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);  in arch_setup_additional_pages()
    62   if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
    63   ret = vdso_base;  in arch_setup_additional_pages()
    68   ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,  in arch_setup_additional_pages()
    76   mm->context.vdso = (void *)vdso_base;  in arch_setup_additional_pages()
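Taken together, the hexagon hits above all sit inside one short function. What follows is a hedged reconstruction of that flow, not a quote of arch/hexagon/kernel/vdso.c; the vdso_page pointer is assumed to be a single struct page set up at boot.

    #include <linux/binfmts.h>
    #include <linux/mm.h>

    static struct page *vdso_page;	/* assumed: allocated at init time */

    int arch_setup_additional_pages(struct linux_binprm *bprm, int uses_interp)
    {
    	int ret;
    	unsigned long vdso_base;
    	struct mm_struct *mm = current->mm;

    	if (mmap_write_lock_killable(mm))
    		return -EINTR;

    	/* Search for a free page-sized slot, hinting at STACK_TOP. */
    	vdso_base = STACK_TOP;
    	vdso_base = get_unmapped_area(NULL, vdso_base, PAGE_SIZE, 0, 0);
    	if (IS_ERR_VALUE(vdso_base)) {
    		ret = vdso_base;
    		goto up_fail;
    	}

    	/* Install the single vDSO page read/exec into the new mm. */
    	ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,
    				      VM_READ | VM_EXEC |
    				      VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
    				      &vdso_page);
    	if (ret)
    		goto up_fail;

    	/* Remember where it landed; the ELF loader hands this to
    	 * userspace via AT_SYSINFO_EHDR. */
    	mm->context.vdso = (void *)vdso_base;

    up_fail:
    	mmap_write_unlock(mm);
    	return ret;
    }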
|
/linux-6.1.9/arch/csky/kernel/ |
D | vdso.c |
    59   unsigned long vdso_base, vdso_len;  in arch_setup_additional_pages() local
    65   vdso_base = get_unmapped_area(NULL, 0, vdso_len, 0, 0);  in arch_setup_additional_pages()
    66   if (IS_ERR_VALUE(vdso_base)) {  in arch_setup_additional_pages()
    67   ret = vdso_base;  in arch_setup_additional_pages()
    76   mm->context.vdso = (void *)vdso_base;  in arch_setup_additional_pages()
    79   install_special_mapping(mm, vdso_base, vdso_pages << PAGE_SHIFT,  in arch_setup_additional_pages()
    88   vdso_base += (vdso_pages << PAGE_SHIFT);  in arch_setup_additional_pages()
    89   ret = install_special_mapping(mm, vdso_base, PAGE_SIZE,  in arch_setup_additional_pages()
|
/linux-6.1.9/arch/riscv/kernel/ |
D | vdso.c |
    254  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;  in __setup_additional_pages() local
    263  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);  in __setup_additional_pages()
    264  if (IS_ERR_VALUE(vdso_base)) {  in __setup_additional_pages()
    265  ret = ERR_PTR(vdso_base);  in __setup_additional_pages()
    269  ret = _install_special_mapping(mm, vdso_base, VVAR_SIZE,  in __setup_additional_pages()
    274  vdso_base += VVAR_SIZE;  in __setup_additional_pages()
    275  mm->context.vdso = (void *)vdso_base;  in __setup_additional_pages()
    278  _install_special_mapping(mm, vdso_base, vdso_text_len,  in __setup_additional_pages()
|
/linux-6.1.9/arch/powerpc/kernel/ |
D | vdso.c |
    196  unsigned long vdso_size, vdso_base, mappings_size;  in __arch_setup_additional_pages() local
    217  vdso_base = get_unmapped_area(NULL, 0, mappings_size, 0, 0);  in __arch_setup_additional_pages()
    218  if (IS_ERR_VALUE(vdso_base))  in __arch_setup_additional_pages()
    219  return vdso_base;  in __arch_setup_additional_pages()
    222  vdso_base = ALIGN(vdso_base, VDSO_ALIGNMENT);  in __arch_setup_additional_pages()
    229  mm->context.vdso = (void __user *)vdso_base + vvar_size;  in __arch_setup_additional_pages()
    231  vma = _install_special_mapping(mm, vdso_base, vvar_size,  in __arch_setup_additional_pages()
    247  vma = _install_special_mapping(mm, vdso_base + vvar_size, vdso_size,  in __arch_setup_additional_pages()
    251  do_munmap(mm, vdso_base, vvar_size, NULL);  in __arch_setup_additional_pages()
|
/linux-6.1.9/arch/arm64/kernel/ |
D | vdso.c |
    217  unsigned long vdso_base, vdso_text_len, vdso_mapping_len;  in __setup_additional_pages() local
    227  vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);  in __setup_additional_pages()
    228  if (IS_ERR_VALUE(vdso_base)) {  in __setup_additional_pages()
    229  ret = ERR_PTR(vdso_base);  in __setup_additional_pages()
    233  ret = _install_special_mapping(mm, vdso_base, VVAR_NR_PAGES * PAGE_SIZE,  in __setup_additional_pages()
    242  vdso_base += VVAR_NR_PAGES * PAGE_SIZE;  in __setup_additional_pages()
    243  mm->context.vdso = (void *)vdso_base;  in __setup_additional_pages()
    244  ret = _install_special_mapping(mm, vdso_base, vdso_text_len,  in __setup_additional_pages()
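The riscv and arm64 hits share one shape: a single get_unmapped_area() call sized for the vvar data pages plus the vDSO text, followed by two adjacent special mappings. The sketch below shows that pattern; vvar_mapping and vdso_text_mapping are illustrative names, not quotes from either file.

    #include <linux/err.h>
    #include <linux/mm.h>

    /* Assumed mapping descriptors; the real ones carry .fault/.mremap hooks. */
    static struct vm_special_mapping vvar_mapping;
    static struct vm_special_mapping vdso_text_mapping;

    static void *map_vdso_pair(struct mm_struct *mm, unsigned long vdso_text_len)
    {
    	unsigned long vdso_base, vdso_mapping_len;
    	void *ret;

    	vdso_mapping_len = VVAR_NR_PAGES * PAGE_SIZE + vdso_text_len;

    	/* One hole big enough for data + text, so the two stay adjacent. */
    	vdso_base = get_unmapped_area(NULL, 0, vdso_mapping_len, 0, 0);
    	if (IS_ERR_VALUE(vdso_base))
    		return ERR_PTR(vdso_base);

    	/* vvar/data pages first: readable, never executable. */
    	ret = _install_special_mapping(mm, vdso_base, VVAR_NR_PAGES * PAGE_SIZE,
    				       VM_READ | VM_MAYREAD | VM_PFNMAP,
    				       &vvar_mapping);
    	if (IS_ERR(ret))
    		return ret;

    	/* Text follows immediately; this is the address userspace sees as
    	 * the vDSO ELF header (AT_SYSINFO_EHDR). */
    	vdso_base += VVAR_NR_PAGES * PAGE_SIZE;
    	mm->context.vdso = (void *)vdso_base;
    	ret = _install_special_mapping(mm, vdso_base, vdso_text_len,
    				       VM_READ | VM_EXEC |
    				       VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
    				       &vdso_text_mapping);
    	return ret;
    }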
|
/linux-6.1.9/arch/parisc/include/asm/ |
D | vdso.h |
    12   #define VDSO64_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso64_offset_##name))
    13   #define VDSO32_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso32_offset_##name))
|
D | mmu.h |
    7    unsigned long vdso_base;  member
|
D | elf.h |
    368  #define VDSO_CURRENT_BASE current->mm->context.vdso_base
|
/linux-6.1.9/arch/s390/include/asm/ |
D | vdso.h |
    14   #define VDSO64_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso64_offset_##name))
    16   #define VDSO32_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso32_offset_##name))
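For context, the parisc and s390 macros above resolve per-process symbol addresses: a build-time generated header supplies vdso64_offset_<name> / vdso32_offset_<name> constants, and the macro adds them to the vdso_base saved in mm->context. A small illustrative use follows; the "sigreturn" symbol name is only an example, not a claim about either arch's symbol table.

    /* Expands to tsk->mm->context.vdso_base + vdso64_offset_sigreturn,
     * i.e. the user-space address of a vDSO-provided trampoline. */
    static void __user *vdso_sigreturn_addr(struct task_struct *tsk)
    {
    	return (void __user *)VDSO64_SYMBOL(tsk, sigreturn);
    }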
|
D | mmu.h |
    19   unsigned long vdso_base;  member
|
D | elf.h |
    294  (unsigned long)current->mm->context.vdso_base); \
|
/linux-6.1.9/arch/parisc/kernel/ |
D | vdso.c |
    32   current->mm->context.vdso_base = vma->vm_start;  in vdso_mremap()
    91   current->mm->context.vdso_base = vdso_text_start;  in arch_setup_additional_pages()
|
/linux-6.1.9/arch/powerpc/include/asm/ |
D | mmu_context.h |
    265  unsigned long vdso_base = (unsigned long)mm->context.vdso;  in arch_unmap() local
    267  if (start <= vdso_base && vdso_base < end)  in arch_unmap()
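The two powerpc hits above are the interesting part of arch_unmap(). Reconstructed in full (this mirrors the check shown; clearing the pointer is the natural consequence), it drops the cached vDSO address when an munmap() range covers the vDSO start:

    static inline void arch_unmap(struct mm_struct *mm,
    			      unsigned long start, unsigned long end)
    {
    	unsigned long vdso_base = (unsigned long)mm->context.vdso;

    	/* If the unmapped range swallows the vDSO, forget its address so
    	 * nothing (e.g. signal delivery) later jumps into freed space. */
    	if (start <= vdso_base && vdso_base < end)
    		mm->context.vdso = NULL;
    }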
|
/linux-6.1.9/arch/loongarch/kernel/ |
D | vdso.c |
    76   static unsigned long vdso_base(void)  in vdso_base() function
    106  data_addr = get_unmapped_area(NULL, vdso_base(), size, 0, 0);  in arch_setup_additional_pages()
|
/linux-6.1.9/arch/mips/kernel/ |
D | vdso.c |
    72   static unsigned long vdso_base(void)  in vdso_base() function
    131  base = get_unmapped_area(NULL, vdso_base(), size, 0, 0);  in arch_setup_additional_pages()
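Unlike the arches above, mips and loongarch wrap the mapping hint in a vdso_base() helper instead of hard-coding it. A sketch of what such a helper typically does follows; the real implementations differ in details, and VDSO_RANDOMIZE_SIZE is an arch constant assumed here to be a power of two.

    #include <linux/random.h>
    #include <linux/sched.h>

    static unsigned long vdso_base(void)
    {
    	unsigned long base = STACK_TOP;

    	if (current->flags & PF_RANDOMIZE) {
    		/* Add a page-aligned random offset so the vDSO does not
    		 * land at a predictable address above the stack. */
    		base += get_random_u32() & (VDSO_RANDOMIZE_SIZE - 1);
    		base = PAGE_ALIGN(base);
    	}

    	return base;
    }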
|
/linux-6.1.9/arch/s390/kernel/ |
D | vdso.c |
    139  current->mm->context.vdso_base = vma->vm_start;  in vdso_mremap()
    205  current->mm->context.vdso_base = vdso_text_start;  in map_vdso()
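The vdso_mremap() hits (parisc earlier, s390 here) come from the .mremap hook of the vDSO's vm_special_mapping. A minimal sketch of such a hook: when userspace mremap()s the vDSO, record the new start so vdso_base, and with it VDSO64_SYMBOL(), keeps pointing at live text.

    #include <linux/mm_types.h>

    static int vdso_mremap(const struct vm_special_mapping *sm,
    		       struct vm_area_struct *vma)
    {
    	/* Keep the per-mm bookkeeping in sync with the moved VMA. */
    	current->mm->context.vdso_base = vma->vm_start;

    	return 0;
    }

    /* Wired up via the mapping descriptor, e.g. (illustrative only):
     *   static const struct vm_special_mapping vdso_text_mapping = {
     *           .name   = "[vdso]",
     *           .mremap = vdso_mremap,
     *   };
     */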
|