Lines matching refs:addr
16 #define COLOUR_ALIGN(addr,pgoff) \ argument
17 ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
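The two COLOUR_ALIGN lines above show only the operand that references addr; the pgoff half of the macro is omitted from the listing because it does not mention addr. Below is a minimal, self-contained sketch of the same SHMLBA colour-alignment arithmetic as used on VIVT-cached architectures such as ARM. The SHMLBA and PAGE_SHIFT values are assumptions for illustration (16 KiB is a common SHMLBA on such caches); this is not the kernel source.

/* Sketch of SHMLBA colour alignment, mirroring COLOUR_ALIGN above.
 * SHMLBA and PAGE_SHIFT are assumed values for the example. */
#include <stdio.h>

#define PAGE_SHIFT      12
#define SHMLBA          (4UL << PAGE_SHIFT)     /* assumed 16 KiB */

#define COLOUR_ALIGN(addr, pgoff) \
        ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
         (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))

int main(void)
{
        unsigned long addr  = 0x12345678UL;     /* unaligned hint */
        unsigned long pgoff = 3;                /* file offset in pages */

        /* Round the hint up to an SHMLBA boundary, then add the colour
         * implied by the file offset so every mapping of that offset
         * lands on the same cache colour. */
        printf("%#lx -> %#lx\n", addr, COLOUR_ALIGN(addr, pgoff));
        return 0;
}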
30 arch_get_unmapped_area(struct file *filp, unsigned long addr, in arch_get_unmapped_area() argument
51 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area()
53 return addr; in arch_get_unmapped_area()
59 if (addr) { in arch_get_unmapped_area()
61 addr = COLOUR_ALIGN(addr, pgoff); in arch_get_unmapped_area()
63 addr = PAGE_ALIGN(addr); in arch_get_unmapped_area()
65 vma = find_vma(mm, addr); in arch_get_unmapped_area()
66 if (TASK_SIZE - len >= addr && in arch_get_unmapped_area()
67 (!vma || addr + len <= vm_start_gap(vma))) in arch_get_unmapped_area()
68 return addr; in arch_get_unmapped_area()
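The arch_get_unmapped_area() lines above appear to implement a common hint check: a MAP_FIXED shared mapping whose address does not already sit on a matching colour is rejected (the "(addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)" test), while an ordinary hint is colour- or page-aligned and accepted only if it fits below TASK_SIZE with a large enough gap before the next VMA. The sketch below restates that hint-acceptance pattern outside the kernel. struct vma, find_vma_stub(), check_hint() and the TASK_SIZE value are simplified stand-ins (the real code uses vm_area_struct, find_vma() and vm_start_gap()), so treat it as an assumption-laden illustration, not the kernel implementation.

/* Sketch of the hint-acceptance pattern from arch_get_unmapped_area()
 * above; all types and helpers here are simplified stand-ins. */
#include <stdbool.h>
#include <stddef.h>

#define PAGE_SHIFT      12
#define PAGE_SIZE       (1UL << PAGE_SHIFT)
#define PAGE_MASK       (~(PAGE_SIZE - 1))
#define PAGE_ALIGN(x)   (((x) + PAGE_SIZE - 1) & PAGE_MASK)
#define SHMLBA          (4UL << PAGE_SHIFT)     /* assumed */
#define TASK_SIZE       0xbf000000UL            /* assumed 32-bit layout */

#define COLOUR_ALIGN(addr, pgoff) \
        ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
         (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))

struct vma { unsigned long start; };    /* stand-in for vm_area_struct */

/* Stand-in for find_vma(): pretend nothing is mapped above the hint. */
static struct vma *find_vma_stub(unsigned long addr)
{
        (void)addr;
        return NULL;
}

/* Return a usable hint address, or 0 to fall back to a full search. */
static unsigned long check_hint(unsigned long addr, unsigned long len,
                                unsigned long pgoff, bool colour_align)
{
        struct vma *vma;

        if (!addr)
                return 0;

        /* Shared aliasing mappings need cache-colour alignment,
         * everything else only page alignment. */
        addr = colour_align ? COLOUR_ALIGN(addr, pgoff) : PAGE_ALIGN(addr);

        /* Accept the hint only if it stays below TASK_SIZE and the gap
         * before the next mapping is big enough for len bytes. */
        vma = find_vma_stub(addr);
        if (TASK_SIZE - len >= addr &&
            (!vma || addr + len <= vma->start))
                return addr;

        return 0;
}

int main(void)
{
        return check_hint(0x40001234UL, 2 * PAGE_SIZE, 3, true) ? 0 : 1;
}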
87 unsigned long addr = addr0; in arch_get_unmapped_area_topdown() local
105 (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) in arch_get_unmapped_area_topdown()
107 return addr; in arch_get_unmapped_area_topdown()
111 if (addr) { in arch_get_unmapped_area_topdown()
113 addr = COLOUR_ALIGN(addr, pgoff); in arch_get_unmapped_area_topdown()
115 addr = PAGE_ALIGN(addr); in arch_get_unmapped_area_topdown()
116 vma = find_vma(mm, addr); in arch_get_unmapped_area_topdown()
117 if (TASK_SIZE - len >= addr && in arch_get_unmapped_area_topdown()
118 (!vma || addr + len <= vm_start_gap(vma))) in arch_get_unmapped_area_topdown()
119 return addr; in arch_get_unmapped_area_topdown()
128 addr = vm_unmapped_area(&info); in arch_get_unmapped_area_topdown()
136 if (addr & ~PAGE_MASK) { in arch_get_unmapped_area_topdown()
137 VM_BUG_ON(addr != -ENOMEM); in arch_get_unmapped_area_topdown()
141 addr = vm_unmapped_area(&info); in arch_get_unmapped_area_topdown()
144 return addr; in arch_get_unmapped_area_topdown()
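In the top-down variant, the listed lines show the retry idiom: the gap search returns either a page-aligned address or a negative errno stored in an unsigned long, so "addr & ~PAGE_MASK" detects failure (with VM_BUG_ON asserting that the only expected error is -ENOMEM), after which the search is repeated bottom-up. The stand-alone sketch below demonstrates that return-value convention; search_topdown() and search_bottomup() are stand-ins, not the kernel's vm_unmapped_area().

/* Sketch of the "page-aligned address or negative errno" convention
 * behind the fallback in arch_get_unmapped_area_topdown() above.
 * The search functions are stand-ins, not vm_unmapped_area(). */
#include <errno.h>
#include <stdio.h>

#define PAGE_SIZE       4096UL
#define PAGE_MASK       (~(PAGE_SIZE - 1))

/* Pretend the constrained top-down pass found no gap. */
static unsigned long search_topdown(void)
{
        return (unsigned long)-ENOMEM;
}

/* Pretend the unconstrained bottom-up retry succeeded. */
static unsigned long search_bottomup(void)
{
        return 0x40000000UL;
}

int main(void)
{
        unsigned long addr = search_topdown();

        /* A real address is always page aligned, so any low bits mean
         * the value is actually a negative errno; retry bottom-up over
         * the whole range, as the listed kernel code does. */
        if (addr & ~PAGE_MASK)
                addr = search_bottomup();

        printf("allocated at %#lx\n", addr);
        return 0;
}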
151 int valid_phys_addr_range(phys_addr_t addr, size_t size) in valid_phys_addr_range() argument
153 if (addr < PHYS_OFFSET) in valid_phys_addr_range()
155 if (addr + size > __pa(high_memory - 1) + 1) in valid_phys_addr_range()
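Finally, the valid_phys_addr_range() lines reject physical accesses (e.g. via /dev/mem) that start below the first byte of RAM (PHYS_OFFSET) or run past the end of the directly mapped region; the upper bound is written as __pa(high_memory - 1) + 1 because high_memory points one byte past the end of lowmem, so __pa() is only ever applied to a valid lowmem address. The sketch below restates the check with assumed constants: PHYS_OFFSET, LOWMEM_END and phys_range_ok() stand in for the real values and function.

/* Sketch of the physical range check in valid_phys_addr_range() above.
 * PHYS_OFFSET and LOWMEM_END are assumed constants standing in for the
 * start of RAM and for __pa(high_memory - 1) + 1. */
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define PHYS_OFFSET     0x80000000ULL   /* assumed start of RAM */
#define LOWMEM_END      0xa0000000ULL   /* assumed end of mapped RAM */

static bool phys_range_ok(uint64_t addr, size_t size)
{
        /* Reject anything starting below the first byte of RAM. */
        if (addr < PHYS_OFFSET)
                return false;
        /* Reject anything running past the end of lowmem. */
        if (addr + size > LOWMEM_END)
                return false;
        return true;
}

int main(void)
{
        return phys_range_ok(PHYS_OFFSET + 0x1000, 0x2000) ? 0 : 1;
}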