Searched refs:zone_end_pfn (Results 1 – 13 of 13) sorted by relevance
/linux-6.1.9/arch/x86/mm/
  highmem_32.c
      18  unsigned long zone_start_pfn, zone_end_pfn;            in set_highmem_pages_init() [local]
      24  zone_end_pfn = zone_start_pfn + zone->spanned_pages;   in set_highmem_pages_init()
      28  zone->name, nid, zone_start_pfn, zone_end_pfn);        in set_highmem_pages_init()
      31  zone_end_pfn);                                         in set_highmem_pages_init()
/linux-6.1.9/mm/
  page_alloc.c
    1873  for (; block_start_pfn < zone_end_pfn(zone);                         in set_zone_contiguous()
    1877  block_end_pfn = min(block_end_pfn, zone_end_pfn(zone));              in set_zone_contiguous()
    2146  if (first_init_pfn < zone_end_pfn(zone))                             in deferred_init_memmap()
    2210  if (zone_end_pfn(zone) != pgdat_end_pfn(pgdat))                      in deferred_grow_zone()
    3346  max_zone_pfn = zone_end_pfn(zone);                                   in mark_free_pages()
    6737  unsigned long start_pfn, unsigned long zone_end_pfn,                 in memmap_init_range() [argument]
    6773  if (defer_init(nid, pfn, zone_end_pfn))                              in memmap_init_range()
    6990  unsigned long zone_end_pfn = zone_start_pfn + zone->spanned_pages;   in memmap_init_zone_range() [local]
    6993  start_pfn = clamp(start_pfn, zone_start_pfn, zone_end_pfn);          in memmap_init_zone_range()
    6994  end_pfn = clamp(end_pfn, zone_start_pfn, zone_end_pfn);              in memmap_init_zone_range()
    [all …]
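The memmap_init_zone_range() hits above (page_alloc.c:6990, 6993, 6994) show the common idiom: compute the zone's exclusive end pfn from start + spanned_pages, then clip an arbitrary pfn range to the zone span before walking it. A minimal user-space sketch of that idiom follows, with a stand-in clamp() macro and made-up pfn values; it is not the kernel code itself.

    #include <stdio.h>

    /* Stand-in for the kernel's clamp() macro used in memmap_init_zone_range(). */
    #define clamp(val, lo, hi) ((val) < (lo) ? (lo) : ((val) > (hi) ? (hi) : (val)))

    int main(void)
    {
            unsigned long zone_start_pfn = 0x1000;   /* made-up zone bounds */
            unsigned long spanned_pages  = 0x200;
            unsigned long zone_end_pfn   = zone_start_pfn + spanned_pages;

            unsigned long start_pfn = 0x0800;        /* region starts below the zone */
            unsigned long end_pfn   = 0x1400;        /* ...and ends beyond it */

            /* Clip the range so the memmap walk never leaves the zone span. */
            start_pfn = clamp(start_pfn, zone_start_pfn, zone_end_pfn);
            end_pfn   = clamp(end_pfn, zone_start_pfn, zone_end_pfn);

            if (start_pfn < end_pfn)
                    printf("init pfns [%#lx, %#lx)\n", start_pfn, end_pfn);
            return 0;
    }

Because the end bound is exclusive, the two clamp() calls stay symmetric and an empty intersection simply leaves start_pfn == end_pfn.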
  shuffle.c
      84  unsigned long end_pfn = zone_end_pfn(z);   in __shuffle_zone()
  compaction.c
     230  pageblock_start_pfn(zone_end_pfn(zone) - 1);                     in reset_cached_positions()
     293  block_pfn = min(block_pfn, zone_end_pfn(zone) - 1);              in __reset_isolation_pfn()
     328  unsigned long free_pfn = zone_end_pfn(zone) - 1;                 in __reset_isolation_suitable()
    1362  end_pfn = min(pageblock_end_pfn(pfn), zone_end_pfn(cc->zone));   in fast_isolate_around()
    1533  zone_end_pfn(cc->zone)),                                         in fast_isolate_freepages()
    1589  zone_end_pfn(zone));                                             in isolate_freepages()
    2294  unsigned long end_pfn = zone_end_pfn(cc->zone);                  in compact_zone()
  memory_hotplug.c
     406  zone_end_pfn(zone));                              in shrink_zone_span()
     408  zone->spanned_pages = zone_end_pfn(zone) - pfn;   in shrink_zone_span()
     414  } else if (zone_end_pfn(zone) == end_pfn) {       in shrink_zone_span()
     439  unsigned long end_pfn = zone_end_pfn(zone);       in update_pgdat_span()
     645  unsigned long old_end_pfn = zone_end_pfn(zone);   in resize_zone_range()
  page_owner.c
     280  unsigned long end_pfn = zone_end_pfn(zone);   in pagetypeinfo_showmixedcount_print()
     622  unsigned long end_pfn = zone_end_pfn(zone);   in init_pages_in_zone()
  memblock.c
    1320  if (zone_end_pfn(zone) <= spfn) {            in __next_mem_pfn_range_in_zone()
    1328  *out_epfn = min(zone_end_pfn(zone), epfn);   in __next_mem_pfn_range_in_zone()
  kmemleak.c
    1558  unsigned long end_pfn = zone_end_pfn(zone);   in kmemleak_scan()
  vmstat.c
    1544  unsigned long end_pfn = zone_end_pfn(zone);   in pagetypeinfo_showblockcount_print()
  huge_memory.c
    2925  max_zone_pfn = zone_end_pfn(zone);   in split_huge_pages_all()
/linux-6.1.9/include/linux/
  mmzone.h
     907  static inline unsigned long zone_end_pfn(const struct zone *zone)   in zone_end_pfn() [function]
     914  return zone->zone_start_pfn <= pfn && pfn < zone_end_pfn(zone);     in zone_spans_pfn()
    1018  if (start_pfn >= zone_end_pfn(zone) ||                              in zone_intersects()
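The mmzone.h hits are the definition site. Putting the visible pieces together (the helper declared at line 907, the start + spanned_pages computation seen in the highmem_32.c and page_alloc.c hits, and the bound check at line 914), the helpers reduce to the shape below. This is a standalone sketch with a toy struct zone containing only the two fields involved, not the kernel header itself.

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy stand-in for the kernel's struct zone. */
    struct zone {
            unsigned long zone_start_pfn;   /* first page frame number of the zone */
            unsigned long spanned_pages;    /* pfns spanned by the zone, holes included */
    };

    /* Shape of mmzone.h:907: the end pfn is exclusive, start + spanned. */
    static inline unsigned long zone_end_pfn(const struct zone *zone)
    {
            return zone->zone_start_pfn + zone->spanned_pages;
    }

    /* Shape of mmzone.h:914: a pfn belongs to the zone iff it lies in
     * [zone_start_pfn, zone_end_pfn). */
    static inline bool zone_spans_pfn(const struct zone *zone, unsigned long pfn)
    {
            return zone->zone_start_pfn <= pfn && pfn < zone_end_pfn(zone);
    }

    int main(void)
    {
            struct zone z = { .zone_start_pfn = 0x1000, .spanned_pages = 0x200 };

            printf("end pfn      : %#lx\n", zone_end_pfn(&z));           /* 0x1200 */
            printf("spans 0x11ff : %d\n", zone_spans_pfn(&z, 0x11ff));   /* 1 */
            printf("spans 0x1200 : %d\n", zone_spans_pfn(&z, 0x1200));   /* 0 */
            return 0;
    }

The exclusive end bound is why callers such as compaction.c use zone_end_pfn(zone) - 1 when they need the last pfn actually inside the zone.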
/linux-6.1.9/arch/arm64/kernel/
  hibernate.c
     267  max_zone_pfn = zone_end_pfn(zone);   in swsusp_mte_save_tags()
/linux-6.1.9/kernel/power/
  snapshot.c
     635  zone_end = zone_end_pfn(zone);       in create_mem_extents()
    1297  max_zone_pfn = zone_end_pfn(zone);   in count_highmem_pages()
    1364  max_zone_pfn = zone_end_pfn(zone);   in count_data_pages()
    1458  max_zone_pfn = zone_end_pfn(zone);   in copy_data_pages()
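Most of the scanners above (page_owner.c, kmemleak.c, vmstat.c, hibernate.c, snapshot.c) capture zone_end_pfn(zone) once and use it as the exclusive upper bound of a per-pfn walk over the zone. A minimal sketch of that loop shape, with a toy zone and made-up values; real callers additionally validate each pfn (memory holes, offline sections) before touching the corresponding page.

    #include <stdio.h>

    /* Toy zone, mirroring only the fields needed for the walk. */
    struct zone {
            unsigned long zone_start_pfn;
            unsigned long spanned_pages;
    };

    static unsigned long zone_end_pfn(const struct zone *zone)
    {
            return zone->zone_start_pfn + zone->spanned_pages;
    }

    int main(void)
    {
            struct zone z = { .zone_start_pfn = 0x1000, .spanned_pages = 4 };
            unsigned long max_zone_pfn = zone_end_pfn(&z);   /* exclusive bound */
            unsigned long visited = 0;
            unsigned long pfn;

            /* Visit every pfn the zone spans, [zone_start_pfn, max_zone_pfn). */
            for (pfn = z.zone_start_pfn; pfn < max_zone_pfn; pfn++)
                    visited++;

            printf("visited %lu pfns\n", visited);   /* 4 */
            return 0;
    }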