/linux-6.1.9/tools/perf/util/

thread-stack.c
      96  u64 kernel_start;                                                       member
     162  ts->kernel_start = machine__kernel_start(machine);                      in thread_stack__init()
     166  ts->kernel_start = 1ULL << 63;                                          in thread_stack__init()
     515  static inline u64 callchain_context(u64 ip, u64 kernel_start)           in callchain_context() argument
     517  return ip < kernel_start ? PERF_CONTEXT_USER : PERF_CONTEXT_KERNEL;     in callchain_context()
     522  size_t sz, u64 ip, u64 kernel_start)                                    in thread_stack__sample() argument
     525  u64 context = callchain_context(ip, kernel_start);                      in thread_stack__sample()
     546  context = callchain_context(ip, kernel_start);                          in thread_stack__sample()
     565  u64 sample_ip, u64 kernel_start)                                        in thread_stack__sample_late() argument
     568  u64 sample_context = callchain_context(sample_ip, kernel_start);        in thread_stack__sample_late()
     [all …]
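The hits above capture the whole pattern on the perf side: thread_stack__init() caches the machine's lowest kernel address (line 162), falls back to 1ULL << 63 when none is known (line 166), and callchain_context() then classifies every instruction pointer with a single comparison (line 517). A minimal user-space sketch of that classification, with made-up stand-ins for the PERF_CONTEXT_* markers from the perf UAPI:

#include <stdint.h>
#include <stdio.h>

/* Stand-ins for the PERF_CONTEXT_USER / PERF_CONTEXT_KERNEL markers. */
#define CONTEXT_USER   1
#define CONTEXT_KERNEL 2

/* Same shape as callchain_context() in thread-stack.c: addresses at or
 * above kernel_start are attributed to the kernel, the rest to user space. */
static inline uint64_t callchain_context(uint64_t ip, uint64_t kernel_start)
{
        return ip < kernel_start ? CONTEXT_USER : CONTEXT_KERNEL;
}

int main(void)
{
        /* Fallback from thread_stack__init(): with no kernel map, treat the
         * upper half of the 64-bit address space as kernel. */
        uint64_t kernel_start = 1ULL << 63;

        printf("%d\n", (int)callchain_context(0x00007f0000001000ULL, kernel_start)); /* prints 1 (user) */
        printf("%d\n", (int)callchain_context(0xffffffff81000000ULL, kernel_start)); /* prints 2 (kernel) */
        return 0;
}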
thread-stack.h
      87  size_t sz, u64 ip, u64 kernel_start);
      90  u64 kernel_start);
      95  u64 sample_ip, u64 kernel_start);
machine.h
      58  u64 kernel_start;                                         member
      97  if (!machine->kernel_start)                               in machine__kernel_start()
      99  return machine->kernel_start;                             in machine__kernel_start()
     104  u64 kernel_start = machine__kernel_start(machine);        in machine__kernel_ip() local
     106  return ip >= kernel_start;                                 in machine__kernel_ip()
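machine.h is where that boundary lives: the kernel_start member (line 58) is filled lazily by machine__kernel_start() (lines 97-99), and machine__kernel_ip() reduces the user-vs-kernel question to one comparison (line 106). A sketch of the same lazy-caching shape, with a cut-down struct machine and a stub standing in for the real map lookup in machine.c:

#include <stdbool.h>
#include <stdint.h>

/* Cut-down stand-in for perf's struct machine: just the cached boundary. */
struct machine {
        uint64_t kernel_start;
};

/* Stub for machine__get_kernel_start(); the real lookup (see the machine.c
 * hits below) walks the kernel map. The address here is only an example. */
static int machine__get_kernel_start(struct machine *machine)
{
        machine->kernel_start = 0xffffffff81000000ULL;
        return 0;
}

/* Resolve and cache the lowest kernel address on first use. */
static uint64_t machine__kernel_start(struct machine *machine)
{
        if (!machine->kernel_start)
                machine__get_kernel_start(machine);
        return machine->kernel_start;
}

/* One comparison decides whether an instruction pointer is kernel code. */
static bool machine__kernel_ip(struct machine *machine, uint64_t ip)
{
        return ip >= machine__kernel_start(machine);
}

int main(void)
{
        struct machine m = { 0 };

        /* A user-space address resolves as non-kernel. */
        return machine__kernel_ip(&m, 0x00007fffdeadbeefULL) ? 1 : 0;
}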
db-export.c
     215  u64 kernel_start = machine__kernel_start(machine);        in call_path_from_sample() local
     265  kernel_start);                                            in call_path_from_sample()
arm-spe.c
      77  u64 kernel_start;                                             member
     233  return ip >= spe->kernel_start ?                              in arm_spe_cpumode()
     623  if (!spe->kernel_start)                                       in arm_spe_run_decoder()
     624  spe->kernel_start = machine__kernel_start(spe->machine);      in arm_spe_run_decoder()
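The hardware-trace decoders keep their own copy of the same boundary: arm_spe_run_decoder() fetches it from the machine once (lines 623-624), and arm_spe_cpumode() then maps each decoded address to a PERF_RECORD_MISC_* cpumode (line 233); intel-pt.c below follows the identical pattern (lines 674 and 2854-2855). A compressed sketch of the SPE side, with stand-in constants for the cpumode flags and a callback standing in for machine__kernel_start():

#include <stdint.h>

/* Stand-ins for PERF_RECORD_MISC_KERNEL / PERF_RECORD_MISC_USER. */
#define MISC_KERNEL 1
#define MISC_USER   2

/* Minimal stand-in for struct arm_spe: only the cached boundary. */
struct spe_decoder {
        uint64_t kernel_start;
};

/* Same test as arm_spe_cpumode(): decoded addresses at or above the
 * boundary are reported as kernel-mode samples. */
static uint8_t spe_cpumode(struct spe_decoder *spe, uint64_t ip)
{
        return ip >= spe->kernel_start ? MISC_KERNEL : MISC_USER;
}

/* Mirrors the lazy fetch in arm_spe_run_decoder(); get_kernel_start()
 * stands in for machine__kernel_start(spe->machine). */
static uint8_t spe_decode_one(struct spe_decoder *spe, uint64_t ip,
                              uint64_t (*get_kernel_start)(void))
{
        if (!spe->kernel_start)
                spe->kernel_start = get_kernel_start();
        return spe_cpumode(spe, ip);
}

In perf the resulting cpumode ends up in the synthesized sample, where it steers which address space the tools resolve symbols against.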
intel-pt.c
      87  u64 kernel_start;                                            member
     674  return ip >= ptq->pt->kernel_start ?                         in intel_pt_nr_cpumode()
     969  } else if (ip >= ptq->pt->kernel_start) {                    in __intel_pt_pgd_ip()
    1214  pt->kernel_start);                                           in intel_pt_add_callchain()
    1252  pt->kernel_start);                                           in intel_pt_add_br_stack()
    1790  sample->ip, pt->kernel_start);                               in intel_pt_prep_sample()
    2225  pt->kernel_start);                                           in intel_pt_do_synth_pebs_sample()
    2854  if (!pt->kernel_start) {                                     in intel_pt_run_decoder()
    2855  pt->kernel_start = machine__kernel_start(pt->machine);       in intel_pt_run_decoder()
    2881  state->from_ip >= pt->kernel_start) {                        in intel_pt_run_decoder()
    [all …]
machine.c
     117  machine->kernel_start = 0;                   in machine__init()
    3248  machine->kernel_start = 1ULL << 63;          in machine__get_kernel_start()
    3257  machine->kernel_start = map->start;          in machine__get_kernel_start()
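machine.c supplies the cached value: machine__init() zeroes it (line 117) so the lazy helper above knows it is unresolved, and machine__get_kernel_start() prefers the start of the kernel text map when one can be found (line 3257), falling back to 1ULL << 63 otherwise (line 3248). The fallback works because, on the 64-bit targets involved, kernel text sits in the upper canonical half of the address space, so any address with bit 63 set still classifies as kernel. A tiny check of that property:

#include <assert.h>
#include <stdint.h>

int main(void)
{
        const uint64_t fallback = 1ULL << 63;

        /* Typical x86-64 kernel text address: upper canonical half. */
        assert(0xffffffff81000000ULL >= fallback);
        /* Typical user-space address: lower canonical half. */
        assert(0x00007f1234560000ULL < fallback);
        return 0;
}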
/linux-6.1.9/arch/riscv/mm/

physaddr.c
      24  unsigned long kernel_start = kernel_map.virt_addr;             in __phys_addr_symbol() local
      25  unsigned long kernel_end = kernel_start + kernel_map.size;     in __phys_addr_symbol()
      31  VIRTUAL_BUG_ON(x < kernel_start || x > kernel_end);            in __phys_addr_symbol()
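On riscv, kernel_start is a pure sanity bound: __phys_addr_symbol() only accepts virtual addresses inside the kernel image mapping described by kernel_map, and VIRTUAL_BUG_ON() trips otherwise (line 31). A user-space sketch of that check, with assert() standing in for VIRTUAL_BUG_ON() and made-up values for kernel_map and the virt-to-phys offset:

#include <assert.h>
#include <stdint.h>

/* Made-up stand-in for riscv's kernel_map: base and size of the image. */
static const struct {
        uint64_t virt_addr;
        uint64_t size;
} kernel_map = { 0xffffffff80000000ULL, 0x2000000ULL };

/* Mirrors the shape of __phys_addr_symbol(): refuse anything outside the
 * kernel image range before translating. The offset used is invented. */
static uint64_t phys_addr_symbol(uint64_t x)
{
        uint64_t kernel_start = kernel_map.virt_addr;
        uint64_t kernel_end = kernel_start + kernel_map.size;

        assert(!(x < kernel_start || x > kernel_end));

        return x - kernel_start;      /* stands in for __pa(x) */
}

int main(void)
{
        (void)phys_addr_symbol(kernel_map.virt_addr + 0x1000);
        return 0;
}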
/linux-6.1.9/arch/x86/mm/

mem_encrypt_identity.c
     296  unsigned long kernel_start, kernel_end, kernel_len;                      in sme_encrypt_kernel() local
     327  kernel_start = __pa_symbol(_text);                                       in sme_encrypt_kernel()
     329  kernel_len = kernel_end - kernel_start;                                  in sme_encrypt_kernel()
     371  pgtable_area_len += sme_pgtable_calc(execute_end - kernel_start) * 2;    in sme_encrypt_kernel()
     436  ppd.paddr = kernel_start;                                                in sme_encrypt_kernel()
     437  ppd.vaddr = kernel_start;                                                in sme_encrypt_kernel()
     442  ppd.paddr = kernel_start;                                                in sme_encrypt_kernel()
     443  ppd.vaddr = kernel_start + decrypted_base;                               in sme_encrypt_kernel()
     474  sme_encrypt_execute(kernel_start, kernel_start + decrypted_base,         in sme_encrypt_kernel()
     487  ppd.vaddr = kernel_start + decrypted_base;                               in sme_encrypt_kernel()
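The x86 SME path uses kernel_start differently: here it is the physical address of _text (line 327), and sme_encrypt_kernel() sets up two virtual views of the same physical range, an identity-mapped one (vaddr == paddr, lines 436-437) and a second one shifted by decrypted_base (lines 443 and 487, which the naming suggests is the decrypted view), so that sme_encrypt_execute() can encrypt the image in place working through both views (line 474). A sketch of just that address bookkeeping, with made-up numbers standing in for __pa_symbol(_text), the image end and decrypted_base:

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the descriptor sme_encrypt_kernel() fills in:
 * one physical range and the virtual address it is mapped at. */
struct ppd {
        uint64_t paddr;
        uint64_t vaddr;
        uint64_t len;
};

int main(void)
{
        /* Invented values for __pa_symbol(_text), the end of the image and
         * the offset the real code calls decrypted_base. */
        uint64_t kernel_start   = 0x01000000;
        uint64_t kernel_end     = 0x03000000;
        uint64_t decrypted_base = 0x40000000;
        uint64_t kernel_len     = kernel_end - kernel_start;

        /* View 1: identity-mapped (vaddr == paddr), as in lines 436-437. */
        struct ppd ident = { kernel_start, kernel_start, kernel_len };

        /* View 2: same physical range, mapped at +decrypted_base (line 443). */
        struct ppd alias = { kernel_start, kernel_start + decrypted_base, kernel_len };

        printf("view 1: va %#llx -> pa %#llx, %llu bytes\n",
               (unsigned long long)ident.vaddr, (unsigned long long)ident.paddr,
               (unsigned long long)ident.len);
        printf("view 2: va %#llx -> pa %#llx, %llu bytes\n",
               (unsigned long long)alias.vaddr, (unsigned long long)alias.paddr,
               (unsigned long long)alias.len);
        return 0;
}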
/linux-6.1.9/arch/arm/boot/bootp/

kernel.S
       2  .globl kernel_start
       3  kernel_start:                label
init.S
      58  b kernel_start    @ call kernel
/linux-6.1.9/arch/riscv/kernel/

elf_kexec.c
     197  unsigned long kernel_start;       in elf_kexec_load() local
     212  kernel_start = image->start;      in elf_kexec_load()
     273  &kernel_start,                    in elf_kexec_load()
     274  sizeof(kernel_start), 0);         in elf_kexec_load()
/linux-6.1.9/arch/parisc/mm/

init.c
     355  unsigned long kernel_start, kernel_end;                          in map_pages() local
     359  kernel_start = __pa((unsigned long)&__init_begin);               in map_pages()
     402  } else if (address < kernel_start || address >= kernel_end) {    in map_pages()
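On parisc, map_pages() uses the pair as an exclusion range while building page-table entries: kernel_start/kernel_end bracket part of the kernel image (lines 355-359), and the test at line 402 separates addresses outside that range from those inside it so the two cases can receive different page protections. A minimal sketch of just that classification; the protection labels are invented, since the actual pgprot choices are not visible in these hits:

#include <stdint.h>

/* Invented labels; the real code picks between pgprot values here. */
enum pgprot_choice { PROT_OUTSIDE_KERNEL, PROT_INSIDE_KERNEL };

/* Same shape as the range test at map_pages() line 402. */
static enum pgprot_choice choose_prot(uint64_t address,
                                      uint64_t kernel_start, uint64_t kernel_end)
{
        if (address < kernel_start || address >= kernel_end)
                return PROT_OUTSIDE_KERNEL;
        return PROT_INSIDE_KERNEL;
}

int main(void)
{
        return choose_prot(0x100000, 0x200000, 0x400000) == PROT_OUTSIDE_KERNEL ? 0 : 1;
}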
/linux-6.1.9/arch/arm64/mm/

mmu.c
     527  phys_addr_t kernel_start = __pa_symbol(_stext);                    in map_mem() local
     551  memblock_mark_nomap(kernel_start, kernel_end - kernel_start);      in map_mem()
     586  __map_memblock(pgdp, kernel_start, kernel_end,                     in map_mem()
     588  memblock_clear_nomap(kernel_start, kernel_end - kernel_start);     in map_mem()
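The arm64 hits show a carve-out rather than a comparison: map_mem() temporarily marks the kernel image range nomap (line 551) so the loop that maps the rest of memory skips it, then maps that range explicitly with its own attributes via __map_memblock() (line 586) and clears the marker again (line 588). A structural sketch of that sequence, with stub functions and printed messages standing in for the memblock and page-table calls:

#include <stdint.h>
#include <stdio.h>

/* Stubs standing in for memblock_mark_nomap()/memblock_clear_nomap() and
 * the mapping helpers used by arm64's map_mem(). */
static void mark_nomap(uint64_t base, uint64_t size)
{
        printf("nomap    [%#llx, %#llx)\n",
               (unsigned long long)base, (unsigned long long)(base + size));
}

static void clear_nomap(uint64_t base, uint64_t size)
{
        printf("mappable [%#llx, %#llx)\n",
               (unsigned long long)base, (unsigned long long)(base + size));
}

static void map_all_free_memory(void)
{
        printf("map every memory range not marked nomap\n");
}

static void map_range(uint64_t start, uint64_t end, const char *attrs)
{
        printf("map      [%#llx, %#llx) with %s\n",
               (unsigned long long)start, (unsigned long long)end, attrs);
}

int main(void)
{
        /* Invented physical addresses standing in for __pa_symbol(_stext)
         * and the end of the kernel image. */
        uint64_t kernel_start = 0x40210000;
        uint64_t kernel_end   = 0x41800000;

        /* 1. Hide the kernel image from the generic mapping loop. */
        mark_nomap(kernel_start, kernel_end - kernel_start);

        /* 2. Map everything else with the default attributes. */
        map_all_free_memory();

        /* 3. Map the kernel image's range with its own attributes. */
        map_range(kernel_start, kernel_end, "kernel-image attributes");

        /* 4. Make the range visible to memblock users again. */
        clear_nomap(kernel_start, kernel_end - kernel_start);
        return 0;
}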