/linux-6.1.9/arch/x86/kvm/

pmu.c
   88  return static_call(kvm_x86_pmu_pmc_is_enabled)(pmc);  in pmc_is_enabled()
  268  if (!static_call(kvm_x86_pmu_hw_event_available)(pmc))  in check_pmu_event_filter()
  346  struct kvm_pmc *pmc = static_call(kvm_x86_pmu_pmc_idx_to_pmc)(pmu, bit);  in kvm_pmu_handle_event()
  367  return static_call(kvm_x86_pmu_is_valid_rdpmc_ecx)(vcpu, idx);  in kvm_pmu_is_valid_rdpmc_ecx()
  417  pmc = static_call(kvm_x86_pmu_rdpmc_ecx_to_pmc)(vcpu, idx, &mask);  in kvm_pmu_rdpmc()
  422  (static_call(kvm_x86_get_cpl)(vcpu) != 0) &&  in kvm_pmu_rdpmc()
  440  return static_call(kvm_x86_pmu_msr_idx_to_pmc)(vcpu, msr) ||  in kvm_pmu_is_valid_msr()
  441  static_call(kvm_x86_pmu_is_valid_msr)(vcpu, msr);  in kvm_pmu_is_valid_msr()
  447  struct kvm_pmc *pmc = static_call(kvm_x86_pmu_msr_idx_to_pmc)(vcpu, msr);  in kvm_pmu_mark_pmc_in_use()
  455  return static_call(kvm_x86_pmu_get_msr)(vcpu, msr_info);  in kvm_pmu_get_msr()
  [all …]

kvm_cache_regs.h
   89  static_call(kvm_x86_cache_reg)(vcpu, reg);  in kvm_register_read_raw()
  129  static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_PDPTR);  in kvm_pdptr_read()
  144  static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR0);  in kvm_read_cr0_bits()
  158  static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR4);  in kvm_read_cr4_bits()
  165  static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR3);  in kvm_read_cr3()

x86.c
  835  if (static_call(kvm_x86_get_cpl)(vcpu) <= required_cpl)  in kvm_require_cpl()
  957  static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l);  in kvm_set_cr0()
  971  static_call(kvm_x86_set_cr0)(vcpu, cr0);  in kvm_set_cr0()
 1093  if (static_call(kvm_x86_get_cpl)(vcpu) != 0 ||  in kvm_emulate_xsetbv()
 1118  static_call(kvm_x86_is_valid_cr4)(vcpu, cr4);  in kvm_is_valid_cr4()
 1189  static_call(kvm_x86_set_cr4)(vcpu, cr4);  in kvm_set_cr4()
 1330  static_call(kvm_x86_set_dr7)(vcpu, dr7);  in kvm_update_dr7()
 1661  return static_call(kvm_x86_get_msr_feature)(msr);  in kvm_get_msr_feature()
 1737  r = static_call(kvm_x86_set_efer)(vcpu, efer);  in set_efer()
 1863  return static_call(kvm_x86_set_msr)(vcpu, &msr);  in __kvm_set_msr()
  [all …]

mmu.h
  152  static_call(kvm_x86_load_mmu_pgd)(vcpu, root_hpa,  in kvm_mmu_load_pgd()
  170  unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu);  in permission_fault()

lapic.h
  235  !static_call(kvm_x86_apic_init_signal_blocked)(vcpu);  in kvm_apic_init_sipi_allowed()

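All of the kvm_x86_* calls above go through per-op static-call trampolines rather than an indirect call through struct kvm_x86_ops: when the vendor module (VMX or SVM) loads, KVM patches each trampoline to jump straight to that vendor's implementation, so hot paths such as kvm_pmu_rdpmc() pay no function-pointer load or retpoline. A minimal sketch of that pattern follows; the names (struct demo_vcpu, demo_get_cpl, vmx_get_cpl, demo_hardware_setup) are invented for illustration, while DEFINE_STATIC_CALL_NULL, static_call_update and static_call itself are the real API from <linux/static_call.h>.

#include <linux/static_call.h>
#include <linux/types.h>

struct demo_vcpu { int cpl; };                  /* stand-in for struct kvm_vcpu */

/* "Vendor" implementation, the role the vmx/svm code plays in KVM. */
static int vmx_get_cpl(struct demo_vcpu *vcpu)
{
        return vcpu->cpl;
}

/* One patchable trampoline per hook; unbound until hardware setup runs. */
DEFINE_STATIC_CALL_NULL(demo_get_cpl, vmx_get_cpl);

static void demo_hardware_setup(void)
{
        /* Patch every demo_get_cpl() call site into a direct call to vmx_get_cpl(). */
        static_call_update(demo_get_cpl, vmx_get_cpl);
}

static bool demo_require_cpl0(struct demo_vcpu *vcpu)
{
        /* Same shape as the hits above: no function-pointer load at runtime. */
        return static_call(demo_get_cpl)(vcpu) == 0;
}

KVM generates one trampoline like this per entry in its kvm_x86_ops hook list, which is why every hit above calls static_call(kvm_x86_<hook>) with the hook name baked into the identifier.
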
/linux-6.1.9/security/keys/trusted-keys/

trusted_core.c
  182  ret = static_call(trusted_key_unseal)(payload, datablob);  in trusted_instantiate()
  189  ret = static_call(trusted_key_get_random)(payload->key,  in trusted_instantiate()
  200  ret = static_call(trusted_key_seal)(payload, datablob);  in trusted_instantiate()
  269  ret = static_call(trusted_key_seal)(new_p, datablob);  in trusted_update()
  374  ret = static_call(trusted_key_init)();  in init_trusted()

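trusted_core.c uses the same mechanism to bind the trusted-key operations to exactly one backend: init_trusted() picks a key source (e.g. TPM or TEE) and points the trusted_key_init/seal/unseal/get_random static calls at that backend's functions, after which trusted_instantiate() and trusted_update() call them directly, as in the hits above. A rough, hypothetical sketch of that one-shot selection (struct demo_key_ops, demo_tpm_* and demo_choose_backend are made-up names, not the file's real ones):

#include <linux/static_call.h>
#include <linux/errno.h>
#include <linux/init.h>

struct demo_key_ops {                              /* one ops struct per backend */
        int (*seal)(void *payload, char *blob);
        int (*unseal)(void *payload, char *blob);
};

static int demo_tpm_seal(void *payload, char *blob)   { return 0; }
static int demo_tpm_unseal(void *payload, char *blob) { return 0; }

static const struct demo_key_ops demo_tpm_ops = {
        .seal   = demo_tpm_seal,
        .unseal = demo_tpm_unseal,
};

/* Unbound until a backend is chosen at init time. */
DEFINE_STATIC_CALL_NULL(demo_key_seal, demo_tpm_seal);
DEFINE_STATIC_CALL_NULL(demo_key_unseal, demo_tpm_unseal);

static int demo_choose_backend(const struct demo_key_ops *ops)
{
        if (!ops->seal || !ops->unseal)
                return -EINVAL;                    /* refuse a half-implemented backend */
        static_call_update(demo_key_seal, ops->seal);
        static_call_update(demo_key_unseal, ops->unseal);
        return 0;
}

static int __init demo_trusted_init(void)
{
        return demo_choose_backend(&demo_tpm_ops);
}
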
/linux-6.1.9/tools/include/linux/

static_call_types.h
   90  #define static_call(name) __static_call(name)  (macro)
   98  #define static_call(name) \  (macro)

/linux-6.1.9/include/linux/

static_call_types.h
   90  #define static_call(name) __static_call(name)  (macro)
   98  #define static_call(name) \  (macro)

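static_call_types.h is where the static_call() macro itself is defined (the tools/include copy above is the mirror used by the objtool build). The two hits correspond to the two build flavours: one expands to a direct call into a patchable trampoline, while the generic fallback degrades to an ordinary indirect call through the key's stored function pointer, so call sites look identical either way. Basic use of the API, sketched with hypothetical names (demo_hash, sha1_hash, fast_hash):

#include <linux/static_call.h>

/* Hypothetical hook with a boring default implementation. */
static int sha1_hash(const void *data, int len) { return 0; }
static int fast_hash(const void *data, int len) { return 1; }

/* Callers would normally see DECLARE_STATIC_CALL(demo_hash, sha1_hash) in a
 * shared header; the definition lives in exactly one translation unit: */
DEFINE_STATIC_CALL(demo_hash, sha1_hash);

static int demo_digest(const void *data, int len)
{
        /* Reads like a normal call; bound to sha1_hash until someone updates it. */
        return static_call(demo_hash)(data, len);
}

static void demo_pick_faster_hash(void)
{
        /* Re-patches every demo_hash call site in one go. */
        static_call_update(demo_hash, fast_hash);
}
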
entry-common.h
  423  #define irqentry_exit_cond_resched() static_call(irqentry_exit_cond_resched)()

perf_event.h
 1308  return static_call(__perf_guest_state)();  in perf_guest_state()
 1312  return static_call(__perf_guest_get_ip)();  in perf_guest_get_ip()
 1316  return static_call(__perf_guest_handle_intel_pt_intr)();  in perf_guest_handle_intel_pt_intr()

tracepoint.h
  173  static_call(tp_func_##name)(__data, args); \

/linux-6.1.9/tools/objtool/

builtin-check.c
   75  OPT_BOOLEAN('t', "static-call", &opts.static_call, "annotate static calls"),
  131  opts.static_call ||  in opts_valid()

/linux-6.1.9/arch/arm/include/asm/

paravirt.h
   18  return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

/linux-6.1.9/arch/arm64/include/asm/

paravirt.h
   18  return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

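The arm and arm64 paravirt_steal_clock() wrappers (and the x86 one further down) are the override-a-default flavour of the same API: pv_steal_clock starts out bound to a do-nothing native implementation and is re-pointed when a guest registers a real steal-time source, so bare-metal boots never take an indirect call. A hedged sketch of that shape, with invented names (demo_steal_clock, native_steal_clock_stub, hyperv_steal_clock):

#include <linux/static_call.h>
#include <linux/types.h>

static u64 native_steal_clock_stub(int cpu)     /* default: no steal time */
{
        return 0;
}

static u64 hyperv_steal_clock(int cpu)          /* pretend hypervisor source */
{
        return 42;
}

DEFINE_STATIC_CALL(demo_steal_clock, native_steal_clock_stub);

static inline u64 demo_paravirt_steal_clock(int cpu)
{
        return static_call(demo_steal_clock)(cpu);
}

/* Guest init code runs this once when a real steal-time interface exists. */
static void demo_register_steal_time(void)
{
        static_call_update(demo_steal_clock, hyperv_steal_clock);
}
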
/linux-6.1.9/tools/objtool/include/objtool/

builtin.h
   26  bool static_call;  (member)

/linux-6.1.9/arch/x86/events/

core.c
  700  return static_call(x86_pmu_guest_get_msrs)(nr, data);  in perf_guest_get_msrs()
  731  static_call(x86_pmu_disable_all)();  in x86_pmu_disable()
 1013  c = static_call(x86_pmu_get_event_constraints)(cpuc, i, cpuc->event_list[i]);  in x86_schedule_events()
 1356  static_call(x86_pmu_enable_all)(added);  in x86_pmu_enable()
 1466  ret = static_call(x86_pmu_schedule_events)(cpuc, n, assign);  in x86_pmu_add()
 1508  static_call(x86_pmu_set_period)(event);  in x86_pmu_start()
 1515  static_call(x86_pmu_enable)(event);  in x86_pmu_start()
 1588  static_call(x86_pmu_disable)(event);  in x86_pmu_stop()
 1600  static_call(x86_pmu_update)(event);  in x86_pmu_stop()
 1690  val = static_call(x86_pmu_update)(event);  in x86_pmu_handle_irq()
  [all …]

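arch/x86/events/core.c keeps the vendor-specific PMU hooks in the x86_pmu ops structure and mirrors the hot ones (enable_all, disable_all, schedule_events, update, …) into static calls during init, which is what the x86_pmu_* trampolines above are. A common idiom for that wiring is a tiny macro that copies each ops member into its static call; sketched here with invented names (struct demo_pmu_ops, demo_pmu, DEMO_SET_CALL), not the driver's real helpers:

#include <linux/static_call.h>

struct demo_pmu_ops {
        void (*enable_all)(int added);
        void (*disable_all)(void);
};

static void demo_vendor_enable_all(int added) { }
static void demo_vendor_disable_all(void) { }

/* Filled in by "vendor" init code, analogous to the global x86_pmu. */
static struct demo_pmu_ops demo_pmu = {
        .enable_all  = demo_vendor_enable_all,
        .disable_all = demo_vendor_disable_all,
};

/* typeof(*demo_pmu.member) gives each trampoline the right prototype. */
DEFINE_STATIC_CALL_NULL(demo_pmu_enable_all,  *demo_pmu.enable_all);
DEFINE_STATIC_CALL_NULL(demo_pmu_disable_all, *demo_pmu.disable_all);

/* Copy one ops member into its static call. */
#define DEMO_SET_CALL(op) static_call_update(demo_pmu_##op, demo_pmu.op)

static void demo_pmu_static_call_init(void)
{
        DEMO_SET_CALL(enable_all);
        DEMO_SET_CALL(disable_all);
}

The real driver does the equivalent once the vendor-specific init code has filled in x86_pmu, after which the fast paths listed above never touch the ops struct again.
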
/linux-6.1.9/arch/x86/events/amd/

core.c
  354  return static_call(amd_pmu_branch_hw_config)(event);  in amd_core_hw_config()
  529  static_call(amd_pmu_branch_reset)();  in amd_pmu_cpu_reset()
  681  if (!static_call(amd_pmu_test_overflow)(idx))  in amd_pmu_wait_on_overflow()
  804  static_call(amd_pmu_branch_add)(event);  in amd_pmu_add_event()
  812  static_call(amd_pmu_branch_del)(event);  in amd_pmu_del_event()

/linux-6.1.9/drivers/cpufreq/

amd-pstate.c
   87  return static_call(amd_pstate_enable)(enable);  in amd_pstate_enable()
  145  return static_call(amd_pstate_init_perf)(cpudata);  in amd_pstate_init_perf()
  177  static_call(amd_pstate_update_perf)(cpudata, min_perf, des_perf,  in amd_pstate_update_perf()

/linux-6.1.9/arch/x86/include/asm/

paravirt.h
   31  return static_call(pv_sched_clock)();  in paravirt_sched_clock()
   45  return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

/linux-6.1.9/arch/powerpc/kernel/

irq.c
  238  irq = static_call(ppc_get_irq)();  in __do_irq()

Makefile
  130  obj-$(CONFIG_PPC32) += entry_32.o setup_32.o early_32.o static_call.o

/linux-6.1.9/kernel/

Makefile
  112  obj-$(CONFIG_HAVE_STATIC_CALL) += static_call.o

static_call_inline.c
  536  WARN_ON(static_call(sc_selftest)(scd->val) != scd->expect);  in test_static_call_init()

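The static_call_inline.c hit is from a small boot-time self-test: a test static call is repeatedly re-pointed between helper functions and the kernel WARNs if a call site ever returns the wrong value. A stripped-down version of that check looks roughly like this (demo_selftest, func_a and func_b are made-up stand-ins for the file's sc_selftest machinery):

#include <linux/static_call.h>
#include <linux/bug.h>

static int func_a(int x) { return x + 1; }
static int func_b(int x) { return x + 2; }

DEFINE_STATIC_CALL(demo_selftest, func_a);

static void demo_static_call_selftest(void)
{
        WARN_ON(static_call(demo_selftest)(2) != 3);   /* still func_a */

        static_call_update(demo_selftest, func_b);     /* re-patch the text */
        WARN_ON(static_call(demo_selftest)(2) != 4);   /* now func_b */

        static_call_update(demo_selftest, func_a);     /* and back again */
        WARN_ON(static_call(demo_selftest)(2) != 3);
}
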
/linux-6.1.9/arch/x86/kernel/

Makefile
   67  obj-y += static_call.o

/linux-6.1.9/arch/x86/kvm/mmu/

spte.c
  181  spte |= static_call(kvm_x86_get_mt_mask)(vcpu, gfn,  in make_spte()