Lines matching refs: static_call (all call sites in arch/x86/kvm/x86.c; a sketch of the pattern behind them follows the list)
835 if (static_call(kvm_x86_get_cpl)(vcpu) <= required_cpl) in kvm_require_cpl()
957 static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l); in kvm_set_cr0()
971 static_call(kvm_x86_set_cr0)(vcpu, cr0); in kvm_set_cr0()
1093 if (static_call(kvm_x86_get_cpl)(vcpu) != 0 || in kvm_emulate_xsetbv()
1118 static_call(kvm_x86_is_valid_cr4)(vcpu, cr4); in kvm_is_valid_cr4()
1189 static_call(kvm_x86_set_cr4)(vcpu, cr4); in kvm_set_cr4()
1330 static_call(kvm_x86_set_dr7)(vcpu, dr7); in kvm_update_dr7()
1661 return static_call(kvm_x86_get_msr_feature)(msr); in kvm_get_msr_feature()
1737 r = static_call(kvm_x86_set_efer)(vcpu, efer); in set_efer()
1863 return static_call(kvm_x86_set_msr)(vcpu, &msr); in __kvm_set_msr()
1905 ret = static_call(kvm_x86_get_msr)(vcpu, &msr); in __kvm_get_msr()
1973 return static_call(kvm_x86_complete_emulated_msr)(vcpu, vcpu->run->msr.error); in complete_fast_msr_access()
2037 return static_call(kvm_x86_complete_emulated_msr)(vcpu, r); in kvm_emulate_rdmsr()
2062 return static_call(kvm_x86_complete_emulated_msr)(vcpu, r); in kvm_emulate_wrmsr()
2575 static_call(kvm_x86_get_l2_tsc_offset)(vcpu), in kvm_vcpu_write_tsc_offset()
2576 static_call(kvm_x86_get_l2_tsc_multiplier)(vcpu)); in kvm_vcpu_write_tsc_offset()
2580 static_call(kvm_x86_write_tsc_offset)(vcpu, vcpu->arch.tsc_offset); in kvm_vcpu_write_tsc_offset()
2591 static_call(kvm_x86_get_l2_tsc_multiplier)(vcpu)); in kvm_vcpu_write_tsc_multiplier()
2596 static_call(kvm_x86_write_tsc_multiplier)( in kvm_vcpu_write_tsc_multiplier()
3399 static_call(kvm_x86_flush_tlb_all)(vcpu); in kvm_vcpu_flush_tlb_all()
3417 static_call(kvm_x86_flush_tlb_guest)(vcpu); in kvm_vcpu_flush_tlb_guest()
3424 static_call(kvm_x86_flush_tlb_current)(vcpu); in kvm_vcpu_flush_tlb_current()
4452 r = static_call(kvm_x86_has_emulated_msr)(kvm, MSR_IA32_SMBASE); in kvm_vm_ioctl_check_extension()
4694 if (static_call(kvm_x86_has_wbinvd_exit)()) in kvm_arch_vcpu_load()
4701 static_call(kvm_x86_vcpu_load)(vcpu, cpu); in kvm_arch_vcpu_load()
4796 vcpu->arch.preempted_in_kernel = !static_call(kvm_x86_get_cpl)(vcpu); in kvm_arch_vcpu_put()
4810 static_call(kvm_x86_vcpu_put)(vcpu); in kvm_arch_vcpu_put()
4941 static_call(kvm_x86_setup_mce)(vcpu); in kvm_vcpu_ioctl_x86_setup_mce()
5101 events->interrupt.shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
5105 events->nmi.masked = static_call(kvm_x86_get_nmi_mask)(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
5190 static_call(kvm_x86_set_interrupt_shadow)(vcpu, in kvm_vcpu_ioctl_x86_set_vcpu_events()
5196 static_call(kvm_x86_set_nmi_mask)(vcpu, events->nmi.masked); in kvm_vcpu_ioctl_x86_set_vcpu_events()
5503 return static_call(kvm_x86_enable_direct_tlbflush)(vcpu); in kvm_vcpu_ioctl_enable_cap()
5972 ret = static_call(kvm_x86_set_tss_addr)(kvm, addr); in kvm_vm_ioctl_set_tss_addr()
5979 return static_call(kvm_x86_set_identity_map_addr)(kvm, ident_addr); in kvm_vm_ioctl_set_identity_map_addr()
6285 r = static_call(kvm_x86_vm_copy_enc_context_from)(kvm, cap->args[0]); in kvm_vm_ioctl_enable_cap()
6292 r = static_call(kvm_x86_vm_move_enc_context_from)(kvm, cap->args[0]); in kvm_vm_ioctl_enable_cap()
6928 r = static_call(kvm_x86_mem_enc_ioctl)(kvm, argp); in kvm_arch_vm_ioctl()
6942 r = static_call(kvm_x86_mem_enc_register_region)(kvm, &region); in kvm_arch_vm_ioctl()
6956 r = static_call(kvm_x86_mem_enc_unregister_region)(kvm, &region); in kvm_arch_vm_ioctl()
7068 if (!static_call(kvm_x86_has_emulated_msr)(NULL, emulated_msrs_all[i])) in kvm_init_msr_list()
7131 static_call(kvm_x86_set_segment)(vcpu, var, seg); in kvm_set_segment()
7137 static_call(kvm_x86_get_segment)(vcpu, var, seg); in kvm_get_segment()
7160 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_read()
7170 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_fetch()
7180 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_write()
7233 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_fetch_guest_virt()
7258 u64 access = (static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_read_guest_virt()
7281 else if (static_call(kvm_x86_get_cpl)(vcpu) == 3) in emulator_read_std()
7335 else if (static_call(kvm_x86_get_cpl)(vcpu) == 3) in emulator_write_std()
7356 return static_call(kvm_x86_can_emulate_instruction)(vcpu, emul_type, in kvm_can_emulate_insn()
7405 u64 access = ((static_call(kvm_x86_get_cpl)(vcpu) == 3) ? PFERR_USER_MASK : 0) in vcpu_mmio_gva_to_gpa()
7822 return static_call(kvm_x86_get_segment_base)(vcpu, seg); in get_segment_base()
7835 if (static_call(kvm_x86_has_wbinvd_exit)()) { in kvm_emulate_wbinvd_noskip()
7940 return static_call(kvm_x86_get_cpl)(emul_to_vcpu(ctxt)); in emulator_get_cpl()
7945 static_call(kvm_x86_get_gdt)(emul_to_vcpu(ctxt), dt); in emulator_get_gdt()
7950 static_call(kvm_x86_get_idt)(emul_to_vcpu(ctxt), dt); in emulator_get_idt()
7955 static_call(kvm_x86_set_gdt)(emul_to_vcpu(ctxt), dt); in emulator_set_gdt()
7960 static_call(kvm_x86_set_idt)(emul_to_vcpu(ctxt), dt); in emulator_set_idt()
8130 return static_call(kvm_x86_check_intercept)(emul_to_vcpu(ctxt), info, stage, in emulator_intercept()
8173 static_call(kvm_x86_set_nmi_mask)(emul_to_vcpu(ctxt), masked); in emulator_set_nmi_mask()
8191 return static_call(kvm_x86_leave_smm)(emul_to_vcpu(ctxt), smstate); in emulator_leave_smm()
8265 u32 int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in toggle_interruptibility()
8276 static_call(kvm_x86_set_interrupt_shadow)(vcpu, mask); in toggle_interruptibility()
8317 static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l); in init_emulate_ctxt()
8377 static_call(kvm_x86_get_exit_info)(vcpu, (u32 *)&info[0], &info[1], in prepare_emulation_failure_exit()
8456 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8645 unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu); in kvm_skip_emulated_instruction()
8648 r = static_call(kvm_x86_skip_emulated_instruction)(vcpu); in kvm_skip_emulated_instruction()
8680 shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in kvm_is_code_breakpoint_inhibited()
8954 unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu); in x86_emulate_instruction()
9563 ulong vcpu_reasons = static_call(kvm_x86_vcpu_get_apicv_inhibit_reasons)(vcpu); in kvm_vcpu_apicv_activated()
9666 if (static_call(kvm_x86_get_cpl)(vcpu) != 0) { in kvm_emulate_hypercall()
9756 static_call(kvm_x86_patch_hypercall)(vcpu, instruction); in emulator_fix_hypercall()
9773 kvm_run->if_flag = static_call(kvm_x86_get_if_flag)(vcpu); in post_kvm_run_save()
9808 static_call(kvm_x86_update_cr8_intercept)(vcpu, tpr, max_irr); in update_cr8_intercept()
9831 static_call(kvm_x86_inject_exception)(vcpu); in kvm_inject_exception()
9917 static_call(kvm_x86_inject_nmi)(vcpu); in kvm_check_and_inject_events()
9919 static_call(kvm_x86_inject_irq)(vcpu, true); in kvm_check_and_inject_events()
10003 r = can_inject ? static_call(kvm_x86_smi_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10012 static_call(kvm_x86_enable_smi_window)(vcpu); in kvm_check_and_inject_events()
10016 r = can_inject ? static_call(kvm_x86_nmi_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10022 static_call(kvm_x86_inject_nmi)(vcpu); in kvm_check_and_inject_events()
10024 WARN_ON(static_call(kvm_x86_nmi_allowed)(vcpu, true) < 0); in kvm_check_and_inject_events()
10027 static_call(kvm_x86_enable_nmi_window)(vcpu); in kvm_check_and_inject_events()
10031 r = can_inject ? static_call(kvm_x86_interrupt_allowed)(vcpu, true) : -EBUSY; in kvm_check_and_inject_events()
10036 static_call(kvm_x86_inject_irq)(vcpu, false); in kvm_check_and_inject_events()
10037 WARN_ON(static_call(kvm_x86_interrupt_allowed)(vcpu, true) < 0); in kvm_check_and_inject_events()
10040 static_call(kvm_x86_enable_irq_window)(vcpu); in kvm_check_and_inject_events()
10081 if (static_call(kvm_x86_get_nmi_mask)(vcpu) || vcpu->arch.nmi_injected) in process_nmi()
10171 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_32()
10175 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_32()
10225 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_64()
10235 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_64()
10264 static_call(kvm_x86_enter_smm)(vcpu, buf); in enter_smm()
10269 if (static_call(kvm_x86_get_nmi_mask)(vcpu)) in enter_smm()
10272 static_call(kvm_x86_set_nmi_mask)(vcpu, true); in enter_smm()
10278 static_call(kvm_x86_set_cr0)(vcpu, cr0); in enter_smm()
10281 static_call(kvm_x86_set_cr4)(vcpu, 0); in enter_smm()
10285 static_call(kvm_x86_set_idt)(vcpu, &dt); in enter_smm()
10316 static_call(kvm_x86_set_efer)(vcpu, 0); in enter_smm()
10360 static_call(kvm_x86_refresh_apicv_exec_ctrl)(vcpu); in kvm_vcpu_update_apicv()
10384 if (!static_call(kvm_x86_check_apicv_inhibit_reasons)(reason)) in __kvm_set_or_clear_apicv_inhibit()
10648 static_call(kvm_x86_msr_filter_changed)(vcpu); in vcpu_enter_guest()
10651 static_call(kvm_x86_update_cpu_dirty_logging)(vcpu); in vcpu_enter_guest()
10673 static_call(kvm_x86_enable_irq_window)(vcpu); in vcpu_enter_guest()
10688 static_call(kvm_x86_prepare_switch_to_guest)(vcpu); in vcpu_enter_guest()
10738 static_call(kvm_x86_request_immediate_exit)(vcpu); in vcpu_enter_guest()
10770 exit_fastpath = static_call(kvm_x86_vcpu_run)(vcpu); in vcpu_enter_guest()
10791 static_call(kvm_x86_sync_dirty_debug_regs)(vcpu); in vcpu_enter_guest()
10820 static_call(kvm_x86_handle_exit_irqoff)(vcpu); in vcpu_enter_guest()
10866 r = static_call(kvm_x86_handle_exit)(vcpu, exit_fastpath); in vcpu_enter_guest()
10872 static_call(kvm_x86_cancel_injection)(vcpu); in vcpu_enter_guest()
11186 r = static_call(kvm_x86_vcpu_pre_run)(vcpu); in kvm_arch_vcpu_ioctl_run()
11306 static_call(kvm_x86_get_idt)(vcpu, &dt); in __get_sregs_common()
11309 static_call(kvm_x86_get_gdt)(vcpu, &dt); in __get_sregs_common()
11504 static_call(kvm_x86_set_idt)(vcpu, &dt); in __set_sregs_common()
11507 static_call(kvm_x86_set_gdt)(vcpu, &dt); in __set_sregs_common()
11518 static_call(kvm_x86_set_efer)(vcpu, sregs->efer); in __set_sregs_common()
11521 static_call(kvm_x86_set_cr0)(vcpu, sregs->cr0); in __set_sregs_common()
11525 static_call(kvm_x86_set_cr4)(vcpu, sregs->cr4); in __set_sregs_common()
11695 static_call(kvm_x86_update_exception_bitmap)(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
11825 return static_call(kvm_x86_vcpu_precreate)(kvm); in kvm_arch_vcpu_precreate()
11912 r = static_call(kvm_x86_vcpu_create)(vcpu); in kvm_arch_vcpu_create()
11970 static_call(kvm_x86_vcpu_free)(vcpu); in kvm_arch_vcpu_destroy()
12091 static_call(kvm_x86_vcpu_reset)(vcpu, init_event); in kvm_vcpu_reset()
12110 static_call(kvm_x86_set_cr0)(vcpu, new_cr0); in kvm_vcpu_reset()
12111 static_call(kvm_x86_set_cr4)(vcpu, 0); in kvm_vcpu_reset()
12112 static_call(kvm_x86_set_efer)(vcpu, 0); in kvm_vcpu_reset()
12113 static_call(kvm_x86_update_exception_bitmap)(vcpu); in kvm_vcpu_reset()
12165 ret = static_call(kvm_x86_hardware_enable)(); in kvm_arch_hardware_enable()
12247 static_call(kvm_x86_hardware_disable)(); in kvm_arch_hardware_disable()
12316 static_call(kvm_x86_hardware_unsetup)(); in kvm_arch_hardware_unsetup()
12356 static_call(kvm_x86_sched_in)(vcpu, cpu); in kvm_arch_sched_in()
12382 ret = static_call(kvm_x86_vm_init)(kvm); in kvm_arch_init_vm()
12889 static_call(kvm_x86_guest_apic_has_interrupt)(vcpu)); in kvm_guest_apic_has_interrupt()
12909 static_call(kvm_x86_nmi_allowed)(vcpu, false))) in kvm_vcpu_has_events()
12914 static_call(kvm_x86_smi_allowed)(vcpu, false))) in kvm_vcpu_has_events()
12944 static_call(kvm_x86_dy_apicv_has_pending_interrupt)(vcpu)) in kvm_arch_dy_has_pending_interrupt()
12983 return static_call(kvm_x86_interrupt_allowed)(vcpu, false); in kvm_arch_interrupt_allowed()
13009 rflags = static_call(kvm_x86_get_rflags)(vcpu); in kvm_get_rflags()
13021 static_call(kvm_x86_set_rflags)(vcpu, rflags); in __kvm_set_rflags()
13133 static_call(kvm_x86_get_cpl)(vcpu) == 0) in kvm_can_deliver_async_pf()
13292 ret = static_call(kvm_x86_pi_update_irte)(irqfd->kvm, in kvm_arch_irq_bypass_add_producer()
13317 ret = static_call(kvm_x86_pi_update_irte)(irqfd->kvm, prod->irq, irqfd->gsi, 0); in kvm_arch_irq_bypass_del_producer()
13328 return static_call(kvm_x86_pi_update_irte)(kvm, host_irq, guest_irq, set); in kvm_arch_update_irqfd_routing()
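
Every hit above is the same pattern: KVM declares one static call per kvm_x86_ops member (via the KVM_X86_OP() macros driven by arch/x86/include/asm/kvm-x86-ops.h) and, when the vendor module registers, patches each trampoline to the VMX or SVM implementation with static_call_update(). Each listed call site is therefore a direct, text-patched call rather than a retpoline-bound indirect call through the ops table. Below is a minimal sketch of that mechanism using the generic static_call API; the example_* names are hypothetical stand-ins, not KVM symbols, and this illustrates the pattern rather than the actual KVM macro machinery.

#include <linux/static_call.h>
#include <linux/types.h>

struct kvm_vcpu;	/* opaque here; the real layout lives in kvm_host.h */

/* Hypothetical vendor implementation, standing in for vmx_get_cpl()/svm_get_cpl(). */
static int example_get_cpl_impl(struct kvm_vcpu *vcpu)
{
	return 0;	/* pretend the guest runs in ring 0 */
}

/* One trampoline per op; starts out NULL, as x86.c does with DEFINE_STATIC_CALL_NULL. */
DEFINE_STATIC_CALL_NULL(example_get_cpl, example_get_cpl_impl);

/* At vendor-module init, patch the trampoline to the chosen implementation. */
static void example_ops_update(void)
{
	static_call_update(example_get_cpl, example_get_cpl_impl);
}

/* Callers then look like the hits above: a direct call through the patched site. */
static bool example_require_cpl0(struct kvm_vcpu *vcpu)
{
	return static_call(example_get_cpl)(vcpu) == 0;
}

The payoff shows in hot paths such as vcpu_enter_guest() above: with retpolines enabled, an indirect kvm_x86_ops->vcpu_run(vcpu) call would bounce through a thunk on every VM entry, while static_call(kvm_x86_vcpu_run)(vcpu) is a plain call instruction once patched.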