@@ -5056,8 +5056,8 @@ int kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa, u64 error_code,
 	if (!mmio_info_in_cache(vcpu, cr2_or_gpa, direct) && !is_guest_mode(vcpu))
 		emulation_type |= EMULTYPE_ALLOW_RETRY_PF;
 emulate:
-	return x86_emulate_instruction(vcpu, cr2_or_gpa, emulation_type, insn,
-				       insn_len);
+	return x86_emulate_instruction(vcpu, cr2_or_gpa, emulation_type, 0,
+				       insn, insn_len);
 }
 EXPORT_SYMBOL_GPL(kvm_mmu_page_fault);
 
@@ -7445,7 +7445,8 @@ int x86_decode_emulated_instruction(struct kvm_vcpu *vcpu, int emulation_type,
 EXPORT_SYMBOL_GPL(x86_decode_emulated_instruction);
 
 int x86_emulate_instruction(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa,
-			    int emulation_type, void *insn, int insn_len)
+			    int emulation_type, int emulation_reason,
+			    void *insn, int insn_len)
 {
 	int r;
 	struct x86_emulate_ctxt *ctxt = vcpu->arch.emulate_ctxt;
@@ -7604,14 +7605,14 @@ int x86_emulate_instruction(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa,
 
 int kvm_emulate_instruction(struct kvm_vcpu *vcpu, int emulation_type)
 {
-	return x86_emulate_instruction(vcpu, 0, emulation_type, NULL, 0);
+	return x86_emulate_instruction(vcpu, 0, emulation_type, 0, NULL, 0);
 }
 EXPORT_SYMBOL_GPL(kvm_emulate_instruction);
 
 int kvm_emulate_instruction_from_buffer(struct kvm_vcpu *vcpu,
 					void *insn, int insn_len)
 {
-	return x86_emulate_instruction(vcpu, 0, 0, insn, insn_len);
+	return x86_emulate_instruction(vcpu, 0, 0, 0, insn, insn_len);
 }
 EXPORT_SYMBOL_GPL(kvm_emulate_instruction_from_buffer);
 
@@ -276,7 +276,8 @@ void kvm_fixup_and_inject_pf_error(struct kvm_vcpu *vcpu, gva_t gva, u16 error_c
 int x86_decode_emulated_instruction(struct kvm_vcpu *vcpu, int emulation_type,
 				    void *insn, int insn_len);
 int x86_emulate_instruction(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa,
-			    int emulation_type, void *insn, int insn_len);
+			    int emulation_type, int emulation_reason,
+			    void *insn, int insn_len);
 fastpath_t handle_fastpath_set_msr_irqoff(struct kvm_vcpu *vcpu);
 
 extern u64 host_xcr0;
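
After this change, every caller of x86_emulate_instruction() supplies an extra emulation_reason argument between emulation_type and the instruction buffer; the converted call sites in the hunks above all pass a literal 0. The standalone C sketch below only illustrates that calling convention outside the kernel: the stub types and the EMUL_REASON_* constants are hypothetical stand-ins and are not part of the patch, which defines no reason values itself.

/* Standalone sketch, not kernel code: models the patched calling
 * convention of x86_emulate_instruction(). The stub types and the
 * EMUL_REASON_* values are hypothetical; the patch only adds the
 * parameter and passes 0 at the converted call sites. */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t gpa_t;               /* stand-in for the kernel's gpa_t */
struct kvm_vcpu { int vcpu_id; };     /* stand-in for struct kvm_vcpu    */

enum {
	EMUL_REASON_UNSPECIFIED = 0,  /* what the converted callers pass */
	EMUL_REASON_MMIO        = 1,  /* hypothetical example value      */
	EMUL_REASON_PAGE_FAULT  = 2,  /* hypothetical example value      */
};

/* Same parameter order as the patched prototype in the header hunk. */
static int x86_emulate_instruction(struct kvm_vcpu *vcpu, gpa_t cr2_or_gpa,
				   int emulation_type, int emulation_reason,
				   void *insn, int insn_len)
{
	printf("vcpu %d: gpa=%#llx type=%#x reason=%d insn_len=%d\n",
	       vcpu->vcpu_id, (unsigned long long)cr2_or_gpa,
	       emulation_type, emulation_reason, insn_len);
	return 1;
}

int main(void)
{
	struct kvm_vcpu vcpu = { .vcpu_id = 0 };
	uint8_t insn[2] = { 0x0f, 0x0b };  /* arbitrary bytes for the demo */

	/* Mirrors the patched kvm_mmu_page_fault(): reason left at 0. */
	x86_emulate_instruction(&vcpu, 0x1000, 0, EMUL_REASON_UNSPECIFIED,
				insn, sizeof(insn));

	/* A caller that wants to record why emulation was triggered. */
	x86_emulate_instruction(&vcpu, 0x2000, 0, EMUL_REASON_PAGE_FAULT,
				NULL, 0);
	return 0;
}

Placing the new argument immediately after emulation_type means existing call sites only need a 0 inserted, as the kvm_emulate_instruction() and kvm_emulate_instruction_from_buffer() hunks show.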