MSR_EFER   83 arch/x86/kernel/acpi/sleep.c  rdmsr(MSR_EFER, header->pmode_efer_low,
MSR_EFER  566 arch/x86/kernel/vmi_32.c      rdmsr(MSR_EFER, l, h);
MSR_EFER  279 arch/x86/kvm/svm.c            rdmsrl(MSR_EFER, efer);
MSR_EFER  280 arch/x86/kvm/svm.c            wrmsrl(MSR_EFER, efer & ~MSR_EFER_SVME_MASK);
MSR_EFER  312 arch/x86/kvm/svm.c            rdmsrl(MSR_EFER, efer);
MSR_EFER  313 arch/x86/kvm/svm.c            wrmsrl(MSR_EFER, efer | MSR_EFER_SVME_MASK);
MSR_EFER  155 arch/x86/kvm/vmx.c            MSR_EFER, MSR_K6_STAR,
MSR_EFER  519 arch/x86/kvm/vmx.c            wrmsrl(MSR_EFER, guest_efer);
MSR_EFER  810 arch/x86/kvm/vmx.c            vmx->msr_offset_efer = __find_msr_index(vmx, MSR_EFER);
MSR_EFER  861 arch/x86/kvm/vmx.c            case MSR_EFER:
MSR_EFER  902 arch/x86/kvm/vmx.c            case MSR_EFER:
MSR_EFER 1406 arch/x86/kvm/vmx.c            find_msr_entry(to_vmx(vcpu), MSR_EFER)->data |= EFER_LMA | EFER_LME;
MSR_EFER 1571 arch/x86/kvm/vmx.c            struct kvm_msr_entry *msr = find_msr_entry(vmx, MSR_EFER);
MSR_EFER  650 arch/x86/kvm/x86.c            case MSR_EFER:
MSR_EFER  779 arch/x86/kvm/x86.c            case MSR_EFER:
MSR_EFER  987 arch/x86/kvm/x86.c            rdmsrl(MSR_EFER, efer);
MSR_EFER  604 arch/x86/mm/init_32.c         rdmsr(MSR_EFER, l, h);
MSR_EFER  606 arch/x86/mm/init_32.c         wrmsr(MSR_EFER, l, h);
MSR_EFER  123 arch/x86/mm/init_64.c         rdmsrl(MSR_EFER, efer);
MSR_EFER   67 arch/x86/power/cpu_64.c       rdmsrl(MSR_EFER, ctxt->efer);
MSR_EFER   98 arch/x86/power/cpu_64.c       wrmsrl(MSR_EFER, ctxt->efer);
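
The paired references at arch/x86/kvm/svm.c lines 279-280 and 312-313 show the common read-modify-write pattern on this MSR: read EFER with rdmsrl(), set or clear the SVME bit, write it back with wrmsrl(). A minimal sketch of that pattern follows; the function names here are hypothetical, while rdmsrl()/wrmsrl(), MSR_EFER, and MSR_EFER_SVME_MASK are taken from the listing above and assume the same kernel tree's headers.

#include <asm/msr.h>

/* Sketch: disable SVM by clearing EFER.SVME, as at svm.c:279-280. */
static void svm_disable_sketch(void)
{
	u64 efer;

	rdmsrl(MSR_EFER, efer);                        /* read current EFER */
	wrmsrl(MSR_EFER, efer & ~MSR_EFER_SVME_MASK);  /* clear SVME bit */
}

/* Sketch: enable SVM by setting EFER.SVME, as at svm.c:312-313. */
static void svm_enable_sketch(void)
{
	u64 efer;

	rdmsrl(MSR_EFER, efer);                        /* read current EFER */
	wrmsrl(MSR_EFER, efer | MSR_EFER_SVME_MASK);   /* set SVME bit */
}

The 32-bit call sites (init_32.c, vmi_32.c) use the two-argument rdmsr()/wrmsr() variants instead, which split the 64-bit MSR value into low and high 32-bit halves.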