vmcs_readl  400 arch/x86/kvm/vmx.c  return vmcs_readl(field);
vmcs_readl  405 arch/x86/kvm/vmx.c  return vmcs_readl(field);
vmcs_readl  411 arch/x86/kvm/vmx.c  return vmcs_readl(field);
vmcs_readl  413 arch/x86/kvm/vmx.c  return vmcs_readl(field) | ((u64)vmcs_readl(field+1) << 32);
vmcs_readl  455 arch/x86/kvm/vmx.c  vmcs_writel(field, vmcs_readl(field) & ~mask);
vmcs_readl  460 arch/x86/kvm/vmx.c  vmcs_writel(field, vmcs_readl(field) | mask);
vmcs_readl  599 arch/x86/kvm/vmx.c  wrmsrl(MSR_GS_BASE, vmcs_readl(HOST_GS_BASE));
vmcs_readl  703 arch/x86/kvm/vmx.c  return vmcs_readl(GUEST_RFLAGS);
vmcs_readl  718 arch/x86/kvm/vmx.c  rip = vmcs_readl(GUEST_RIP);
vmcs_readl  856 arch/x86/kvm/vmx.c  data = vmcs_readl(GUEST_FS_BASE);
vmcs_readl  859 arch/x86/kvm/vmx.c  data = vmcs_readl(GUEST_GS_BASE);
vmcs_readl  871 arch/x86/kvm/vmx.c  data = vmcs_readl(GUEST_SYSENTER_EIP);
vmcs_readl  874 arch/x86/kvm/vmx.c  data = vmcs_readl(GUEST_SYSENTER_ESP);
vmcs_readl  956 arch/x86/kvm/vmx.c  vcpu->arch.regs[VCPU_REGS_RSP] = vmcs_readl(GUEST_RSP);
vmcs_readl  957 arch/x86/kvm/vmx.c  vcpu->arch.rip = vmcs_readl(GUEST_RIP);
vmcs_readl  997 arch/x86/kvm/vmx.c  flags = vmcs_readl(GUEST_RFLAGS);
vmcs_readl 1276 arch/x86/kvm/vmx.c  if (vmcs_readl(sf->base) == save->base && (save->base & AR_S_MASK)) {
vmcs_readl 1298 arch/x86/kvm/vmx.c  flags = vmcs_readl(GUEST_RFLAGS);
vmcs_readl 1303 arch/x86/kvm/vmx.c  vmcs_writel(GUEST_CR4, (vmcs_readl(GUEST_CR4) & ~X86_CR4_VME) |
vmcs_readl 1304 arch/x86/kvm/vmx.c  (vmcs_readl(CR4_READ_SHADOW) & X86_CR4_VME));
vmcs_readl 1336 arch/x86/kvm/vmx.c  save->base = vmcs_readl(sf->base);
vmcs_readl 1351 arch/x86/kvm/vmx.c  vcpu->arch.rmode.tr.base = vmcs_readl(GUEST_TR_BASE);
vmcs_readl 1360 arch/x86/kvm/vmx.c  flags = vmcs_readl(GUEST_RFLAGS);
vmcs_readl 1367 arch/x86/kvm/vmx.c  vmcs_writel(GUEST_CR4, vmcs_readl(GUEST_CR4) | X86_CR4_VME);
vmcs_readl 1370 arch/x86/kvm/vmx.c  vmcs_write16(GUEST_SS_SELECTOR, vmcs_readl(GUEST_SS_BASE) >> 4);
vmcs_readl 1376 arch/x86/kvm/vmx.c  if (vmcs_readl(GUEST_CS_BASE) == 0xffff0000)
vmcs_readl 1378 arch/x86/kvm/vmx.c  vmcs_write16(GUEST_CS_SELECTOR, vmcs_readl(GUEST_CS_BASE) >> 4);
vmcs_readl 1433 arch/x86/kvm/vmx.c  vcpu->arch.cr4 |= vmcs_readl(GUEST_CR4) & ~KVM_GUEST_CR4_MASK;
vmcs_readl 1596 arch/x86/kvm/vmx.c  return vmcs_readl(sf->base);
vmcs_readl 1605 arch/x86/kvm/vmx.c  var->base = vmcs_readl(sf->base);
vmcs_readl 1697 arch/x86/kvm/vmx.c  dt->base = vmcs_readl(GUEST_IDTR_BASE);
vmcs_readl 1709 arch/x86/kvm/vmx.c  dt->base = vmcs_readl(GUEST_GDTR_BASE);
vmcs_readl 2142 arch/x86/kvm/vmx.c  vmx->rmode.irq.rip = vmcs_readl(GUEST_RIP);
vmcs_readl 2179 arch/x86/kvm/vmx.c  ((vmcs_readl(GUEST_RFLAGS) & X86_EFLAGS_IF) &&
vmcs_readl 2231 arch/x86/kvm/vmx.c  flags = vmcs_readl(GUEST_RFLAGS);
vmcs_readl 2291 arch/x86/kvm/vmx.c  rip = vmcs_readl(GUEST_RIP);
vmcs_readl 2298 arch/x86/kvm/vmx.c  cr2 = vmcs_readl(EXIT_QUALIFICATION);
vmcs_readl 2348 arch/x86/kvm/vmx.c  exit_qualification = vmcs_readl(EXIT_QUALIFICATION);
vmcs_readl 2360 arch/x86/kvm/vmx.c  down = (vmcs_readl(GUEST_RFLAGS) & X86_EFLAGS_DF) != 0;
vmcs_readl 2384 arch/x86/kvm/vmx.c  exit_qualification = vmcs_readl(EXIT_QUALIFICATION);
vmcs_readl 2472 arch/x86/kvm/vmx.c  exit_qualification = vmcs_readl(EXIT_QUALIFICATION);
vmcs_readl 2619 arch/x86/kvm/vmx.c  exit_qualification = vmcs_readl(EXIT_QUALIFICATION);
vmcs_readl 2738 arch/x86/kvm/vmx.c  KVMTRACE_3D(VMEXIT, vcpu, exit_reason, (u32)vmcs_readl(GUEST_RIP),
vmcs_readl 2739 arch/x86/kvm/vmx.c  (u32)((u64)vmcs_readl(GUEST_RIP) >> 32), entryexit);
vmcs_readl 2744 arch/x86/kvm/vmx.c  vcpu->arch.cr3 = vmcs_readl(GUEST_CR3);
vmcs_readl 2821 arch/x86/kvm/vmx.c  (vmcs_readl(GUEST_RFLAGS) & X86_EFLAGS_IF));
vmcs_readl 2925 arch/x86/kvm/vmx.c  if (vmcs_readl(GUEST_RIP) + 1 != vmx->rmode.irq.rip)
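Every entry above reads a natural-width VMCS field (GUEST_RIP, GUEST_RFLAGS, EXIT_QUALIFICATION, segment bases, and so on) through vmcs_readl, and the entry at vmx.c:413 shows a 64-bit field being assembled from two such reads on a 32-bit host. As a rough orientation, and not the exact vmx.c source, the sketch below illustrates how a vmcs_readl-style accessor wraps the VMREAD instruction and how the two-read composition from vmx.c:413 fits into a vmcs_read64-style helper; the inline-asm form and the use of CONFIG_X86_64 here are illustrative assumptions.

```c
#include <linux/types.h>

/*
 * Sketch of a vmcs_readl-style accessor: VMREAD copies the VMCS field
 * identified by the encoding in a source register into a destination
 * register. Error reporting via CF/ZF is ignored in this sketch.
 */
static inline unsigned long vmcs_readl_sketch(unsigned long field)
{
	unsigned long value;

	asm volatile("vmread %1, %0" : "=rm"(value) : "r"(field) : "cc");
	return value;
}

/*
 * Sketch matching the pattern at vmx.c:413: on a 64-bit host a 64-bit
 * field is one natural-width read; on a 32-bit host the high half lives
 * at encoding field+1 and the two 32-bit reads are combined.
 */
static inline u64 vmcs_read64_sketch(unsigned long field)
{
#ifdef CONFIG_X86_64
	return vmcs_readl_sketch(field);
#else
	return vmcs_readl_sketch(field) |
	       ((u64)vmcs_readl_sketch(field + 1) << 32);
#endif
}
```

The read-modify-write entries at vmx.c:455 and vmx.c:460 follow the same accessor with a paired vmcs_writel, clearing or setting bits in a field without touching the rest of it.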