kvm_load_segment_descriptor 3357 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ldt_selector, 0, VCPU_SREG_LDTR))
kvm_load_segment_descriptor 3360 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->es, 1, VCPU_SREG_ES))
kvm_load_segment_descriptor 3363 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->cs, 9, VCPU_SREG_CS))
kvm_load_segment_descriptor 3366 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ss, 1, VCPU_SREG_SS))
kvm_load_segment_descriptor 3369 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ds, 1, VCPU_SREG_DS))
kvm_load_segment_descriptor 3372 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->fs, 1, VCPU_SREG_FS))
kvm_load_segment_descriptor 3375 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->gs, 1, VCPU_SREG_GS))
kvm_load_segment_descriptor 3416 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ldt, 0, VCPU_SREG_LDTR))
kvm_load_segment_descriptor 3419 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->es, 1, VCPU_SREG_ES))
kvm_load_segment_descriptor 3422 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->cs, 9, VCPU_SREG_CS))
kvm_load_segment_descriptor 3425 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ss, 1, VCPU_SREG_SS))
kvm_load_segment_descriptor 3428 arch/x86/kvm/x86.c          if (kvm_load_segment_descriptor(vcpu, tss->ds, 1, VCPU_SREG_DS))
kvm_load_segment_descriptor 1535 arch/x86/kvm/x86_emulate.c  err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
kvm_load_segment_descriptor 1719 arch/x86/kvm/x86_emulate.c  if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
kvm_load_segment_descriptor  510 include/asm-x86/kvm_host.h  int kvm_load_segment_descriptor(struct kvm_vcpu *vcpu, u16 selector,
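
Taken together, the call sites in arch/x86/kvm/x86.c show one recurring pattern: during an emulated task switch, KVM reloads LDTR first and then each segment register from the selectors saved in the TSS image, bailing out as soon as one load fails. A minimal sketch of that pattern follows, assuming the kernel's KVM headers; the meaning of the third argument (0 for LDT, 1 for a data segment, 9 for a code segment) and the name load_tss_segments are inferred from the call sites above, not taken from the source.

/*
 * Sketch only, not kernel code: reload segment registers from a saved
 * 32-bit TSS image, mirroring the calls at x86.c lines 3357-3375.
 * A nonzero return tells the caller a descriptor load failed so it
 * can inject a fault into the guest.
 */
static int load_tss_segments(struct kvm_vcpu *vcpu,
			     struct tss_segment_32 *tss)
{
	/*
	 * LDTR goes first: the data and code selectors below may
	 * index into the new task's LDT.
	 */
	if (kvm_load_segment_descriptor(vcpu, tss->ldt_selector, 0,
					VCPU_SREG_LDTR))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->es, 1, VCPU_SREG_ES))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->cs, 9, VCPU_SREG_CS))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->ss, 1, VCPU_SREG_SS))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->ds, 1, VCPU_SREG_DS))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->fs, 1, VCPU_SREG_FS))
		return 1;
	if (kvm_load_segment_descriptor(vcpu, tss->gs, 1, VCPU_SREG_GS))
		return 1;
	return 0;
}

The second run of calls (x86.c lines 3416-3428, loading from tss->ldt and stopping at DS) follows the same shape for the 16-bit TSS, which has no FS or GS fields.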