
Changeset 14803 in vbox for trunk/src/VBox


Timestamp: Nov 29, 2008 1:40:02 AM
Author:    vboxsync
Message:   #3202: More darwin mess.
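The diff below retrofits the VT-x host-state setup in HWVMXR0.cpp for builds where a 32-bit host kernel runs on a CPU that may be operating in 64-bit (long) mode, i.e. the VBOX_WITH_HYBIRD_32BIT_KERNEL configuration; the "darwin" in the commit message suggests this targets Mac OS X hosts. Code that was previously compiled only when HC_ARCH_BITS == 64 is now also compiled for such hybrid builds and guarded at runtime by a new VMX_IS_64BIT_HOST_MODE() macro, so the 64-bit host state (CR8/TPR exit controls, the HOST_AMD64 exit control, host FS/GS and SYSENTER fields, and strict-build MSR logging) is only programmed when the host actually is in 64-bit mode.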

File: 1 edited

Legend: unchanged lines are prefixed with a space, added lines with '+', removed lines with '-'.
  • trunk/src/VBox/VMM/VMMR0/HWVMXR0.cpp (r14757 → r14803)
@@ -42,4 +42,15 @@
 
 /*******************************************************************************
+*   Defined Constants And Macros                                               *
+*******************************************************************************/
+#if defined(RT_ARCH_AMD64)
+# define VMX_IS_64BIT_HOST_MODE()   (true)
+#elif defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+# define VMX_IS_64BIT_HOST_MODE()   (g_fVMXIs64bitHost != 0)
+#else
+# define VMX_IS_64BIT_HOST_MODE()   (false)
+#endif
+
+/*******************************************************************************
 *   Global Variables                                                           *
 *******************************************************************************/
@@ -47,4 +58,9 @@
 static uint32_t const g_aIOSize[4]  = {1, 2, 0, 4};
 static uint32_t const g_aIOOpAnd[4] = {0xff, 0xffff, 0, 0xffffffff};
+
+#ifdef VBOX_WITH_HYBIRD_32BIT_KERNEL
+/** See HWACCMR0A.asm. */
+extern "C" uint32_t g_fVMXIs64bitHost;
+#endif
 
 /*******************************************************************************
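The macro introduced in the hunk above is the core of the change. What follows is a minimal, self-contained sketch (not VirtualBox code) of how it resolves under the three build configurations; the g_fVMXIs64bitHost stub merely stands in for the flag that, per the comment in the hunk, is exported by HWACCMR0A.asm.

    // Minimal sketch, not part of the changeset: how VMX_IS_64BIT_HOST_MODE()
    // resolves per build configuration.  Compile e.g. with
    //   g++ -DVBOX_WITH_HYBIRD_32BIT_KERNEL vmx_host_mode_sketch.cpp
    // (hypothetical file name) to exercise the hybrid-kernel path.
    #include <cstdint>
    #include <cstdio>

    // Stand-in for the flag exported by ring-0 assembly (HWACCMR0A.asm in the
    // real tree); hard-coded here purely for illustration.
    uint32_t g_fVMXIs64bitHost = 1;

    #if defined(RT_ARCH_AMD64)
    # define VMX_IS_64BIT_HOST_MODE()   (true)                   // 64-bit build: host is always 64-bit
    #elif defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
    # define VMX_IS_64BIT_HOST_MODE()   (g_fVMXIs64bitHost != 0) // 32-bit build on a 64-bit capable host: runtime check
    #else
    # define VMX_IS_64BIT_HOST_MODE()   (false)                  // plain 32-bit host
    #endif

    int main()
    {
        std::printf("64-bit host mode: %s\n", VMX_IS_64BIT_HOST_MODE() ? "yes" : "no");
        return 0;
    }

On the hybrid configuration the decision cannot be made at compile time, which is exactly why the guards in the hunks below move from the preprocessor into an if().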
     
@@ -314,14 +330,17 @@
         /* Note: VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_MWAIT_EXIT might cause a vmlaunch failure with an invalid control fields error. (combined with some other exit reasons) */
 
-#if HC_ARCH_BITS == 64
-        if (pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.n.allowed1 & VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_USE_TPR_SHADOW)
-        {
-            /* CR8 reads from the APIC shadow page; writes cause an exit if they lower the TPR below the threshold */
-            val |= VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_USE_TPR_SHADOW;
-            Assert(pVM->hwaccm.s.vmx.pAPIC);
-        }
-        else
-            /* Exit on CR8 reads & writes in case the TPR shadow feature isn't present. */
-            val |= VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_CR8_STORE_EXIT | VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_CR8_LOAD_EXIT;
+#if HC_ARCH_BITS == 64 || defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+        if (VMX_IS_64BIT_HOST_MODE())
+        {
+            if (pVM->hwaccm.s.vmx.msr.vmx_proc_ctls.n.allowed1 & VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_USE_TPR_SHADOW)
+            {
+                /* CR8 reads from the APIC shadow page; writes cause an exit if they lower the TPR below the threshold */
+                val |= VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_USE_TPR_SHADOW;
+                Assert(pVM->hwaccm.s.vmx.pAPIC);
+            }
+            else
+                /* Exit on CR8 reads & writes in case the TPR shadow feature isn't present. */
+                val |= VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_CR8_STORE_EXIT | VMX_VMCS_CTRL_PROC_EXEC_CONTROLS_CR8_LOAD_EXIT;
+        }
 #endif
 
     
@@ -384,8 +403,8 @@
         /* Save debug controls (dr7 & IA32_DEBUGCTL_MSR) (forced to 1 on the 'first' VT-x capable CPUs; this actually includes the newest Nehalem CPUs) */
         val |= VMX_VMCS_CTRL_EXIT_CONTROLS_SAVE_DEBUG;
-#if HC_ARCH_BITS == 64
-        val |= VMX_VMCS_CTRL_EXIT_CONTROLS_HOST_AMD64;
-#else
-        /* else Must be zero when AMD64 is not available. */
+#if HC_ARCH_BITS == 64 || defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+        if (VMX_IS_64BIT_HOST_MODE())
+            val |= VMX_VMCS_CTRL_EXIT_CONTROLS_HOST_AMD64;
+        /* else: Must be zero when AMD64 is not available. */
 #endif
         val &= pVM->hwaccm.s.vmx.msr.vmx_exit.n.allowed1;
     
@@ -807,6 +826,9 @@
         rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_ES,          0);
 #if HC_ARCH_BITS == 32
-        rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_FS,          0);
-        rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_GS,          0);
+        if (!VMX_IS_64BIT_HOST_MODE())
+        {
+            rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_FS,      0);
+            rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_GS,      0);
+        }
 #endif
         rc |= VMXWriteVMCS(VMX_VMCS16_HOST_FIELD_SS,          ss);
     
@@ -841,4 +863,9 @@
 #if HC_ARCH_BITS == 64
         trBase = X86DESC64_BASE(*pDesc);
+#elif defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+        if (VMX_IS_64BIT_HOST_MODE())
+            trBase = X86DESC64_BASE(*(PX86DESC64)pDesc);
+        else
+            trBase = X86DESC_BASE(*pDesc);
 #else
         trBase = X86DESC_BASE(*pDesc);
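The TR base hunk above casts pDesc to PX86DESC64 when the host is in 64-bit mode because the 64-bit TSS descriptor is a 16-byte system descriptor whose base address is split across four fields, and the legacy 8-byte accessor would return only the low 32 bits. The sketch below (with hypothetical field names, not VirtualBox's actual X86DESC64 definition) shows what such an accessor has to assemble:

    // Standalone illustration of extracting the base address from a 16-byte
    // x86-64 system descriptor (e.g. the host TSS descriptor).  Field names
    // are hypothetical; the real X86DESC64/X86DESC64_BASE live in the VBox
    // x86 headers.
    #include <cstdint>

    struct SysDesc64
    {
        uint16_t uLimitLow;   // limit bits 15:0
        uint16_t uBaseLow;    // base bits 15:0
        uint8_t  uBaseMid;    // base bits 23:16
        uint8_t  uTypeAttr;   // type, DPL, present
        uint8_t  uLimitFlags; // limit bits 19:16 plus flags
        uint8_t  uBaseHigh;   // base bits 31:24
        uint32_t uBaseTop;    // base bits 63:32 (only present in the 16-byte form)
        uint32_t uReserved;
    };

    static inline uint64_t SysDesc64Base(const SysDesc64 &Desc)
    {
        return   (uint64_t)Desc.uBaseLow
             | ((uint64_t)Desc.uBaseMid  << 16)
             | ((uint64_t)Desc.uBaseHigh << 24)
             | ((uint64_t)Desc.uBaseTop  << 32);
    }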
     
@@ -849,9 +876,12 @@
 
         /* FS and GS base. */
-#if HC_ARCH_BITS == 64
-        Log2(("MSR_K8_FS_BASE = %RX64\n", ASMRdMsr(MSR_K8_FS_BASE)));
-        Log2(("MSR_K8_GS_BASE = %RX64\n", ASMRdMsr(MSR_K8_GS_BASE)));
-        rc  = VMXWriteVMCS64(VMX_VMCS_HOST_FS_BASE,         ASMRdMsr(MSR_K8_FS_BASE));
-        rc |= VMXWriteVMCS64(VMX_VMCS_HOST_GS_BASE,         ASMRdMsr(MSR_K8_GS_BASE));
+#if HC_ARCH_BITS == 64 || defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+        if (VMX_IS_64BIT_HOST_MODE())
+        {
+            Log2(("MSR_K8_FS_BASE = %RX64\n", ASMRdMsr(MSR_K8_FS_BASE)));
+            Log2(("MSR_K8_GS_BASE = %RX64\n", ASMRdMsr(MSR_K8_GS_BASE)));
+            rc  = VMXWriteVMCS64(VMX_VMCS_HOST_FS_BASE,     ASMRdMsr(MSR_K8_FS_BASE));
+            rc |= VMXWriteVMCS64(VMX_VMCS_HOST_GS_BASE,     ASMRdMsr(MSR_K8_GS_BASE));
+        }
 #endif
         AssertRC(rc);
     
@@ -861,5 +891,20 @@
         rc  = VMXWriteVMCS(VMX_VMCS32_HOST_SYSENTER_CS,       ASMRdMsr_Low(MSR_IA32_SYSENTER_CS));
         Log2(("VMX_VMCS_HOST_SYSENTER_CS  %08x\n", ASMRdMsr_Low(MSR_IA32_SYSENTER_CS)));
-#if HC_ARCH_BITS == 32
+#ifdef VBOX_WITH_HYBIRD_32BIT_KERNEL
+        if (VMX_IS_64BIT_HOST_MODE())
+        {
+            Log2(("VMX_VMCS_HOST_SYSENTER_EIP %RX64\n",         ASMRdMsr(MSR_IA32_SYSENTER_EIP)));
+            Log2(("VMX_VMCS_HOST_SYSENTER_ESP %RX64\n",         ASMRdMsr(MSR_IA32_SYSENTER_ESP)));
+            rc |= VMXWriteVMCS64(VMX_VMCS_HOST_SYSENTER_ESP,    ASMRdMsr(MSR_IA32_SYSENTER_ESP));
+            rc |= VMXWriteVMCS64(VMX_VMCS_HOST_SYSENTER_EIP,    ASMRdMsr(MSR_IA32_SYSENTER_EIP));
+        }
+        else
+        {
+            rc |= VMXWriteVMCS(VMX_VMCS_HOST_SYSENTER_ESP,  ASMRdMsr_Low(MSR_IA32_SYSENTER_ESP));
+            rc |= VMXWriteVMCS(VMX_VMCS_HOST_SYSENTER_EIP,  ASMRdMsr_Low(MSR_IA32_SYSENTER_EIP));
+            Log2(("VMX_VMCS_HOST_SYSENTER_EIP %RX32\n",     ASMRdMsr_Low(MSR_IA32_SYSENTER_EIP)));
+            Log2(("VMX_VMCS_HOST_SYSENTER_ESP %RX32\n",     ASMRdMsr_Low(MSR_IA32_SYSENTER_ESP)));
+        }
+#elif HC_ARCH_BITS == 32
         rc |= VMXWriteVMCS(VMX_VMCS_HOST_SYSENTER_ESP,      ASMRdMsr_Low(MSR_IA32_SYSENTER_ESP));
         rc |= VMXWriteVMCS(VMX_VMCS_HOST_SYSENTER_EIP,      ASMRdMsr_Low(MSR_IA32_SYSENTER_EIP));
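The SYSENTER hunk above, like the FS/GS base hunk before it, switches between full 64-bit MSR reads (ASMRdMsr) and low-half reads (ASMRdMsr_Low) depending on the host mode. A minimal sketch of why that matters, assuming the usual rdmsr convention that the value is returned in EDX:EAX:

    // Minimal sketch, not VirtualBox code: a 32-bit read of a 64-bit MSR such
    // as IA32_SYSENTER_EIP silently drops the upper half, which is only
    // acceptable when the host kernel really runs in 32-bit mode.
    #include <cstdint>
    #include <cstdio>

    static inline uint32_t MsrLow(uint64_t uValue)  { return (uint32_t)uValue; }         // EAX half (what a _Low style read yields)
    static inline uint32_t MsrHigh(uint64_t uValue) { return (uint32_t)(uValue >> 32); } // EDX half (lost on the 32-bit path)

    int main()
    {
        const uint64_t uSysenterEip = 0xffffff8000123456ULL; // hypothetical 64-bit kernel entry point
        std::printf("full: %#llx  low32: %#x  high32 lost: %#x\n",
                    (unsigned long long)uSysenterEip, MsrLow(uSysenterEip), MsrHigh(uSysenterEip));
        return 0;
    }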
     
@@ -3303,10 +3348,13 @@
             Log(("VMX_VMCS_HOST_RIP %RHv\n", val));
 
-# if HC_ARCH_BITS == 64
-            Log(("MSR_K6_EFER       = %RX64\n", ASMRdMsr(MSR_K6_EFER)));
-            Log(("MSR_K6_STAR       = %RX64\n", ASMRdMsr(MSR_K6_STAR)));
-            Log(("MSR_K8_LSTAR      = %RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
-            Log(("MSR_K8_CSTAR      = %RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
-            Log(("MSR_K8_SF_MASK    = %RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
+# if HC_ARCH_BITS == 64 || defined(VBOX_WITH_HYBIRD_32BIT_KERNEL)
+            if (VMX_IS_64BIT_HOST_MODE())
+            {
+                Log(("MSR_K6_EFER       = %RX64\n", ASMRdMsr(MSR_K6_EFER)));
+                Log(("MSR_K6_STAR       = %RX64\n", ASMRdMsr(MSR_K6_STAR)));
+                Log(("MSR_K8_LSTAR      = %RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
+                Log(("MSR_K8_CSTAR      = %RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
+                Log(("MSR_K8_SF_MASK    = %RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
+            }
 # endif
 #endif /* VBOX_STRICT */