VirtualBox

Changeset 55248 in vbox for trunk/src/VBox/VMM/VMMR0


Timestamp: Apr 14, 2015 1:43:25 PM (10 years ago)
Author:    vboxsync
Message:   HMR0VMX,EM,IEM: Moved the CRx related interpretation methods from EM to IEM (VT-x only) to avoid tinkering with the EMAll.cpp stuff any more.
File:      1 edited

Legend:

  Unmodified lines are prefixed with a space, added lines with '+', removed lines with '-'.
  • trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
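In short, the MOV-to/from-CRx, CLTS and LMSW exit paths stop calling the EM interpreter and instead hand the already-decoded instruction to IEM, which returns a strict status code. A condensed before/after sketch of the call pattern, taken from the hunks below (MOV-to-CRx shown; the other access types follow the same pattern):

    /* r55129: interpret the instruction via EM, plain int status code. */
    rc = EMInterpretCRxWrite(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx),
                             VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification),
                             VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification));

    /* r55248: let IEM execute the pre-decoded instruction, VBOXSTRICTRC status code.
       IEM also advances the guest RIP, so the handler no longer does that explicitly. */
    rcStrict = IEMExecDecodedMovCRxWrite(pVCpu, pVmxTransient->cbInstr,
                                         VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification),
                                         VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification));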

--- trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp (r55129)
+++ trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp (r55248)

@@ -6750,4 +6750,26 @@
 
     return rc;
+}
+
+
+/**
+ * Ensures that we've got a complete basic context.
+ *
+ * This excludes the FPU, SSE, AVX, and similar extended state.  The interface
+ * is for the interpreter.
+ *
+ * @returns VBox status code.
+ * @param   pVCpu           Pointer to the VMCPU of the calling EMT.
+ * @param   pMixedCtx       Pointer to the guest-CPU context which may have data
+ *                          needing to be synced in.
+ * @thread  EMT(pVCpu)
+ */
+VMMR0_INT_DECL(int) HMR0EnsureCompleteBasicContext(PVMCPU pVCpu, PCPUMCTX pMixedCtx)
+{
+    /* Note! Since this is only applicable to VT-x, the implementation is placed
+             in the VT-x part of the sources instead of the generic stuff. */
+    if (pVCpu->CTX_SUFF(pVM)->hm.s.vmx.fSupported)
+        return hmR0VmxSaveGuestState(pVCpu, pMixedCtx);
+    return VINF_SUCCESS;
 }
 
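The new helper is exported so that code outside HM (the interpreter in particular) can force a sync of the basic guest registers from the VMCS before reading the context. A minimal sketch of a hypothetical call site; the surrounding code and the use of CPUMQueryGuestCtxPtr() to obtain the context are assumptions for illustration, not part of this changeset:

    /* Hypothetical ring-0 caller, running on the EMT of pVCpu.  Syncs the basic
       guest registers into the context; FPU/SSE/AVX state is deliberately NOT synced. */
    PCPUMCTX pCtx = CPUMQueryGuestCtxPtr(pVCpu);        /* assumed context accessor */
    int rc = HMR0EnsureCompleteBasicContext(pVCpu, pCtx);
    AssertRCReturn(rc, rc);
    /* ... the interpreter may now read pCtx safely ... */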
     
@@ -10928,4 +10950,5 @@
     STAM_PROFILE_ADV_START(&pVCpu->hm.s.StatExitMovCRx, y2);
     int rc = hmR0VmxReadExitQualificationVmcs(pVCpu, pVmxTransient);
+    rc |= hmR0VmxReadExitInstrLenVmcs(pVmxTransient);
     AssertRCReturn(rc, rc);
 

@@ -10933,28 +10956,24 @@
     uint32_t const uAccessType           = VMX_EXIT_QUALIFICATION_CRX_ACCESS(uExitQualification);
     PVM pVM                              = pVCpu->CTX_SUFF(pVM);
+    VBOXSTRICTRC rcStrict;
+    rc  = hmR0VmxSaveGuestRipRspRflags(pVCpu, pMixedCtx);
+    rc |= hmR0VmxSaveGuestSegmentRegs(pVCpu, pMixedCtx); /* Only really need CS+SS. */
     switch (uAccessType)
     {
         case VMX_EXIT_QUALIFICATION_CRX_ACCESS_WRITE:       /* MOV to CRx */
         {
-#if 0
-            /* EMInterpretCRxWrite() references a lot of guest state (EFER, RFLAGS, Segment Registers, etc.) Sync entire state */
-            rc = hmR0VmxSaveGuestState(pVCpu, pMixedCtx);
-#else
-            rc  = hmR0VmxSaveGuestRipRspRflags(pVCpu, pMixedCtx);
             rc |= hmR0VmxSaveGuestControlRegs(pVCpu, pMixedCtx);
-            rc |= hmR0VmxSaveGuestSegmentRegs(pVCpu, pMixedCtx);
-#endif
             AssertRCReturn(rc, rc);
 
-            rc = EMInterpretCRxWrite(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx),
-                                     VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification),
-                                     VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification));
-            Assert(rc == VINF_SUCCESS || rc == VERR_EM_INTERPRETER || rc == VINF_PGM_CHANGE_MODE || rc == VINF_PGM_SYNC_CR3);
-
+            rcStrict = IEMExecDecodedMovCRxWrite(pVCpu, pVmxTransient->cbInstr,
+                                                 VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification),
+                                                 VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification));
+            AssertMsg(rcStrict == VINF_SUCCESS || rcStrict == VINF_IEM_RAISED_XCPT || rcStrict == VINF_PGM_CHANGE_MODE
+                      || rcStrict == VINF_PGM_SYNC_CR3, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
             switch (VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification))
             {
                 case 0: /* CR0 */
                     HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR0);
-                    Log4(("CRX CR0 write rc=%d CR0=%#RX64\n", rc, pMixedCtx->cr0));
+                    Log4(("CRX CR0 write rcStrict=%Rrc CR0=%#RX64\n", VBOXSTRICTRC_VAL(rcStrict), pMixedCtx->cr0));
                     break;
                 case 2: /* CR2 */

@@ -10964,13 +10983,13 @@
                     Assert(!pVM->hm.s.fNestedPaging || !CPUMIsGuestPagingEnabledEx(pMixedCtx));
                     HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR3);
-                    Log4(("CRX CR3 write rc=%d CR3=%#RX64\n", rc, pMixedCtx->cr3));
+                    Log4(("CRX CR3 write rcStrict=%Rrc CR3=%#RX64\n", VBOXSTRICTRC_VAL(rcStrict), pMixedCtx->cr3));
                     break;
                 case 4: /* CR4 */
                     HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR4);
-                    Log4(("CRX CR4 write rc=%d CR4=%#RX64\n", rc, pMixedCtx->cr4));
+                    Log4(("CRX CR4 write rc=%Rrc CR4=%#RX64\n", VBOXSTRICTRC_VAL(rcStrict), pMixedCtx->cr4));
                     break;
                 case 8: /* CR8 */
                     Assert(!(pVCpu->hm.s.vmx.u32ProcCtls & VMX_VMCS_CTRL_PROC_EXEC_USE_TPR_SHADOW));
-                    /* CR8 contains the APIC TPR. Was updated by EMInterpretCRxWrite(). */
+                    /* CR8 contains the APIC TPR. Was updated by IEMExecDecodedMovCRxWrite(). */
                     HMCPU_CF_SET(pVCpu, HM_CHANGED_VMX_GUEST_APIC_STATE);
                     break;

@@ -10986,8 +11005,7 @@
         case VMX_EXIT_QUALIFICATION_CRX_ACCESS_READ:        /* MOV from CRx */
         {
-            /* EMInterpretCRxRead() requires EFER MSR, CS. */
-            rc  = hmR0VmxSaveGuestSegmentRegs(pVCpu, pMixedCtx);
             rc |= hmR0VmxSaveGuestControlRegs(pVCpu, pMixedCtx);
             AssertRCReturn(rc, rc);
+
             Assert(   !pVM->hm.s.fNestedPaging
                    || !CPUMIsGuestPagingEnabledEx(pMixedCtx)

@@ -10998,10 +11016,11 @@
                    || !(pVCpu->hm.s.vmx.u32ProcCtls & VMX_VMCS_CTRL_PROC_EXEC_USE_TPR_SHADOW));
 
-            rc = EMInterpretCRxRead(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx),
-                                    VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification),
-                                    VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification));
-            Assert(rc == VINF_SUCCESS || rc == VERR_EM_INTERPRETER);
+            rcStrict = IEMExecDecodedMovCRxRead(pVCpu, pVmxTransient->cbInstr,
+                                                VMX_EXIT_QUALIFICATION_CRX_GENREG(uExitQualification),
+                                                VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification));
+            AssertMsg(rcStrict == VINF_SUCCESS || rcStrict == VINF_IEM_RAISED_XCPT, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
             STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCRxRead[VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification)]);
-            Log4(("CRX CR%d Read access rc=%d\n", VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification), rc));
+            Log4(("CRX CR%d Read access rcStrict=%Rrc\n", VMX_EXIT_QUALIFICATION_CRX_REGISTER(uExitQualification),
+                  VBOXSTRICTRC_VAL(rcStrict)));
             break;
         }

@@ -11009,11 +11028,11 @@
         case VMX_EXIT_QUALIFICATION_CRX_ACCESS_CLTS:        /* CLTS (Clear Task-Switch Flag in CR0) */
         {
-            rc = hmR0VmxSaveGuestCR0(pVCpu, pMixedCtx);
+            rc |= hmR0VmxSaveGuestCR0(pVCpu, pMixedCtx);
             AssertRCReturn(rc, rc);
-            rc = EMInterpretCLTS(pVM, pVCpu);
-            AssertRCReturn(rc, rc);
+            rcStrict = IEMExecDecodedClts(pVCpu, pVmxTransient->cbInstr);
+            AssertMsg(rcStrict == VINF_SUCCESS || rcStrict == VINF_IEM_RAISED_XCPT, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
             HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR0);
             STAM_COUNTER_INC(&pVCpu->hm.s.StatExitClts);
-            Log4(("CRX CLTS write rc=%d\n", rc));
+            Log4(("CRX CLTS rcStrict=%d\n", VBOXSTRICTRC_VAL(rcStrict)));
             break;
         }

@@ -11021,32 +11040,22 @@
         case VMX_EXIT_QUALIFICATION_CRX_ACCESS_LMSW:        /* LMSW (Load Machine-Status Word into CR0) */
        {
-            rc = hmR0VmxSaveGuestCR0(pVCpu, pMixedCtx);
+            rc |= hmR0VmxSaveGuestCR0(pVCpu, pMixedCtx);
             AssertRCReturn(rc, rc);
-            rc = EMInterpretLMSW(pVM, pVCpu, CPUMCTX2CORE(pMixedCtx), VMX_EXIT_QUALIFICATION_CRX_LMSW_DATA(uExitQualification));
-            if (RT_LIKELY(rc == VINF_SUCCESS))
-                HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR0);
+            rcStrict = IEMExecDecodedLmsw(pVCpu, pVmxTransient->cbInstr,
+                                          VMX_EXIT_QUALIFICATION_CRX_LMSW_DATA(uExitQualification));
+            AssertMsg(rcStrict == VINF_SUCCESS || rcStrict == VINF_IEM_RAISED_XCPT || rcStrict == VINF_PGM_CHANGE_MODE, ("%Rrc\n", VBOXSTRICTRC_VAL(rcStrict)));
             STAM_COUNTER_INC(&pVCpu->hm.s.StatExitLmsw);
-            Log4(("CRX LMSW write rc=%d\n", rc));
+            Log4(("CRX LMSW rcStrict=%d\n", VBOXSTRICTRC_VAL(rcStrict)));
             break;
         }
 
         default:
-        {
-            AssertMsgFailed(("Invalid access-type in Mov CRx VM-exit qualification %#x\n", uAccessType));
-            rc = VERR_VMX_UNEXPECTED_EXCEPTION;
-        }
-    }
-
-    /* Validate possible error codes. */
-    Assert(rc == VINF_SUCCESS || rc == VINF_PGM_CHANGE_MODE || rc == VERR_EM_INTERPRETER || rc == VINF_PGM_SYNC_CR3
-           || rc == VERR_VMX_UNEXPECTED_EXCEPTION);
-    if (RT_SUCCESS(rc))
-    {
-        int rc2 = hmR0VmxAdvanceGuestRip(pVCpu, pMixedCtx, pVmxTransient);
-        AssertRCReturn(rc2, rc2);
-    }
-
+            AssertMsgFailedReturn(("Invalid access-type in Mov CRx VM-exit qualification %#x\n", uAccessType),
+                                  VERR_VMX_UNEXPECTED_EXCEPTION);
+    }
+
+    HMCPU_CF_SET(pVCpu, rcStrict != VINF_IEM_RAISED_XCPT ? HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS : HM_CHANGED_ALL_GUEST);
     STAM_PROFILE_ADV_STOP(&pVCpu->hm.s.StatExitMovCRx, y2);
-    return rc;
+    return VBOXSTRICTRC_TODO(rcStrict);
 }
 
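A note on the tail of the handler: with EM the exit handler had to advance the guest RIP itself, whereas IEM updates RIP and RFLAGS as part of executing the decoded instruction, so the handler now only records what changed. Condensed from the hunk above (the rationale is inferred from the code, the commit message does not spell it out):

    /* r55129: RIP had to be advanced explicitly on success. */
    if (RT_SUCCESS(rc))
    {
        int rc2 = hmR0VmxAdvanceGuestRip(pVCpu, pMixedCtx, pVmxTransient);
        AssertRCReturn(rc2, rc2);
    }

    /* r55248: IEM already moved RIP/RFLAGS; if it raised an exception instead,
       assume any guest state may have been touched and flag everything. */
    HMCPU_CF_SET(pVCpu, rcStrict != VINF_IEM_RAISED_XCPT
                        ? HM_CHANGED_GUEST_RIP | HM_CHANGED_GUEST_RFLAGS
                        : HM_CHANGED_ALL_GUEST);
    return VBOXSTRICTRC_TODO(rcStrict);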