
Changeset 67660 in vbox


Timestamp: Jun 28, 2017 10:17:05 AM (7 years ago)
Author: vboxsync
Message: VMM/HMSVMR0: Added decode assist for invlpg, mov CRx instruction VM-exits.
Location: trunk
Files: 3 edited
  • trunk/include/VBox/vmm/hm_svm.h

    --- trunk/include/VBox/vmm/hm_svm.h (r66668)
    +++ trunk/include/VBox/vmm/hm_svm.h (r67660)
    @@ -297,11 +297,19 @@
     /** @} */
     
    -/** @name SVMVMCB.u64ExitInfo1 for Mov CRX accesses.
    - * @{
    - */
    -/** The access was via Mov CRx instruction bit number. */
    +/** @name SVMVMCB.u64ExitInfo1 for Mov CRx accesses.
    + * @{
    + */
    +/** The mask of whether the access was via a Mov CRx instruction. */
     #define SVM_EXIT1_MOV_CRX_MASK                RT_BIT_64(63)
    -/** @} */
    -
    +/** The mask for the GPR number of the Mov CRx instruction.  */
    +#define SVM_EXIT1_MOV_CRX_GPR_NUMBER          0xf
    +/** @} */
    +
    +/** @name SVMVMCB.u64ExitInfo1 for Mov DRx accesses.
    + * @{
    + */
    +/** The mask for the GPR number of the Mov DRx instruction.  */
    +#define SVM_EXIT1_MOV_DRX_GPR_NUMBER          0xf
    +/** @} */
     
     /** @name SVMVMCB.ctrl.u64InterceptCtrl
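The header change also introduces SVM_EXIT1_MOV_DRX_GPR_NUMBER, which no handler in this changeset consumes yet. Since the AMD architecture reports a MOV DRx intercept's GPR operand in the low bits of EXITINFO1 in the same way as for MOV CRx, a future DRx exit handler could extract it roughly as in this hypothetical helper (illustrative only, not part of the changeset):

    /* Hypothetical use of the new Mov DRx mask: extract the GPR operand number
     * from EXITINFO1 of a MOV DRx VM-exit when decode assist is available. */
    #include <stdint.h>

    #define SVM_EXIT1_MOV_DRX_GPR_NUMBER  UINT64_C(0xf) /* Mirrors the new hm_svm.h definition. */

    static uint8_t exampleMovDRxGprNumber(uint64_t u64ExitInfo1)
    {
        return (uint8_t)(u64ExitInfo1 & SVM_EXIT1_MOV_DRX_GPR_NUMBER);
    }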
  • trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp

    --- trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp (r67529)
    +++ trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp (r67660)
    @@ -4572,8 +4572,18 @@
         PVM pVM = pVCpu->CTX_SUFF(pVM);
         Assert(!pVM->hm.s.fNestedPaging);
    -
    -    /** @todo Decode Assist. */
    +    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitInvlpg);
    +
    +    if (pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_DECODE_ASSIST)
    +    {
    +        Assert(pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_NRIP_SAVE);
    +        PCSVMVMCB pVmcb = (PCSVMVMCB)pVCpu->hm.s.svm.pvVmcb;
    +        uint8_t const cbInstr   = pVmcb->ctrl.u64NextRIP - pCtx->rip;
    +        RTGCPTR const GCPtrPage = pVmcb->ctrl.u64ExitInfo1;
    +        VBOXSTRICTRC rcStrict = IEMExecDecodedInvlpg(pVCpu, cbInstr, GCPtrPage);
    +        HMSVM_CHECK_SINGLE_STEP(pVCpu, rcStrict);
    +        return VBOXSTRICTRC_VAL(rcStrict);
    +    }
    +
         int rc = hmR0SvmInterpretInvlpg(pVM, pVCpu, pCtx);    /* Updates RIP if successful. */
    -    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitInvlpg);
         Assert(rc == VINF_SUCCESS || rc == VERR_EM_INTERPRETER);
         HMSVM_CHECK_SINGLE_STEP(pVCpu, rc);
    @@ -4672,6 +4682,24 @@
     
         Log4(("hmR0SvmExitReadCRx: CS:RIP=%04x:%#RX64\n", pCtx->cs.Sel, pCtx->rip));
    -
    -    /** @todo Decode Assist. */
    +    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCRxRead[pSvmTransient->u64ExitCode - SVM_EXIT_READ_CR0]);
    +
    +    PVM pVM = pVCpu->CTX_SUFF(pVM);
    +    if (pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_DECODE_ASSIST)
    +    {
    +        Assert(pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_NRIP_SAVE);
    +        PCSVMVMCB pVmcb = (PCSVMVMCB)pVCpu->hm.s.svm.pvVmcb;
    +        bool const fMovCRx = RT_BOOL(pVmcb->ctrl.u64ExitInfo1 & SVM_EXIT1_MOV_CRX_MASK);
    +        if (fMovCRx)
    +        {
    +            uint8_t const cbInstr = pVmcb->ctrl.u64NextRIP - pCtx->rip;
    +            uint8_t const iCrReg  = pSvmTransient->u64ExitCode - SVM_EXIT_READ_CR0;
    +            uint8_t const iGReg   = pVmcb->ctrl.u64ExitInfo1 & SVM_EXIT1_MOV_CRX_GPR_NUMBER;
    +            VBOXSTRICTRC rcStrict = IEMExecDecodedMovCRxRead(pVCpu, cbInstr, iGReg, iCrReg);
    +            HMSVM_CHECK_SINGLE_STEP(pVCpu, rcStrict);
    +            return VBOXSTRICTRC_VAL(rcStrict);
    +        }
    +        /* else: SMSW instruction, fall back below to IEM for this. */
    +    }
    +
         VBOXSTRICTRC rc2 = EMInterpretInstruction(pVCpu, CPUMCTX2CORE(pCtx), 0 /* pvFault */);
         int rc = VBOXSTRICTRC_VAL(rc2);
    @@ -4679,5 +4707,4 @@
                   ("hmR0SvmExitReadCRx: EMInterpretInstruction failed rc=%Rrc\n", rc));
         Assert((pSvmTransient->u64ExitCode - SVM_EXIT_READ_CR0) <= 15);
    -    STAM_COUNTER_INC(&pVCpu->hm.s.StatExitCRxRead[pSvmTransient->u64ExitCode - SVM_EXIT_READ_CR0]);
         HMSVM_CHECK_SINGLE_STEP(pVCpu, rc);
         return rc;
    @@ -4692,14 +4719,36 @@
         HMSVM_VALIDATE_EXIT_HANDLER_PARAMS();
     
    -    /** @todo Decode Assist. */
    -    VBOXSTRICTRC rcStrict = IEMExecOneBypassEx(pVCpu, CPUMCTX2CORE(pCtx), NULL);
    -    if (RT_UNLIKELY(   rcStrict == VERR_IEM_ASPECT_NOT_IMPLEMENTED
    -                    || rcStrict == VERR_IEM_INSTR_NOT_IMPLEMENTED))
    -        rcStrict = VERR_EM_INTERPRETER;
    +    uint8_t const iCrReg = pSvmTransient->u64ExitCode - SVM_EXIT_WRITE_CR0;
    +    Assert(iCrReg <= 15);
    +
    +    VBOXSTRICTRC rcStrict;
    +    PVM pVM = pVCpu->CTX_SUFF(pVM);
    +    bool fDecodedInstr = false;
    +    if (pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_DECODE_ASSIST)
    +    {
    +        Assert(pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_NRIP_SAVE);
    +        PCSVMVMCB pVmcb = (PCSVMVMCB)pVCpu->hm.s.svm.pvVmcb;
    +        bool const fMovCRx = RT_BOOL(pVmcb->ctrl.u64ExitInfo1 & SVM_EXIT1_MOV_CRX_MASK);
    +        if (fMovCRx)
    +        {
    +            uint8_t const cbInstr = pVmcb->ctrl.u64NextRIP - pCtx->rip;
    +            uint8_t const iGReg   = pVmcb->ctrl.u64ExitInfo1 & SVM_EXIT1_MOV_CRX_GPR_NUMBER;
    +            rcStrict = IEMExecDecodedMovCRxWrite(pVCpu, cbInstr, iCrReg, iGReg);
    +            fDecodedInstr = true;
    +        }
    +        /* else: LMSW or CLTS instruction, fall back below to IEM for this. */
    +    }
    +
    +    if (!fDecodedInstr)
    +    {
    +        rcStrict = IEMExecOneBypassEx(pVCpu, CPUMCTX2CORE(pCtx), NULL);
    +        if (RT_UNLIKELY(   rcStrict == VERR_IEM_ASPECT_NOT_IMPLEMENTED
    +                        || rcStrict == VERR_IEM_INSTR_NOT_IMPLEMENTED))
    +            rcStrict = VERR_EM_INTERPRETER;
    +    }
    +
         if (rcStrict == VINF_SUCCESS)
         {
    -        /* RIP has been updated by EMInterpretInstruction(). */
    -        Assert((pSvmTransient->u64ExitCode - SVM_EXIT_WRITE_CR0) <= 15);
    -        switch (pSvmTransient->u64ExitCode - SVM_EXIT_WRITE_CR0)
    +        switch (iCrReg)
             {
                 case 0:     /* CR0. */
    @@ -4708,5 +4757,5 @@
     
                 case 3:     /* CR3. */
    -                Assert(!pVCpu->CTX_SUFF(pVM)->hm.s.fNestedPaging);
    +                Assert(!pVM->hm.s.fNestedPaging);
                     HMCPU_CF_SET(pVCpu, HM_CHANGED_GUEST_CR3);
                     break;
    @@ -4722,5 +4771,5 @@
                 default:
                     AssertMsgFailed(("hmR0SvmExitWriteCRx: Invalid/Unexpected Write-CRx exit. u64ExitCode=%#RX64 %#x\n",
    -                                 pSvmTransient->u64ExitCode, pSvmTransient->u64ExitCode - SVM_EXIT_WRITE_CR0));
    +                                 pSvmTransient->u64ExitCode, iCrReg));
                     break;
             }
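A note on the write-CRx handler above: the CR register index is derived arithmetically from the exit code, because SVM assigns a contiguous range of exit codes to CR writes (one per CR0..CR15), which is also why the handler asserts iCrReg <= 15. A standalone illustration of that mapping follows; the 0x10 base is the architectural value of the write-CR0 exit code, standing in for the real SVM_EXIT_WRITE_CR0 constant from hm_svm.h:

    /* Illustrative: recover the CR index from an SVM write-CRx exit code. */
    #include <stdint.h>

    #define EXAMPLE_SVM_EXIT_WRITE_CR0  UINT64_C(0x10) /* Stand-in for SVM_EXIT_WRITE_CR0. */

    static uint8_t exampleWriteCRxIndex(uint64_t u64ExitCode)
    {
        return (uint8_t)(u64ExitCode - EXAMPLE_SVM_EXIT_WRITE_CR0); /* 0..15 */
    }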
  • trunk/src/VBox/VMM/VMMR3/HM.cpp

    --- trunk/src/VBox/VMM/VMMR3/HM.cpp (r67529)
    +++ trunk/src/VBox/VMM/VMMR3/HM.cpp (r67660)
    @@ -1648,4 +1648,11 @@
     
         /*
    +     * SVM R0 code assumes if the decode-assist feature exists, NRIP feature exists too.
    +     */
    +    AssertLogRelReturn(  !(pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_DECODE_ASSIST)
    +                       || (pVM->hm.s.svm.u32Features & X86_CPUID_SVM_FEATURE_EDX_NRIP_SAVE),
    +                       VERR_HM_UNSUPPORTED_CPU_FEATURE_COMBO);
    +
    +    /*
          * Nested paging is determined in HMR3Init, verify the sanity of that.
          */