VirtualBox

Changeset 103838 in vbox


Ignore:
Timestamp:
Mar 13, 2024 8:06:55 PM (11 months ago)
Author:
vboxsync
Message:

VMM/IEM: Emit the IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE()/IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT()/IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks only once per TB or after a helper was called which could modify cr0/cr4/xcr0. Most of the time the check can be omitted for SIMD code, bugref:10614

Location:
trunk/src/VBox/VMM
Files:
5 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h

    r103836 r103838  
    809809iemNativeEmitMaybeRaiseDeviceNotAvailable(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
    810810{
    811     /*
    812      * Make sure we don't have any outstanding guest register writes as we may
    813      * raise an #NM and all guest register must be up to date in CPUMCTX.
    814      *
    815      * @todo r=aeichner Can we postpone this to the RaiseNm path?
    816      */
    817     off = iemNativeRegFlushPendingWrites(pReNative, off);
     811#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     812    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckPotential);
     813
     814    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE))
     815    {
     816#endif
     817        /*
     818         * Make sure we don't have any outstanding guest register writes as we may
     819         * raise an #NM and all guest register must be up to date in CPUMCTX.
     820         *
     821         * @todo r=aeichner Can we postpone this to the RaiseNm path?
     822         */
     823        off = iemNativeRegFlushPendingWrites(pReNative, off);
    818824
    819825#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
    820     off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
     826        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
    821827#else
    822     RT_NOREF(idxInstr);
    823 #endif
    824 
    825     /* Allocate a temporary CR0 register. */
    826     uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
    827     uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
    828 
    829     /*
    830      * if (cr0 & (X86_CR0_EM | X86_CR0_TS) != 0)
    831      *     return raisexcpt();
    832      */
    833     /* Test and jump. */
    834     off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(pReNative, off, idxCr0Reg, X86_CR0_EM | X86_CR0_TS, idxLabelRaiseNm);
    835 
    836     /* Free but don't flush the CR0 register. */
    837     iemNativeRegFreeTmp(pReNative, idxCr0Reg);
     828        RT_NOREF(idxInstr);
     829#endif
     830
     831        /* Allocate a temporary CR0 register. */
     832        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
     833        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
     834
     835        /*
     836         * if (cr0 & (X86_CR0_EM | X86_CR0_TS) != 0)
     837         *     return raisexcpt();
     838         */
     839        /* Test and jump. */
     840        off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(pReNative, off, idxCr0Reg, X86_CR0_EM | X86_CR0_TS, idxLabelRaiseNm);
     841
     842        /* Free but don't flush the CR0 register. */
     843        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
     844
     845#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     846        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE;
     847    }
     848    else
     849        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckOmitted);
     850#endif
    838851
    839852    return off;
     
    901914iemNativeEmitMaybeRaiseSseRelatedXcpt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
    902915{
    903     /*
    904      * Make sure we don't have any outstanding guest register writes as we may
    905      * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
    906      *
    907      * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
    908      */
    909     off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);
     916#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     917    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeSseXcptCheckPotential);
     918
     919    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE))
     920    {
     921#endif
     922        /*
     923         * Make sure we don't have any outstanding guest register writes as we may
     924         * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
     925         *
     926         * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
     927         */
     928        off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);
    910929
    911930#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
    912     off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
     931        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
    913932#else
    914     RT_NOREF(idxInstr);
    915 #endif
    916 
    917     /* Allocate a temporary CR0 and CR4 register. */
    918     uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
    919     uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4, kIemNativeGstRegUse_ReadOnly);
    920     uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
    921     uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);
    922 
    923     /** @todo r=aeichner Optimize this more later to have less compares and branches,
    924      *                   (see IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() in IEMMc.h but check that it has some
    925      *                   actual performance benefit first). */
    926     /*
    927      * if (cr0 & X86_CR0_EM)
    928      *     return raisexcpt();
    929      */
    930     off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_EM_BIT, idxLabelRaiseUd);
    931     /*
    932      * if (!(cr4 & X86_CR4_OSFXSR))
    933      *     return raisexcpt();
    934      */
    935     off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSFXSR_BIT, idxLabelRaiseUd);
    936     /*
    937      * if (cr0 & X86_CR0_TS)
    938      *     return raisexcpt();
    939      */
    940     off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);
    941 
    942     /* Free but don't flush the CR0 and CR4 register. */
    943     iemNativeRegFreeTmp(pReNative, idxCr0Reg);
    944     iemNativeRegFreeTmp(pReNative, idxCr4Reg);
     933        RT_NOREF(idxInstr);
     934#endif
     935
     936        /* Allocate a temporary CR0 and CR4 register. */
     937        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
     938        uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4, kIemNativeGstRegUse_ReadOnly);
     939        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
     940        uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);
     941
     942        /** @todo r=aeichner Optimize this more later to have less compares and branches,
     943         *                   (see IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() in IEMMc.h but check that it has some
     944         *                   actual performance benefit first). */
     945        /*
     946         * if (cr0 & X86_CR0_EM)
     947         *     return raisexcpt();
     948         */
     949        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_EM_BIT, idxLabelRaiseUd);
     950        /*
     951         * if (!(cr4 & X86_CR4_OSFXSR))
     952         *     return raisexcpt();
     953         */
     954        off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSFXSR_BIT, idxLabelRaiseUd);
     955        /*
     956         * if (cr0 & X86_CR0_TS)
     957         *     return raisexcpt();
     958         */
     959        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);
     960
     961        /* Free but don't flush the CR0 and CR4 register. */
     962        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
     963        iemNativeRegFreeTmp(pReNative, idxCr4Reg);
     964#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     965        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE;
     966    }
     967    else
     968        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeSseXcptCheckOmitted);
     969#endif
    945970
    946971    return off;
     
    962987iemNativeEmitMaybeRaiseAvxRelatedXcpt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
    963988{
    964     /*
    965      * Make sure we don't have any outstanding guest register writes as we may
    966      * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
    967      *
    968      * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
    969      */
    970     off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);
     989#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     990    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeAvxXcptCheckPotential);
     991
     992    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX))
     993    {
     994#endif
     995        /*
     996         * Make sure we don't have any outstanding guest register writes as we may
     997         * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
     998         *
     999         * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
     1000         */
     1001        off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);
    9711002
    9721003#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
    973     off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
     1004        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
    9741005#else
    975     RT_NOREF(idxInstr);
    976 #endif
    977 
    978     /* Allocate a temporary CR0, CR4 and XCR0 register. */
    979     uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
    980     uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4, kIemNativeGstRegUse_ReadOnly);
    981     uint8_t const idxXcr0Reg      = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Xcr0, kIemNativeGstRegUse_ReadOnly);
    982     uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
    983     uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);
    984 
    985     /** @todo r=aeichner Optimize this more later to have less compares and branches,
    986      *                   (see IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() in IEMMc.h but check that it has some
    987      *                   actual performance benefit first). */
    988     /*
    989      * if ((xcr0 & (XSAVE_C_YMM | XSAVE_C_SSE)) != (XSAVE_C_YMM | XSAVE_C_SSE))
    990      *     return raisexcpt();
    991      */
    992     const uint8_t idxRegTmp = iemNativeRegAllocTmpImm(pReNative, &off, XSAVE_C_YMM | XSAVE_C_SSE);
    993     off = iemNativeEmitAndGprByGpr(pReNative, off, idxRegTmp, idxXcr0Reg);
    994     off = iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, idxRegTmp, XSAVE_C_YMM | XSAVE_C_SSE, idxLabelRaiseUd);
    995     iemNativeRegFreeTmp(pReNative, idxRegTmp);
    996 
    997     /*
    998      * if (!(cr4 & X86_CR4_OSXSAVE))
    999      *     return raisexcpt();
    1000      */
    1001     off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSXSAVE_BIT, idxLabelRaiseUd);
    1002     /*
    1003      * if (cr0 & X86_CR0_TS)
    1004      *     return raisexcpt();
    1005      */
    1006     off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);
    1007 
    1008     /* Free but don't flush the CR0, CR4 and XCR0 register. */
    1009     iemNativeRegFreeTmp(pReNative, idxCr0Reg);
    1010     iemNativeRegFreeTmp(pReNative, idxCr4Reg);
    1011     iemNativeRegFreeTmp(pReNative, idxXcr0Reg);
     1006        RT_NOREF(idxInstr);
     1007#endif
     1008
     1009        /* Allocate a temporary CR0, CR4 and XCR0 register. */
     1010        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
     1011        uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4, kIemNativeGstRegUse_ReadOnly);
     1012        uint8_t const idxXcr0Reg      = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Xcr0, kIemNativeGstRegUse_ReadOnly);
     1013        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
     1014        uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);
     1015
     1016        /** @todo r=aeichner Optimize this more later to have less compares and branches,
     1017         *                   (see IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() in IEMMc.h but check that it has some
     1018         *                   actual performance benefit first). */
     1019        /*
     1020         * if ((xcr0 & (XSAVE_C_YMM | XSAVE_C_SSE)) != (XSAVE_C_YMM | XSAVE_C_SSE))
     1021         *     return raisexcpt();
     1022         */
     1023        const uint8_t idxRegTmp = iemNativeRegAllocTmpImm(pReNative, &off, XSAVE_C_YMM | XSAVE_C_SSE);
     1024        off = iemNativeEmitAndGprByGpr(pReNative, off, idxRegTmp, idxXcr0Reg);
     1025        off = iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, idxRegTmp, XSAVE_C_YMM | XSAVE_C_SSE, idxLabelRaiseUd);
     1026        iemNativeRegFreeTmp(pReNative, idxRegTmp);
     1027
     1028        /*
     1029         * if (!(cr4 & X86_CR4_OSXSAVE))
     1030         *     return raisexcpt();
     1031         */
     1032        off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSXSAVE_BIT, idxLabelRaiseUd);
     1033        /*
     1034         * if (cr0 & X86_CR0_TS)
     1035         *     return raisexcpt();
     1036         */
     1037        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);
     1038
     1039        /* Free but don't flush the CR0, CR4 and XCR0 register. */
     1040        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
     1041        iemNativeRegFreeTmp(pReNative, idxCr4Reg);
     1042        iemNativeRegFreeTmp(pReNative, idxXcr0Reg);
     1043#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     1044        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX;
     1045    }
     1046    else
     1047        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeAvxXcptCheckOmitted);
     1048#endif
    10121049
    10131050    return off;
     
    19892026{
    19902027    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, X86_EFL_STATUS_BITS);
     2028
     2029#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     2030    /* Clear the appropriate check emitted flags when a helper is called which could modify a control register. */
     2031    if (pfnCImpl == (uintptr_t)iemCImpl_xsetbv) /* Modifies xcr0 which only the AVX check uses. */
     2032        pReNative->fSimdRaiseXcptChecksEmitted &= ~IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX;
     2033    else if (pfnCImpl == (uintptr_t)iemCImpl_mov_Cd_Rd) /* Can modify cr4 which all checks use. */
     2034        pReNative->fSimdRaiseXcptChecksEmitted = 0;
     2035    else if (   pfnCImpl == (uintptr_t)iemCImpl_FarJmp
     2036             || pfnCImpl == (uintptr_t)iemCImpl_callf
     2037             || pfnCImpl == (uintptr_t)iemCImpl_lmsw
     2038             || pfnCImpl == (uintptr_t)iemCImpl_clts) /* Will only modify cr0 */
     2039        pReNative->fSimdRaiseXcptChecksEmitted &= ~(  IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX
     2040                                                    | IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE
     2041                                                    | IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE);
     2042#endif
    19912043
    19922044    /*
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r103832 r103838  
    29392939    pReNative->Core.offPc                  = 0;
    29402940    pReNative->Core.cInstrPcUpdateSkipped  = 0;
     2941#endif
     2942#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     2943    pReNative->fSimdRaiseXcptChecksEmitted = 0;
    29412944#endif
    29422945    pReNative->Core.bmHstRegs              = IEMNATIVE_REG_FIXED_MASK
  • trunk/src/VBox/VMM/VMMR3/IEMR3.cpp

    r103828 r103838  
    538538        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateTotal,   STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Total RIP updates",   "/IEM/CPU%u/re/NativePcUpdateTotal", idCpu);
    539539        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativePcUpdateDelayed, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Delayed RIP updates", "/IEM/CPU%u/re/NativePcUpdateDelayed", idCpu);
     540
     541#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     542        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckPotential, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks", "/IEM/CPU%u/re/NativeMaybeDeviceNotAvailXcptCheckPotential", idCpu);
     543        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeSseXcptCheckPotential,            STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks",     "/IEM/CPU%u/re/NativeMaybeSseXcptCheckPotential", idCpu);
     544        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeAvxXcptCheckPotential,            STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Potential IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks",     "/IEM/CPU%u/re/NativeMaybeAvxXcptCheckPotential", idCpu);
     545
     546        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckOmitted,   STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks omitted",   "/IEM/CPU%u/re/NativeMaybeDeviceNotAvailXcptCheckOmitted", idCpu);
     547        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeSseXcptCheckOmitted,              STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks omitted",       "/IEM/CPU%u/re/NativeMaybeSseXcptCheckOmitted", idCpu);
     548        STAMR3RegisterF(pVM, &pVCpu->iem.s.StatNativeMaybeAvxXcptCheckOmitted,              STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks omitted",       "/IEM/CPU%u/re/NativeMaybeAvxXcptCheckOmitted", idCpu);
     549#endif
    540550
    541551        /* Ratio of the status bit skippables. */
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r103828 r103838  
    18981898    STAMCOUNTER             StatNativePcUpdateDelayed;
    18991899
    1900 
    1901     uint64_t                u64Padding;
     1900#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     1901    /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks. */
     1902    STAMCOUNTER             StatNativeMaybeDeviceNotAvailXcptCheckPotential;
     1903    /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks. */
     1904    STAMCOUNTER             StatNativeMaybeSseXcptCheckPotential;
     1905    /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks. */
     1906    STAMCOUNTER             StatNativeMaybeAvxXcptCheckPotential;
     1907
     1908    /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks omitted. */
     1909    STAMCOUNTER             StatNativeMaybeDeviceNotAvailXcptCheckOmitted;
     1910    /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks omitted. */
     1911    STAMCOUNTER             StatNativeMaybeSseXcptCheckOmitted;
     1912    /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks omitted. */
     1913    STAMCOUNTER             StatNativeMaybeAvxXcptCheckOmitted;
     1914#endif
     1915
     1916    uint64_t                au64Padding[3];
    19021917    /** @} */
    19031918
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r103829 r103838  
    12061206# define IEMNATIVE_SIMD_REG_STATE_SET_DIRTY_HI_U128(a_pReNative, a_iSimdReg) \
    12071207    ((a_pReNative)->Core.bmGstSimdRegShadowDirtyHi128 |= RT_BIT_64(a_iSimdReg))
     1208
     1209/** Flag for indicating that IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() has emitted code in the current TB. */
     1210# define IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE RT_BIT_32(0)
     1211/** Flag for indicating that IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() has emitted code in the current TB. */
     1212# define IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE                  RT_BIT_32(1)
     1213/** Flag for indicating that IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() has emitted code in the current TB. */
     1214# define IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX                  RT_BIT_32(2)
    12081215#endif
    12091216
     
    13061313    /** The expected IEMCPU::fExec value for the current call/instruction. */
    13071314    uint32_t                    fExec;
     1315#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
     1316    /** IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_XXX flags for exception flags
     1317     * we only emit once per TB (or when the cr0/cr4/xcr0 register changes).
     1318     *
      1319     * This is an optimization because these control registers can only be changed
      1320     * by calling a C helper we can catch. Should reduce the number of instructions in a TB
     1321     * consisting of multiple SIMD instructions.
     1322     */
     1323    uint32_t                    fSimdRaiseXcptChecksEmitted;
     1324#endif
    13081325
    13091326    /** Core state requiring care with branches. */
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette