Changeset 103838 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Mar 13, 2024 8:06:55 PM
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 2 edited
Legend for the diffs below:
- Lines prefixed with '+' were added in r103838.
- Unprefixed lines are unchanged context; no lines were removed. Where the pre-existing body of a function was only re-indented inside a newly added block, it is shown unprefixed at its new indentation.
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
(modified; r103836 → r103838)

In iemNativeEmitMaybeRaiseDeviceNotAvailable(), around old line 809 / new line 809:

iemNativeEmitMaybeRaiseDeviceNotAvailable(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
{
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckPotential);
+
+    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE))
+    {
+#endif
        /*
         * Make sure we don't have any outstanding guest register writes as we may
         * raise an #NM and all guest register must be up to date in CPUMCTX.
         *
         * @todo r=aeichner Can we postpone this to the RaiseNm path?
         */
        off = iemNativeRegFlushPendingWrites(pReNative, off);

#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
#else
        RT_NOREF(idxInstr);
#endif

        /* Allocate a temporary CR0 register. */
        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);

        /*
         * if (cr0 & (X86_CR0_EM | X86_CR0_TS) != 0)
         *     return raisexcpt();
         */
        /* Test and jump. */
        off = iemNativeEmitTestAnyBitsInGprAndJmpToLabelIfAnySet(pReNative, off, idxCr0Reg, X86_CR0_EM | X86_CR0_TS, idxLabelRaiseNm);

        /* Free but don't flush the CR0 register. */
        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
+
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE;
+    }
+    else
+        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeDeviceNotAvailXcptCheckOmitted);
+#endif

    return off;

In iemNativeEmitMaybeRaiseSseRelatedXcpt(), around old line 901 / new line 914:

iemNativeEmitMaybeRaiseSseRelatedXcpt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
{
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeSseXcptCheckPotential);
+
+    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE))
+    {
+#endif
        /*
         * Make sure we don't have any outstanding guest register writes as we may
         * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
         *
         * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
         */
        off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);

#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
#else
        RT_NOREF(idxInstr);
#endif

        /* Allocate a temporary CR0 and CR4 register. */
        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0, kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4, kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
        uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);

        /** @todo r=aeichner Optimize this more later to have less compares and branches,
         *        (see IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() in IEMMc.h but check that it has some
         *        actual performance benefit first). */
        /*
         * if (cr0 & X86_CR0_EM)
         *     return raisexcpt();
         */
        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_EM_BIT, idxLabelRaiseUd);
        /*
         * if (!(cr4 & X86_CR4_OSFXSR))
         *     return raisexcpt();
         */
        off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSFXSR_BIT, idxLabelRaiseUd);
        /*
         * if (cr0 & X86_CR0_TS)
         *     return raisexcpt();
         */
        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);

        /* Free but don't flush the CR0 and CR4 register. */
        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
        iemNativeRegFreeTmp(pReNative, idxCr4Reg);
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE;
+    }
+    else
+        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeSseXcptCheckOmitted);
+#endif

    return off;

In iemNativeEmitMaybeRaiseAvxRelatedXcpt(), around old line 962 / new line 987:

iemNativeEmitMaybeRaiseAvxRelatedXcpt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr)
{
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeAvxXcptCheckPotential);
+
+    if (!(pReNative->fSimdRaiseXcptChecksEmitted & IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX))
+    {
+#endif
        /*
         * Make sure we don't have any outstanding guest register writes as we may
         * raise an \#UD or \#NM and all guest register must be up to date in CPUMCTX.
         *
         * @todo r=aeichner Can we postpone this to the RaiseNm/RaiseUd path?
         */
        off = iemNativeRegFlushPendingWrites(pReNative, off, false /*fFlushShadows*/);

#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
        off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
#else
        RT_NOREF(idxInstr);
#endif

        /* Allocate a temporary CR0, CR4 and XCR0 register. */
        uint8_t const idxCr0Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr0,  kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxCr4Reg       = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Cr4,  kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxXcr0Reg      = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_Xcr0, kIemNativeGstRegUse_ReadOnly);
        uint8_t const idxLabelRaiseNm = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseNm);
        uint8_t const idxLabelRaiseUd = iemNativeLabelCreate(pReNative, kIemNativeLabelType_RaiseUd);

        /** @todo r=aeichner Optimize this more later to have less compares and branches,
         *        (see IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() in IEMMc.h but check that it has some
         *        actual performance benefit first). */
        /*
         * if ((xcr0 & (XSAVE_C_YMM | XSAVE_C_SSE)) != (XSAVE_C_YMM | XSAVE_C_SSE))
         *     return raisexcpt();
         */
        const uint8_t idxRegTmp = iemNativeRegAllocTmpImm(pReNative, &off, XSAVE_C_YMM | XSAVE_C_SSE);
        off = iemNativeEmitAndGprByGpr(pReNative, off, idxRegTmp, idxXcr0Reg);
        off = iemNativeEmitTestIfGprNotEqualImmAndJmpToLabel(pReNative, off, idxRegTmp, XSAVE_C_YMM | XSAVE_C_SSE, idxLabelRaiseUd);
        iemNativeRegFreeTmp(pReNative, idxRegTmp);

        /*
         * if (!(cr4 & X86_CR4_OSXSAVE))
         *     return raisexcpt();
         */
        off = iemNativeEmitTestBitInGprAndJmpToLabelIfNotSet(pReNative, off, idxCr4Reg, X86_CR4_OSXSAVE_BIT, idxLabelRaiseUd);
        /*
         * if (cr0 & X86_CR0_TS)
         *     return raisexcpt();
         */
        off = iemNativeEmitTestBitInGprAndJmpToLabelIfSet(pReNative, off, idxCr0Reg, X86_CR0_TS_BIT, idxLabelRaiseNm);

        /* Free but don't flush the CR0, CR4 and XCR0 register. */
        iemNativeRegFreeTmp(pReNative, idxCr0Reg);
        iemNativeRegFreeTmp(pReNative, idxCr4Reg);
        iemNativeRegFreeTmp(pReNative, idxXcr0Reg);
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+        pReNative->fSimdRaiseXcptChecksEmitted |= IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX;
+    }
+    else
+        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeMaybeAvxXcptCheckOmitted);
+#endif

    return off;

Around old line 1989 / new line 2026, in the code that emits calls to C implementation helpers (pfnCImpl):

{
    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, X86_EFL_STATUS_BITS);
+
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    /* Clear the appropriate check emitted flags when a helper is called which could modify a control register. */
+    if (pfnCImpl == (uintptr_t)iemCImpl_xsetbv) /* Modifies xcr0 which only the AVX check uses. */
+        pReNative->fSimdRaiseXcptChecksEmitted &= ~IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX;
+    else if (pfnCImpl == (uintptr_t)iemCImpl_mov_Cd_Rd) /* Can modify cr4 which all checks use. */
+        pReNative->fSimdRaiseXcptChecksEmitted = 0;
+    else if (   pfnCImpl == (uintptr_t)iemCImpl_FarJmp
+             || pfnCImpl == (uintptr_t)iemCImpl_callf
+             || pfnCImpl == (uintptr_t)iemCImpl_lmsw
+             || pfnCImpl == (uintptr_t)iemCImpl_clts) /* Will only modify cr0 */
+        pReNative->fSimdRaiseXcptChecksEmitted &= ~(  IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_AVX
+                                                    | IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_SSE
+                                                    | IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_DEVICE_NOT_AVAILABLE);
+#endif
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
(modified; r103832 → r103838)

Around old line 2939 / new line 2939:

    pReNative->Core.offPc                 = 0;
    pReNative->Core.cInstrPcUpdateSkipped = 0;
#endif
+#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+    pReNative->fSimdRaiseXcptChecksEmitted = 0;
+#endif
    pReNative->Core.bmHstRegs = IEMNATIVE_REG_FIXED_MASK
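
Taken together, the two diffs cache which SIMD-related exception checks have already been emitted for the current translation block in a bitmask (fSimdRaiseXcptChecksEmitted): a check whose bit is already set is not emitted again (the omission is counted via a STAM counter), the relevant bits are cleared whenever a C implementation helper might modify CR0, CR4 or XCR0, and the mask is reset when the recompiler state is (re)initialized. Below is a minimal, standalone sketch of that pattern, not VirtualBox code: every name in it (RECOMPSTATE, CHECK_EMITTED_*, recompBeginTb, recompMaybeEmitSseCheck, recompOnHelperCall) is made up for illustration, only the SSE check emitter is modeled, and the actual code emission is stubbed out with printf.

/* Sketch of the "emit each exception check once per translation block" pattern. */
#include <stdint.h>
#include <stdio.h>

/* One bit per kind of check, analogous to IEMNATIVE_SIMD_RAISE_XCPT_CHECKS_EMITTED_MAYBE_*. */
#define CHECK_EMITTED_DEVICE_NOT_AVAIL  UINT32_C(0x01)  /* #NM check (CR0.EM / CR0.TS)          */
#define CHECK_EMITTED_SSE               UINT32_C(0x02)  /* SSE check (CR0.EM/TS, CR4.OSFXSR)    */
#define CHECK_EMITTED_AVX               UINT32_C(0x04)  /* AVX check (CR0.TS, CR4.OSXSAVE, XCR0) */

typedef struct RECOMPSTATE
{
    uint32_t fChecksEmitted;   /* analogous to pReNative->fSimdRaiseXcptChecksEmitted */
} RECOMPSTATE;

/* Reset the cache at the start of each translation block (cf. the IEMAllN8veRecompiler.cpp hunk). */
static void recompBeginTb(RECOMPSTATE *pState)
{
    pState->fChecksEmitted = 0;
}

/* Emit the SSE availability check only if it has not been emitted yet in this block.
   The #NM and AVX check emitters would follow exactly the same shape. */
static void recompMaybeEmitSseCheck(RECOMPSTATE *pState)
{
    if (pState->fChecksEmitted & CHECK_EMITTED_SSE)
    {
        printf("SSE check omitted (already emitted in this block)\n");
        return;
    }
    printf("emitting CR0.EM / CR0.TS / CR4.OSFXSR checks\n");
    pState->fChecksEmitted |= CHECK_EMITTED_SSE;
}

/* Invalidate cached checks when emitting a call to a helper that may modify a control
   register (cf. the pfnCImpl hunk): CR0 and CR4 feed every check, XCR0 only the AVX one. */
static void recompOnHelperCall(RECOMPSTATE *pState, int fMayChangeCr0, int fMayChangeCr4, int fMayChangeXcr0)
{
    if (fMayChangeCr0 || fMayChangeCr4)
        pState->fChecksEmitted &= ~(CHECK_EMITTED_DEVICE_NOT_AVAIL | CHECK_EMITTED_SSE | CHECK_EMITTED_AVX);
    if (fMayChangeXcr0)
        pState->fChecksEmitted &= ~CHECK_EMITTED_AVX;
}

int main(void)
{
    RECOMPSTATE State;
    recompBeginTb(&State);
    recompMaybeEmitSseCheck(&State);                               /* emitted                         */
    recompMaybeEmitSseCheck(&State);                               /* omitted, bit already set        */
    recompOnHelperCall(&State, 1 /*cr0*/, 0 /*cr4*/, 0 /*xcr0*/);  /* e.g. a CLTS-style helper        */
    recompMaybeEmitSseCheck(&State);                               /* emitted again after invalidation */
    return 0;
}

The invalidation mirrors the pfnCImpl hunk above: helpers that can touch CR4 (or CR0, which every check reads) drop all cached bits, while a helper that only writes XCR0, such as xsetbv, drops just the AVX bit.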