Timestamp: Apr 5, 2024 12:22:54 PM (10 months ago)
Location: trunk/src/VBox/VMM
Files: 6 edited
Legend: Unmodified / Added / Removed
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r104174 r104177 3351 3351 'IEM_MC_STORE_SREG_BASE_U32': (McBlock.parseMcGeneric, True, True, False, ), 3352 3352 'IEM_MC_STORE_SREG_BASE_U64': (McBlock.parseMcGeneric, True, True, False, ), 3353 'IEM_MC_STORE_SSE_RESULT': (McBlock.parseMcGeneric, True, True, g_fNativeSimd),3354 3353 'IEM_MC_STORE_XREG_R32': (McBlock.parseMcGeneric, True, True, g_fNativeSimd), 3355 3354 'IEM_MC_STORE_XREG_R64': (McBlock.parseMcGeneric, True, True, g_fNativeSimd), -
trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h
r104174 r104177 663 663 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 664 664 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 665 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));666 665 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 666 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 667 667 668 668 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 690 690 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); 691 691 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 692 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));693 692 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 693 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 694 694 695 695 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 727 727 IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 728 728 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2); 729 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));730 729 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 730 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 731 731 732 732 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 754 754 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); 755 755 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2); 756 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));757 756 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 757 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 758 758 759 759 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 791 791 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 792 792 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 793 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));794 793 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 794 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 795 795 796 796 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 818 818 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, 
bRm)); 819 819 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 820 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));821 820 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 821 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 822 822 823 823 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 855 855 IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 856 856 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2); 857 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));858 857 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 858 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 859 859 860 860 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 882 882 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); 883 883 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2); 884 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));885 884 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 885 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 886 886 887 887 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 979 979 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 980 980 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 981 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));982 981 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 982 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 983 983 984 984 IEM_MC_ADVANCE_RIP_AND_FINISH(); … … 1006 1006 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); 1007 1007 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2); 1008 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));1009 1008 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); 1009 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), SseRes); 1010 1010 1011 1011 IEM_MC_ADVANCE_RIP_AND_FINISH(); -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veLiveness.cpp
r104174 r104177 1084 1084 #define IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE() NOP() 1085 1085 1086 #define IEM_MC_STORE_SSE_RESULT(a_SseData, a_iXmmReg) NOP() //IEM_LIVENESS_XREG_CLOBBER(a_iXmmReg)1087 1086 #define IEM_MC_SSE_UPDATE_MXCSR(a_fMxcsr) IEM_LIVENESS_MXCSR_MODIFY() 1088 1087 -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
r104172 r104177 4415 4415 RT_NOREF(fConst); 4416 4416 #endif 4417 4418 return off;4419 }4420 4421 4422 #define IEM_MC_REF_MXCSR(a_pfMxcsr) \4423 off = iemNativeEmitRefMxcsr(pReNative, off, a_pfMxcsr)4424 4425 /** Handles IEM_MC_REF_MXCSR. */4426 DECL_INLINE_THROW(uint32_t)4427 iemNativeEmitRefMxcsr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRef)4428 {4429 iemNativeVarSetKindToGstRegRef(pReNative, idxVarRef, kIemNativeGstRegRef_MxCsr, 0);4430 IEMNATIVE_ASSERT_VAR_SIZE(pReNative, idxVarRef, sizeof(void *));4431 4432 /* If we've delayed writing back the register value, flush it now. */4433 off = iemNativeRegFlushPendingSpecificWrite(pReNative, off, kIemNativeGstRegRef_MxCsr, 0);4434 4435 /* If there is a shadow copy of guest MXCSR, flush it now. */4436 iemNativeRegFlushGuestShadows(pReNative, RT_BIT_64(kIemNativeGstReg_MxCsr));4437 4417 4438 4418 return off; … … 8851 8831 8852 8832 8853 #define IEM_MC_STORE_SSE_RESULT(a_SseData, a_iXmmReg) \8854 off = iemNativeEmitSimdSseStoreResult(pReNative, off, a_SseData, a_iXmmReg)8855 8856 /** Emits code for IEM_MC_STORE_SSE_RESULT. */8857 DECL_INLINE_THROW(uint32_t)8858 iemNativeEmitSimdSseStoreResult(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxSseRes, uint8_t iXReg)8859 {8860 IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxSseRes);8861 IEMNATIVE_ASSERT_VAR_SIZE(pReNative, idxSseRes, sizeof(X86XMMREG));8862 8863 /* The ForUpdate is important as we might end up not writing the result value to the register in case of an unmasked exception. 
*/8864 uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(iXReg),8865 kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate);8866 uint8_t const idxVarRegRes = iemNativeVarSimdRegisterAcquire(pReNative, idxSseRes, &off, true /*fInitalized*/);8867 uint8_t const idxRegMxCsr = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_MxCsr, kIemNativeGstRegUse_ReadOnly);8868 uint8_t const idxRegTmp = iemNativeRegAllocTmp(pReNative, &off);8869 8870 /* Update the value if there is no unmasked exception. */8871 /* tmp = mxcsr */8872 off = iemNativeEmitLoadGprFromGpr32(pReNative, off, idxRegTmp, idxRegMxCsr);8873 /* tmp &= X86_MXCSR_XCPT_MASK */8874 off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxRegTmp, X86_MXCSR_XCPT_MASK);8875 /* tmp >>= X86_MXCSR_XCPT_MASK_SHIFT */8876 off = iemNativeEmitShiftGprRight(pReNative, off, idxRegTmp, X86_MXCSR_XCPT_MASK_SHIFT);8877 /* tmp = ~tmp */8878 off = iemNativeEmitInvBitsGpr(pReNative, off, idxRegTmp, idxRegTmp, false /*f64Bit*/);8879 /* tmp &= mxcsr */8880 off = iemNativeEmitAndGpr32ByGpr32(pReNative, off, idxRegTmp, idxRegMxCsr);8881 8882 off = iemNativeEmitTestAnyBitsInGpr(pReNative, off, idxRegTmp, X86_MXCSR_XCPT_FLAGS);8883 uint32_t offFixup = off;8884 off = iemNativeEmitJnzToFixed(pReNative, off, off);8885 off = iemNativeEmitSimdLoadVecRegFromVecRegU128(pReNative, off, idxSimdRegDst, idxVarRegRes);8886 iemNativeFixupFixedJump(pReNative, offFixup, off);8887 8888 /* Free but don't flush the shadowed register. */8889 iemNativeVarRegisterRelease(pReNative, idxSseRes);8890 iemNativeSimdRegFreeTmp(pReNative, idxSimdRegDst);8891 iemNativeRegFreeTmp(pReNative, idxRegMxCsr);8892 iemNativeRegFreeTmp(pReNative, idxRegTmp);8893 8894 return off;8895 }8896 8897 8833 8898 8834 /********************************************************************************************************************************* -
trunk/src/VBox/VMM/include/IEMMc.h
r104174 r104177 2976 2976 /** Actualizes the guest FPU state so it can be accessed and modified. */ 2977 2977 #define IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE() iemFpuActualizeStateForChange(pVCpu) 2978 2979 /** Stores SSE SIMD result updating MXCSR. */2980 #define IEM_MC_STORE_SSE_RESULT(a_Res, a_iXmmReg) \2981 do { \2982 PCX86FXSTATE pFpuCtx = &pVCpu->cpum.GstCtx.XState.x87; \2983 if (( ~((pFpuCtx->MXCSR & X86_MXCSR_XCPT_MASK) >> X86_MXCSR_XCPT_MASK_SHIFT) \2984 & (pFpuCtx->MXCSR & X86_MXCSR_XCPT_FLAGS)) == 0) \2985 pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXmmReg)] = (a_Res); \2986 } while (0)2987 2978 2988 2979 /** Prepares for using the SSE state. -
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r104174 r104177 1060 1060 #define IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ() (void)fMcBegin; const int fFpuRead = 1, fSseRead = 1 1061 1061 #define IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE() (void)fMcBegin; const int fFpuRead = 1, fFpuWrite = 1, fSseRead = 1, fSseWrite = 1 1062 #define IEM_MC_STORE_SSE_RESULT(a_SseData, a_iXmmReg) do { (void)fSseWrite; (void)fMcBegin; } while (0)1063 1062 #define IEM_MC_SSE_UPDATE_MXCSR(a_fMxcsr) do { (void)fSseWrite; (void)fMcBegin; } while (0) 1064 1063 #define IEM_MC_PREPARE_SSE_USAGE() (void)fMcBegin; const int fSseRead = 1, fSseWrite = 1, fSseHost = 1
Note:
See TracChangeset
for help on using the changeset viewer.