Changeset 96339 in vbox
- Timestamp: Aug 19, 2022 3:18:46 PM
- svn:sync-xref-src-repo-rev: 153155
- Location: trunk/src/VBox/VMM
- Files: 6 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r96335 → r96339)

@@ -4597 +4597 @@
 IEMIMPL_FP_F2 maxps
 IEMIMPL_FP_F2 maxpd
+
+
+;;
+; Floating point instruction working on a full sized register and a single precision operand.
+;
+; @param    1       The instruction
+;
+; @param    A0      FPU context (FXSTATE or XSAVEAREA).
+; @param    A1      Where to return the result including the MXCSR value.
+; @param    A2      Pointer to the first media register size operand (input/output).
+; @param    A3      Pointer to the second single precision floating point value (input).
+;
+%macro IEMIMPL_FP_F2_R32 1
+BEGINPROC_FASTCALL  iemAImpl_ %+ %1 %+ _u128_r32, 12
+        PROLOGUE_4_ARGS
+        IEMIMPL_SSE_PROLOGUE
+        SSE_LD_FXSTATE_MXCSR A0
+
+        movdqu  xmm0, [A2]
+        movd    xmm1, [A3]
+        %1      xmm0, xmm1
+        movdqu  [A1 + IEMSSERESULT.uResult], xmm0
+
+        SSE_ST_FXSTATE_MXCSR A1, A0
+        IEMIMPL_SSE_PROLOGUE
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_ %+ %1 %+ _u128_r32
+
+BEGINPROC_FASTCALL  iemAImpl_v %+ %1 %+ _u128_r32, 12
+        PROLOGUE_4_ARGS
+        IEMIMPL_AVX_PROLOGUE
+        AVX_LD_XSAVEAREA_MXCSR A0
+
+        vmovd   xmm0, [A2]
+        vmovd   xmm1, [A3]
+        v %+ %1 xmm0, xmm0, xmm1
+        vmovdqu [A1 + IEMAVX128RESULT.uResult], xmm0
+
+        AVX128_ST_XSAVEAREA_MXCSR A1
+        IEMIMPL_AVX_PROLOGUE
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_v %+ %1 %+ _u128_r32
+%endmacro
+
+IEMIMPL_FP_F2_R32 addss
+
+
+;;
+; Floating point instruction working on a full sized register and a double precision operand.
+;
+; @param    1       The instruction
+;
+; @param    A0      FPU context (FXSTATE or XSAVEAREA).
+; @param    A1      Where to return the result including the MXCSR value.
+; @param    A2      Pointer to the first media register size operand (input/output).
+; @param    A3      Pointer to the second double precision floating point value (input).
+;
+%macro IEMIMPL_FP_F2_R64 1
+BEGINPROC_FASTCALL  iemAImpl_ %+ %1 %+ _u128_r64, 12
+        PROLOGUE_4_ARGS
+        IEMIMPL_SSE_PROLOGUE
+        SSE_LD_FXSTATE_MXCSR A0
+
+        movdqu  xmm0, [A2]
+        movq    xmm1, [A3]
+        %1      xmm0, xmm1
+        movdqu  [A1 + IEMSSERESULT.uResult], xmm0
+
+        SSE_ST_FXSTATE_MXCSR A1, A0
+        IEMIMPL_SSE_PROLOGUE
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_ %+ %1 %+ _u128_r64
+
+BEGINPROC_FASTCALL  iemAImpl_v %+ %1 %+ _u128_r64, 12
+        PROLOGUE_4_ARGS
+        IEMIMPL_AVX_PROLOGUE
+        AVX_LD_XSAVEAREA_MXCSR A0
+
+        vmovdqu xmm0, [A2]
+        vmovq   xmm1, [A3]
+        v %+ %1 xmm0, xmm0, xmm1
+        vmovdqu [A1 + IEMAVX128RESULT.uResult], xmm0
+
+        AVX128_ST_XSAVEAREA_MXCSR A1
+        IEMIMPL_AVX_PROLOGUE
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_v %+ %1 %+ _u128_r64
+%endmacro
+
+IEMIMPL_FP_F2_R64 addsd
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r96335 → r96339)

@@ -14184 +14184 @@
 
 /**
+ * ADDSS
+ */
+#ifdef IEM_WITHOUT_ASSEMBLY
+IEM_DECL_IMPL_DEF(void, iemAImpl_addss_u128_r32,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2))
+{
+    pResult->MXCSR |= iemAImpl_addps_u128_worker(&pResult->uResult.ar32[0], pFpuState->MXCSR, &puSrc1->ar32[0], pr32Src2);
+    pResult->uResult.ar32[1] = puSrc1->ar32[1];
+    pResult->uResult.ar32[2] = puSrc1->ar32[2];
+    pResult->uResult.ar32[3] = puSrc1->ar32[3];
+}
+#endif
+
+
+/**
  * ADDPD
  */

@@ -14205 +14219 @@
     pResult->MXCSR |= iemAImpl_addpd_u128_worker(&pResult->uResult.ar64[0], pFpuState->MXCSR, &puSrc1->ar64[0], &puSrc2->ar64[0]);
     pResult->MXCSR |= iemAImpl_addpd_u128_worker(&pResult->uResult.ar64[1], pFpuState->MXCSR, &puSrc1->ar64[1], &puSrc2->ar64[1]);
+}
+#endif
+
+
+/**
+ * ADDSD
+ */
+#ifdef IEM_WITHOUT_ASSEMBLY
+IEM_DECL_IMPL_DEF(void, iemAImpl_addsd_u128_r64,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2))
+{
+    pResult->MXCSR |= iemAImpl_addpd_u128_worker(&pResult->uResult.ar64[0], pFpuState->MXCSR, &puSrc1->ar64[0], pr64Src2);
+    pResult->uResult.ar64[1] = puSrc1->ar64[1];
 }
 #endif
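The two C fallbacks above capture the scalar-versus-packed distinction: only element 0 of the result is computed (through the existing addps/addpd workers, which also merge the MXCSR status flags back into the result), while the remaining elements are copied unchanged from the first source operand. Below is a minimal stand-alone C model of that behaviour; the types and names are hypothetical simplifications, not the VirtualBox X86XMMREG/RTFLOAT32U definitions, and MXCSR/exception handling is omitted.

    #include <stdio.h>

    /* Simplified stand-in for X86XMMREG: four single-precision lanes. */
    typedef struct { float ar32[4]; } XMMREG;

    /* Model of iemAImpl_addss_u128_r32: add only the low lane, copy the rest. */
    static XMMREG ModelAddss(XMMREG const *pSrc1, float r32Src2)
    {
        XMMREG Res = *pSrc1;                        /* lanes 1..3 pass through from src1 */
        Res.ar32[0] = pSrc1->ar32[0] + r32Src2;     /* lane 0 is the only one computed   */
        return Res;
    }

    int main(void)
    {
        XMMREG const Src1 = { { 1.0f, 2.0f, 3.0f, 4.0f } };
        XMMREG const Res  = ModelAddss(&Src1, 0.25f);
        printf("%g %g %g %g\n", Res.ar32[0], Res.ar32[1], Res.ar32[2], Res.ar32[3]);
        return 0;                                   /* prints: 1.25 2 3 4 */
    }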
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (r96335 → r96339)

@@ -802 +802 @@
 
 /**
+ * Common worker for SSE instructions on the forms:
+ *      pxxs       xmm1, xmm2/mem32
+ *
+ * Proper alignment of the 128-bit operand is enforced.
+ * Exceptions type 2. SSE cpuid checks.
+ *
+ * @sa  iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
+ */
+FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(3, 1);
+        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
+        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
+        IEM_MC_ARG(PCX86XMMREG,             pSrc1,           1);
+        IEM_MC_ARG(PCRTFLOAT32U,            pSrc2,           2);
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
+        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
+        IEM_MC_LOCAL(RTFLOAT32U,            r32Src2);
+        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes,   0);
+        IEM_MC_ARG(PCX86XMMREG,             pSrc1,             1);
+        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Src2, r32Src2, 2);
+        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
+        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/**
  * Common worker for SSE2 instructions on the forms:
  *      pxxd       xmm1, xmm2/mem128

@@ -856 +921 @@
         IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
         IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
+        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/**
+ * Common worker for SSE2 instructions on the forms:
+ *      pxxs       xmm1, xmm2/mem64
+ *
+ * Proper alignment of the 128-bit operand is enforced.
+ * Exceptions type 2. SSE2 cpuid checks.
+ *
+ * @sa  iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
+ */
+FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(3, 1);
+        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
+        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
+        IEM_MC_ARG(PCX86XMMREG,             pSrc1,           1);
+        IEM_MC_ARG(PCRTFLOAT64U,            pSrc2,           2);
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
+        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
+        IEM_MC_LOCAL(RTFLOAT64U,            r64Src2);
+        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes,   0);
+        IEM_MC_ARG(PCX86XMMREG,             pSrc1,             1);
+        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Src2, r64Src2, 2);
+        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
         IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
         IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

@@ -3940 +4070 @@
 
 /** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
-FNIEMOP_STUB(iemOp_addss_Vss_Wss);
+FNIEMOP_DEF(iemOp_addss_Vss_Wss)
+{
+    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
+}
+
+
 /** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
-FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
+FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
+{
+    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
+}
 
 
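Both new decoder workers branch on the ModR/M byte: mod == 11b selects the register form, where the scalar source is referenced directly in an XMM register through the new IEM_MC_REF_XREG_R32_CONST / IEM_MC_REF_XREG_R64_CONST, while any other mod value means the effective address is calculated and the scalar is fetched from guest memory with IEM_MC_FETCH_MEM_R32 / IEM_MC_FETCH_MEM_R64. A rough C sketch of that mod/reg/rm split follows, using plain helper functions rather than the IEM macros; the names are hypothetical and the REX.R/REX.B extension bits handled by IEM_GET_MODRM_REG/RM are ignored.

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* mod == 11b selects the register form; anything else is a memory operand. */
    static bool IsModRmRegMode(uint8_t bRm)
    {
        return (bRm >> 6) == 3;
    }

    /* reg field: the destination XMM register (Vss/Vsd). */
    static uint8_t ModRmReg(uint8_t bRm)
    {
        return (bRm >> 3) & 7;
    }

    /* r/m field: the source XMM register, or part of the addressing mode. */
    static uint8_t ModRmRm(uint8_t bRm)
    {
        return bRm & 7;
    }

    int main(void)
    {
        uint8_t const bRm = 0xC1;   /* e.g. 'addss xmm0, xmm1': mod=11b, reg=0, r/m=1 */
        if (IsModRmRegMode(bRm))
            printf("register form: xmm%u += xmm%u\n", ModRmReg(bRm), ModRmRm(bRm));
        else
            printf("memory form: xmm%u += dword [mem]\n", ModRmReg(bRm));
        return 0;
    }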
trunk/src/VBox/VMM/include/IEMInternal.h (r96335 → r96339)

@@ -2411 +2411 @@
 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPSSEF2U128,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
 typedef FNIEMAIMPLFPSSEF2U128  *PFNIEMAIMPLFPSSEF2U128;
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPSSEF2U128R32,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
+typedef FNIEMAIMPLFPSSEF2U128R32  *PFNIEMAIMPLFPSSEF2U128R32;
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPSSEF2U128R64,(PX86FXSTATE pFpuState, PIEMSSERESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
+typedef FNIEMAIMPLFPSSEF2U128R64  *PFNIEMAIMPLFPSSEF2U128R64;
+
 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPAVXF3U128,(PX86XSAVEAREA pExtState, PIEMAVX128RESULT pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
 typedef FNIEMAIMPLFPAVXF3U128  *PFNIEMAIMPLFPAVXF3U128;
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPAVXF3U128R32,(PX86XSAVEAREA pExtState, PIEMAVX128RESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
+typedef FNIEMAIMPLFPAVXF3U128R32  *PFNIEMAIMPLFPAVXF3U128R32;
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPAVXF3U128R64,(PX86XSAVEAREA pExtState, PIEMAVX128RESULT pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
+typedef FNIEMAIMPLFPAVXF3U128R64  *PFNIEMAIMPLFPAVXF3U128R64;
+
 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPAVXF3U256,(PX86XSAVEAREA pExtState, PIEMAVX256RESULT pResult, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
 typedef FNIEMAIMPLFPAVXF3U256  *PFNIEMAIMPLFPAVXF3U256;

@@ -2429 +2439 @@
 FNIEMAIMPLFPSSEF2U128 iemAImpl_maxpd_u128;
 
+FNIEMAIMPLFPSSEF2U128R32 iemAImpl_addss_u128_r32;
+FNIEMAIMPLFPSSEF2U128R64 iemAImpl_addsd_u128_r64;
+
 FNIEMAIMPLFPAVXF3U128 iemAImpl_vaddps_u128, iemAImpl_vaddps_u128_fallback;
 FNIEMAIMPLFPAVXF3U128 iemAImpl_vaddpd_u128, iemAImpl_vaddpd_u128_fallback;

@@ -2441 +2454 @@
 FNIEMAIMPLFPAVXF3U128 iemAImpl_vmaxps_u128, iemAImpl_vmaxps_u128_fallback;
 FNIEMAIMPLFPAVXF3U128 iemAImpl_vmaxpd_u128, iemAImpl_vmaxpd_u128_fallback;
+
+FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback;
+FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback;
 
 FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddps_u256, iemAImpl_vaddps_u256_fallback;
trunk/src/VBox/VMM/include/IEMMc.h (r96247 → r96339)

@@ -451 +451 @@
 #define IEM_MC_REF_XREG_U64_CONST(a_pu64Dst, a_iXReg) \
     (a_pu64Dst) = ((uint64_t const *)&pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[0])
+#define IEM_MC_REF_XREG_R32_CONST(a_pr32Dst, a_iXReg) \
+    (a_pr32Dst) = ((RTFLOAT32U const *)&pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].ar32[0])
+#define IEM_MC_REF_XREG_R64_CONST(a_pr64Dst, a_iXReg) \
+    (a_pr64Dst) = ((RTFLOAT64U const *)&pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].ar64[0])
 #define IEM_MC_COPY_XREG_U128(a_iXRegDst, a_iXRegSrc) \
     do { pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXRegDst)].au64[0] \

@@ -694 +698 @@
 #ifndef IEM_WITH_SETJMP
 # define IEM_MC_FETCH_MEM_R32(a_r32Dst, a_iSeg, a_GCPtrMem) \
-    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU32(pVCpu, &(a_r32Dst).u32, (a_iSeg), (a_GCPtrMem)))
+    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU32(pVCpu, &(a_r32Dst).u, (a_iSeg), (a_GCPtrMem)))
 # define IEM_MC_FETCH_MEM_R64(a_r64Dst, a_iSeg, a_GCPtrMem) \
-    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU64(pVCpu, &(a_r64Dst).au64[0], (a_iSeg), (a_GCPtrMem)))
+    IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU64(pVCpu, &(a_r64Dst).u, (a_iSeg), (a_GCPtrMem)))
 # define IEM_MC_FETCH_MEM_R80(a_r80Dst, a_iSeg, a_GCPtrMem) \
     IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataR80(pVCpu, &(a_r80Dst), (a_iSeg), (a_GCPtrMem)))
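The IEM_MC_FETCH_MEM_R32/R64 adjustment above writes the fetched guest bytes through the integer view (.u) of the IPRT float unions, and the instruction helpers then read the same storage as a single or double precision value. Below is a rough stand-alone illustration of that union-based access in C, with a hypothetical MYFLOAT32U standing in for RTFLOAT32U and memcpy standing in for the guest memory fetch.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Hypothetical analogue of RTFLOAT32U: integer and float views of the same bits. */
    typedef union
    {
        uint32_t u;   /* raw bit pattern, what the memory fetch writes            */
        float    r;   /* single-precision view, what the instruction helper reads */
    } MYFLOAT32U;

    int main(void)
    {
        uint8_t const abGuestMem[4] = { 0x00, 0x00, 0x20, 0x40 };  /* 2.5f, little endian */
        MYFLOAT32U r32Src2;

        /* Stand-in for iemMemFetchDataU32 writing into &(a_r32Dst).u. */
        memcpy(&r32Src2.u, abGuestMem, sizeof(r32Src2.u));

        printf("bits=0x%08x value=%g\n", (unsigned)r32Src2.u, r32Src2.r);  /* 0x40200000, 2.5 */
        return 0;
    }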
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp (r96335 → r96339)

@@ -475 +475 @@
 #define iemAImpl_maxpd_u128         NULL
 
+#define iemAImpl_addss_u128_r32     NULL
+#define iemAImpl_addsd_u128_r64     NULL
 /** @} */
 

@@ -698 +700 @@
 #define IEM_MC_REF_XREG_U128_CONST(a_pu128Dst, a_iXReg)  do { (a_pu128Dst) = (PCRTUINT128U)((uintptr_t)0); CHK_PTYPE(PCRTUINT128U, a_pu128Dst); (void)fSseWrite; (void)fMcBegin; } while (0)
 #define IEM_MC_REF_XREG_U64_CONST(a_pu64Dst, a_iXReg)    do { (a_pu64Dst) = (uint64_t const *)((uintptr_t)0); CHK_PTYPE(uint64_t const *, a_pu64Dst); (void)fSseWrite; (void)fMcBegin; } while (0)
+#define IEM_MC_REF_XREG_R32_CONST(a_pr32Dst, a_iXReg)    do { (a_pr32Dst) = (RTFLOAT32U const *)((uintptr_t)0); CHK_PTYPE(RTFLOAT32U const *, a_pr32Dst); (void)fSseWrite; (void)fMcBegin; } while (0)
+#define IEM_MC_REF_XREG_R64_CONST(a_pr64Dst, a_iXReg)    do { (a_pr64Dst) = (RTFLOAT64U const *)((uintptr_t)0); CHK_PTYPE(RTFLOAT64U const *, a_pr64Dst); (void)fSseWrite; (void)fMcBegin; } while (0)
 #define IEM_MC_REF_XREG_XMM_CONST(a_pXmmDst, a_iXReg)    do { (a_pXmmDst) = (PCX86XMMREG)((uintptr_t)0); CHK_PTYPE(PCX86XMMREG, a_pXmmDst); (void)fSseWrite; (void)fMcBegin; } while (0)
 #define IEM_MC_COPY_XREG_U128(a_iXRegDst, a_iXRegSrc)    do { (void)fSseWrite; (void)fMcBegin; } while (0)
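tstIEMCheckMc.cpp only needs stub versions of the new helpers and microcode macros so the decoder source can be compiled stand-alone with its argument types verified: the worker pointers are defined to NULL, and the IEM_MC_REF_XREG_R32/R64_CONST stubs assign a typed null pointer so a mismatched destination type fails to compile. A reduced illustration of that compile-time type-check pattern follows; the names are hypothetical and the real CHK_PTYPE machinery is not reproduced.

    #include <stddef.h>

    typedef union { unsigned u; float r; } MYFLOAT32U;   /* stand-in for RTFLOAT32U */

    /* Stub in the style of tstIEMCheckMc.cpp: no real work, just a typed null
     * pointer assignment so the compiler verifies that a_pr32Dst really is a
     * 'MYFLOAT32U const *'. */
    #define REF_XREG_R32_CONST_STUB(a_pr32Dst, a_iXReg) \
        do { (a_pr32Dst) = (MYFLOAT32U const *)NULL; } while (0)

    int main(void)
    {
        MYFLOAT32U const *pr32Src2;
        REF_XREG_R32_CONST_STUB(pr32Src2, 1);   /* correct pointer type: compiles cleanly */
        /* int *pWrong; REF_XREG_R32_CONST_STUB(pWrong, 1);
           incompatible pointer type, flagged at compile time, which is the
           whole point of the testcase stubs */
        (void)pr32Src2;
        return 0;
    }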