Changeset 105283 in vbox

- Timestamp: Jul 11, 2024 8:26:27 PM
- Location: trunk/src/VBox/VMM
- Files: 9 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r105275 → r105283)

 
 
-;
-; CMPPS (SSE)
+;;
+; SSE/AVX instructions with 8-bit immediates of the form
+;    xxx     xmm1, xmm2, imm8.
+;    vxxx    xmm1, xmm2, xmm3, imm8.
+; and we need to load and save the MXCSR register.
+;
+; @param    1   The instruction name.
+; @param    2   Flag whether this instruction has a 256-bit AVX variant (1) or not (0).
+; @param    3   Number of bytes for the encoding of the SSE variant + ret instruction (AVX is fixed to 6).
 ;
 ; @return   R0_32   The new MXCSR value of the guest.
…
 ; @param    A3      The 8-bit immediate (input).
 ;
-BEGINPROC_FASTCALL iemAImpl_cmpps_u128, 16
+%macro IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR 3
+BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u128, 16
         PROLOGUE_4_ARGS
         IEMIMPL_SSE_PROLOGUE
…
         movdqu  xmm0, [A2 + IEMMEDIAF2XMMSRC.uSrc1]
         movdqu  xmm1, [A2 + IEMMEDIAF2XMMSRC.uSrc2]
-        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A3, 5
-        movdqu  [A1], xmm0
-
-        SSE_AVX_ST_MXCSR R0_32, A0_32
-        IEMIMPL_SSE_EPILOGUE
-        EPILOGUE_4_ARGS
- %assign bImm 0
- %rep 256
-.imm %+ bImm:
-        IBT_ENDBRxx_WITHOUT_NOTRACK
-        cmpps   xmm0, xmm1, bImm
-        ret
- %assign bImm bImm + 1
- %endrep
-.immEnd:
-ENDPROC iemAImpl_cmpps_u128
-
-;;
-; SSE instructions with 8-bit immediates of the form
-;    xxx     xmm1, xmm2, imm8.
-; where the instruction encoding takes up 5 bytes and we need to load and save the MXCSR
-; register.
-;
-; @param    1   The instruction name.
-;
-; @return   R0_32   The new MXCSR value of the guest.
-; @param    A0_32   The guest's MXCSR register value to use (input).
-; @param    A1      Pointer to the first media register size operand (output).
-; @param    A2      Pointer to the two media register sized inputs - IEMMEDIAF2XMMSRC (input).
-; @param    A3      The 8-bit immediate (input).
-;
-%macro IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR_5 1
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u128, 16
-        PROLOGUE_4_ARGS
-        IEMIMPL_SSE_PROLOGUE
-        SSE_AVX_LD_MXCSR A0_32
-
-        movzx   A3, A3_8                ; must clear top bits
-        movdqu  xmm0, [A2 + IEMMEDIAF2XMMSRC.uSrc1]
-        movdqu  xmm1, [A2 + IEMMEDIAF2XMMSRC.uSrc2]
-        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A3, 6
+        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A3, %3
         movdqu  [A1], xmm0
 
…
 .immEnd:
 ENDPROC iemAImpl_ %+ %1 %+ _u128
-%endmacro
-
-IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR_5 cmppd
-IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR_5 cmpss
-IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR_5 cmpsd
+
+
+BEGINPROC_FASTCALL iemAImpl_v %+ %1 %+ _u128, 16
+        PROLOGUE_4_ARGS
+        IEMIMPL_SSE_PROLOGUE
+        SSE_AVX_LD_MXCSR A0_32
+
+        movzx   A3, A3_8                ; must clear top bits
+        movdqu  xmm0, [A2 + IEMMEDIAF2XMMSRC.uSrc1]
+        movdqu  xmm1, [A2 + IEMMEDIAF2XMMSRC.uSrc2]
+        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A3, 6
+        movdqu  [A1], xmm0
+
+        SSE_AVX_ST_MXCSR R0_32, A0_32
+        IEMIMPL_SSE_EPILOGUE
+        EPILOGUE_4_ARGS
+ %assign bImm 0
+ %rep 256
+.imm %+ bImm:
+        IBT_ENDBRxx_WITHOUT_NOTRACK
+        v %+ %1 xmm0, xmm0, xmm1, bImm
+        ret
+ %assign bImm bImm + 1
+ %endrep
+.immEnd:
+ENDPROC iemAImpl_v %+ %1 %+ _u128
+
+%if %2 == 1
+BEGINPROC_FASTCALL iemAImpl_v %+ %1 %+ _u256, 16
+        PROLOGUE_4_ARGS
+        IEMIMPL_SSE_PROLOGUE
+        SSE_AVX_LD_MXCSR A0_32
+
+        movzx   A3, A3_8                ; must clear top bits
+        vmovdqu ymm0, [A2 + IEMMEDIAF2YMMSRC.uSrc1]
+        vmovdqu ymm1, [A2 + IEMMEDIAF2YMMSRC.uSrc2]
+        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A3, 6
+        vmovdqu [A1], ymm0
+
+        SSE_AVX_ST_MXCSR R0_32, A0_32
+        IEMIMPL_SSE_EPILOGUE
+        EPILOGUE_4_ARGS
+ %assign bImm 0
+ %rep 256
+.imm %+ bImm:
+        IBT_ENDBRxx_WITHOUT_NOTRACK
+        v %+ %1 ymm0, ymm0, ymm1, bImm
+        ret
+ %assign bImm bImm + 1
+ %endrep
+.immEnd:
+ENDPROC iemAImpl_v %+ %1 %+ _u256
+%endif
+%endmacro
+
+IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR cmpps, 1, 5
+IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR cmppd, 1, 6
+IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR cmpss, 0, 6
+IEMIMPL_MEDIA_SSE_INSN_IMM8_MXCSR cmpsd, 0, 6
 
 
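A note on the jump-table trick this macro relies on: the compare predicate is an immediate baked into the instruction encoding, so it cannot simply be passed in a register at run time. The %rep 256 block stamps out one fixed-size "instruction + ret" stub per immediate value, and IEMIMPL_CALL_JUMP_TABLE_TARGET jumps to the stub selected by A3; the new third macro parameter is the per-stub byte count used in that address arithmetic, which is why cmpps (no mandatory prefix, 5 bytes including the ret) differs from cmppd/cmpss/cmpsd (one prefix byte more, 6 bytes). A C implementation has no such option and must dispatch on the immediate instead, roughly like this sketch (illustrative only, using standard SSE intrinsics; the real fallbacks are in IEMAllAImplC.cpp below):

#include <immintrin.h>
#include <stdint.h>

/* Emulate 'cmpps xmm, xmm, imm8'.  The intrinsics require the predicate to
 * be a compile-time constant, hence the switch.  SSE only defines
 * predicates 0..7; AVX extends the encoding to 0..31, so this sketch masks
 * to the SSE range. */
static __m128 emulate_cmpps(__m128 uSrc1, __m128 uSrc2, uint8_t bImm)
{
    switch (bImm & 7)
    {
        case 0:  return _mm_cmpeq_ps(uSrc1, uSrc2);    /* EQ_OQ   */
        case 1:  return _mm_cmplt_ps(uSrc1, uSrc2);    /* LT_OS   */
        case 2:  return _mm_cmple_ps(uSrc1, uSrc2);    /* LE_OS   */
        case 3:  return _mm_cmpunord_ps(uSrc1, uSrc2); /* UNORD_Q */
        case 4:  return _mm_cmpneq_ps(uSrc1, uSrc2);   /* NEQ_UQ  */
        case 5:  return _mm_cmpnlt_ps(uSrc1, uSrc2);   /* NLT_US  */
        case 6:  return _mm_cmpnle_ps(uSrc1, uSrc2);   /* NLE_US  */
        default: return _mm_cmpord_ps(uSrc1, uSrc2);   /* ORD_Q   */
    }
}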
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r105275 → r105283)

 
 /**
- * CMPPS / CMPPD / CMPSS / CMPSD
- */
-#ifdef IEM_WITHOUT_ASSEMBLY
+ * [V]CMPPS / [V]CMPPD / [V]CMPSS / [V]CMPSD
+ */
 /**
  * A compare truth table entry.
…
     /* 06H (NLE_US)  */ { true,  true,  false, false, true  },
     /* 07H (ORQ_Q)   */ { false, false, true,  true,  true  },
-    /** @todo AVX variants. */
+    /* Entries supported by the AVX variants. */
+    /* 08H (EQ_UQ)   */ { false, true,  true,  false, false },
+    /* 09H (NGE_US)  */ { true,  true,  false, true,  false },
+    /* 0aH (NGT_US)  */ { true,  true,  true,  true,  false },
+    /* 0bH (FALSE_OQ)*/ { false, false, false, false, false },
+    /* 0cH (NEQ_OQ)  */ { false, false, false, true,  true  },
+    /* 0dH (GE_OS)   */ { true,  false, true,  false, true  },
+    /* 0eH (GT_OS)   */ { true,  false, false, false, true  },
+    /* 0fH (TRUE_UQ) */ { false, true,  true,  true,  true  },
+    /* 10H (EQ_OS)   */ { true,  false, true,  false, false },
+    /* 11H (LT_OQ)   */ { false, false, false, true,  false },
+    /* 12H (LE_OQ)   */ { false, false, true,  true,  false },
+    /* 13H (UNORD_S) */ { true,  true,  false, false, false },
+    /* 14H (NEQ_US)  */ { true,  true,  false, true,  true  },
+    /* 15H (NLT_UQ)  */ { false, true,  true,  false, true  },
+    /* 16H (NLE_UQ)  */ { false, true,  false, false, true  },
+    /* 17H (ORD_S)   */ { true,  false, true,  true,  true  },
+    /* 18H (EQ_US)   */ { true,  true,  true,  false, false },
+    /* 19H (NGE_UQ)  */ { false, true,  false, true,  false },
+    /* 1aH (NGT_UQ)  */ { false, true,  true,  true,  false },
+    /* 1bH (FALSE_OS)*/ { true,  false, false, false, false },
+    /* 1cH (NEQ_OS)  */ { true,  false, false, true,  true  },
+    /* 1dH (GE_OQ)   */ { false, false, true,  false, true  },
+    /* 1eH (GT_OQ)   */ { false, false, false, false, true  },
+    /* 1fH (TRUE_US) */ { true,  true,  true,  true,  true  },
 };
…
 
 
+#ifdef IEM_WITHOUT_ASSEMBLY
 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_cmpps_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC pSrc, uint8_t bEvil))
 {
…
 }
 #endif
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmpps_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC pSrc, uint8_t bEvil))
+{
+    for (uint8_t i = 0; i < RT_ELEMENTS(puDst->ar32); i++)
+    {
+        if (iemAImpl_cmp_worker_r32(&uMxCsrIn, &pSrc->uSrc1.ar32[i], &pSrc->uSrc2.ar32[i], bEvil & 0x1f))
+            puDst->au32[i] = UINT32_MAX;
+        else
+            puDst->au32[i] = 0;
+    }
+
+    return uMxCsrIn;
+}
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmpps_u256_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC pSrc, uint8_t bEvil))
+{
+    for (uint8_t i = 0; i < RT_ELEMENTS(puDst->ar32); i++)
+    {
+        if (iemAImpl_cmp_worker_r32(&uMxCsrIn, &pSrc->uSrc1.ar32[i], &pSrc->uSrc2.ar32[i], bEvil & 0x1f))
+            puDst->au32[i] = UINT32_MAX;
+        else
+            puDst->au32[i] = 0;
+    }
+
+    return uMxCsrIn;
+}
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmppd_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC pSrc, uint8_t bEvil))
+{
+    for (uint8_t i = 0; i < RT_ELEMENTS(puDst->ar64); i++)
+    {
+        if (iemAImpl_cmp_worker_r64(&uMxCsrIn, &pSrc->uSrc1.ar64[i], &pSrc->uSrc2.ar64[i], bEvil & 0x1f))
+            puDst->au64[i] = UINT64_MAX;
+        else
+            puDst->au64[i] = 0;
+    }
+
+    return uMxCsrIn;
+}
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmppd_u256_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC pSrc, uint8_t bEvil))
+{
+    for (uint8_t i = 0; i < RT_ELEMENTS(puDst->ar64); i++)
+    {
+        if (iemAImpl_cmp_worker_r64(&uMxCsrIn, &pSrc->uSrc1.ar64[i], &pSrc->uSrc2.ar64[i], bEvil & 0x1f))
+            puDst->au64[i] = UINT64_MAX;
+        else
+            puDst->au64[i] = 0;
+    }
+
+    return uMxCsrIn;
+}
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmpss_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC pSrc, uint8_t bEvil))
+{
+    if (iemAImpl_cmp_worker_r32(&uMxCsrIn, &pSrc->uSrc1.ar32[0], &pSrc->uSrc2.ar32[0], bEvil & 0x1f))
+        puDst->au32[0] = UINT32_MAX;
+    else
+        puDst->au32[0] = 0;
+
+    puDst->au32[1] = pSrc->uSrc1.au32[1];
+    puDst->au64[1] = pSrc->uSrc1.au64[1];
+    return uMxCsrIn;
+}
+
+
+IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcmpsd_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC pSrc, uint8_t bEvil))
+{
+    if (iemAImpl_cmp_worker_r64(&uMxCsrIn, &pSrc->uSrc1.ar64[0], &pSrc->uSrc2.ar64[0], bEvil & 0x1f))
+        puDst->au64[0] = UINT64_MAX;
+    else
+        puDst->au64[0] = 0;
+
+    puDst->au64[1] = pSrc->uSrc1.au64[1];
+    return uMxCsrIn;
+}
 
 
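The truth-table layout makes the fallbacks above almost mechanical: each row records, for one predicate encoding, the desired result for every possible relation of the two operands, so the per-element worker classifies the pair once and indexes the row, and the caller then widens the boolean to an all-ones or all-zeroes element (UINT32_MAX / UINT64_MAX), as the loops over ar32/ar64 show. A standalone sketch of the idea follows; the field names are illustrative guesses, not the actual VBox structure:

#include <stdbool.h>

typedef struct CMPTRUTHENTRY
{
    bool fSignalsOnQNan;    /* whether a quiet NaN input raises #IA (the signalling forms) */
    bool fUnordered;        /* result when either input is NaN */
    bool fEqual;            /* result when src1 == src2 */
    bool fLowerThan;        /* result when src1 <  src2 */
    bool fGreaterThan;      /* result when src1 >  src2 */
} CMPTRUTHENTRY;

/* Classify the operand pair once, then look the answer up in the
 * predicate's row. */
static bool cmpEval(CMPTRUTHENTRY const *pEntry, float r32Src1, float r32Src2)
{
    if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   /* NaN compares unequal to itself */
        return pEntry->fUnordered;
    if (r32Src1 == r32Src2)
        return pEntry->fEqual;
    return r32Src1 < r32Src2 ? pEntry->fLowerThan : pEntry->fGreaterThan;
}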
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py (r105251 → r105283)

     'IEM_MC_FETCH_MEM_YMM_ALIGN_AVX':                (McBlock.parseMcGeneric, True,  True,  False, ),
     'IEM_MC_FETCH_MEM_YMM_NO_AC':                    (McBlock.parseMcGeneric, True,  True,  g_fNativeSimd),
+    'IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM':   (McBlock.parseMcGeneric, True,  True,  False, ),
     'IEM_MC_FETCH_MEM16_U8':                         (McBlock.parseMcGeneric, True,  True,  False, ),
     'IEM_MC_FETCH_MEM32_U8':                         (McBlock.parseMcGeneric, True,  True,  False, ),
…
     'IEM_MC_FETCH_YREG_U32':                         (McBlock.parseMcGeneric, False, False, g_fNativeSimd),
     'IEM_MC_FETCH_YREG_U64':                         (McBlock.parseMcGeneric, False, False, g_fNativeSimd),
+    'IEM_MC_FETCH_YREG_PAIR_YMM':                    (McBlock.parseMcGeneric, False, False, False, ),
     'IEM_MC_FLIP_EFL_BIT':                           (McBlock.parseMcGeneric, True,  True,  True,  ),
     'IEM_MC_FPU_FROM_MMX_MODE':                      (McBlock.parseMcGeneric, True,  True,  False, ),
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h (r105253 → r105283)

 /* Opcode VEX.F2.0F 0xc1 - invalid. */
 
+#define IEMOP_VCMPP_BODY(a_Instr) \
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
+    if (IEM_IS_MODRM_REG_MODE(bRm)) \
+    { \
+        /* \
+         * Register, Register. \
+         */ \
+        if (pVCpu->iem.s.uVexLength) \
+        { \
+            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
+            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
+            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
+            IEM_MC_PREPARE_AVX_USAGE(); \
+            IEM_MC_LOCAL(X86YMMREG, uDst); \
+            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
+            IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
+            IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
+            IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
+            IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
+                                    puDst, puSrc, bImmArg); \
+            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
+            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
+            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+            IEM_MC_END(); \
+        } \
+        else \
+        { \
+            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
+            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
+            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
+            IEM_MC_PREPARE_AVX_USAGE(); \
+            IEM_MC_LOCAL(X86XMMREG, uDst); \
+            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
+            IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
+            IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
+            IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
+            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
+            IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
+                                    puDst, puSrc, bImmArg); \
+            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
+            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
+            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+            IEM_MC_END(); \
+        } \
+    } \
+    else \
+    { \
+        /* \
+         * Register, Memory. \
+         */ \
+        if (pVCpu->iem.s.uVexLength) \
+        { \
+            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
+            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
+            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
+            IEM_MC_PREPARE_AVX_USAGE(); \
+            IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
+            IEM_MC_LOCAL(X86YMMREG, uDst); \
+            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
+            IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
+            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
+            IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+            IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
+                                    puDst, puSrc, bImmArg); \
+            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
+            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
+            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+            IEM_MC_END(); \
+        } \
+        else \
+        { \
+            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
+            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
+            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
+            IEM_MC_PREPARE_AVX_USAGE(); \
+            IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
+            IEM_MC_LOCAL(X86XMMREG, uDst); \
+            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
+            IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
+            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
+            IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+            IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
+                                                                RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
+                                    puDst, puSrc, bImmArg); \
+            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
+            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
+            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
+            IEM_MC_END(); \
+        } \
+    } \
+    (void)0
+
+
 /** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
-FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
+FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
+{
+    IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
+    IEMOP_VCMPP_BODY(vcmpps);
+}
+
+
 /** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
-FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
+FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
+{
+    IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
+    IEMOP_VCMPP_BODY(vcmppd);
+}
+
+
 /** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
-FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
+FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
+{
+    IEMOP_MNEMONIC4(VEX_RVMI, CMPSS, cmpss, Vss, Hps, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
+
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * XMM32, XMM32.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
+        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_LOCAL(X86XMMREG, uDst);
+        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
+                                puDst, puSrc, bImmArg);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * XMM32, [mem32].
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
+        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_LOCAL(X86XMMREG, uDst);
+        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
+                                puDst, puSrc, bImmArg);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+
+
 /** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
-FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
+FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
+{
+    IEMOP_MNEMONIC4(VEX_RVMI, CMPSD, cmpsd, Vsd, Hpd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
+
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * XMM64, XMM64.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
+        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_LOCAL(X86XMMREG, uDst);
+        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
+                                puDst, puSrc, bImmArg);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * XMM64, [mem64].
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
+        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_LOCAL(X86XMMREG, uDst);
+        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
+                                puDst, puSrc, bImmArg);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+
 
 /* Opcode VEX.0F 0xc3 - invalid */
trunk/src/VBox/VMM/VMMAll/IEMAllN8vePython.py (r105183 → r105283)

     'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64': (None, True, True, False, ),
     'IEM_MC_FETCH_MEM_FLAT_YMM_NO_AC':                                 (None, True, True, g_fNativeSimd),
+    'IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX_AND_YREG_YMM':                (None, True, True, False, ),
     'IEM_MC_MEM_FLAT_MAP_D80_WO':                                      (None, True, True, True,  ),
     'IEM_MC_MEM_FLAT_MAP_I16_WO':                                      (None, True, True, True,  ),
trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py (r105277 → r105283)

     'X86YMMREG':            ( 256, False, 'X86YMMREG', ),
     'IEMMEDIAF2XMMSRC':     ( 256, False, 'IEMMEDIAF2XMMSRC',),
+    'IEMMEDIAF2YMMSRC':     ( 512, False, 'IEMMEDIAF2YMMSRC',),
     'RTUINT256U':           ( 256, False, 'RTUINT256U', ),
     'IEMPCMPISTRXSRC':      ( 256, False, 'IEMPCMPISTRXSRC', ),
…
     'IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64':
         ( 2, 'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64' ),
+    'IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM': ( 2, 'IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX_AND_YREG_YMM' ),
     'IEM_MC_STORE_MEM_U8':                         ( 0, 'IEM_MC_STORE_MEM_FLAT_U8' ),
     'IEM_MC_STORE_MEM_U16':                        ( 0, 'IEM_MC_STORE_MEM_FLAT_U16' ),
trunk/src/VBox/VMM/include/IEMInternal.h (r105277 → r105283)

 typedef const IEMMEDIAF2XMMSRC *PCIEMMEDIAF2XMMSRC;
 
+
 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3XMMIMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC puSrc, uint8_t bEvil));
 typedef FNIEMAIMPLMEDIAF3XMMIMM8 *PFNIEMAIMPLMEDIAF3XMMIMM8;
+
+
+typedef struct IEMMEDIAF2YMMSRC
+{
+    X86YMMREG               uSrc1;
+    X86YMMREG               uSrc2;
+} IEMMEDIAF2YMMSRC;
+typedef IEMMEDIAF2YMMSRC *PIEMMEDIAF2YMMSRC;
+typedef const IEMMEDIAF2YMMSRC *PCIEMMEDIAF2YMMSRC;
+
+
+typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3YMMIMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bEvil));
+typedef FNIEMAIMPLMEDIAF3YMMIMM8 *PFNIEMAIMPLMEDIAF3YMMIMM8;
 
 
…
 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpss_u128;
 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpsd_u128;
+
+FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpps_u128, iemAImpl_vcmpps_u128_fallback;
+FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmppd_u128, iemAImpl_vcmppd_u128_fallback;
+FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback;
+FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback;
+
+FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmpps_u256, iemAImpl_vcmpps_u256_fallback;
+FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmppd_u256, iemAImpl_vcmppd_u256_fallback;
+
 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundss_u128;
 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundsd_u128;
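Both function-pointer types keep the MXCSR handling explicit: the guest MXCSR goes in by value and the updated value comes back as the return value, so a worker never has to mutate saved host FPU state behind the caller's back. A hypothetical call through the new YMM typedef, just to show the shape (not code from the changeset; types as declared above):

/* Hypothetical helper: run one 256-bit compare worker and hand back the
 * updated guest MXCSR. */
static uint32_t doYmmCmpImm8(PFNIEMAIMPLMEDIAF3YMMIMM8 pfnWorker, uint32_t uGstMxCsr,
                             PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bImm)
{
    return pfnWorker(uGstMxCsr, puDst, puSrc, bImm); /* e.g. iemAImpl_vcmpps_u256_fallback */
}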
trunk/src/VBox/VMM/include/IEMMc.h (r105183 → r105283)

          (a_uYmmDst).au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrcTmp].au64[0]; \
          (a_uYmmDst).au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrcTmp].au64[1]; \
+    } while (0)
+#define IEM_MC_FETCH_YREG_PAIR_YMM(a_uYmmDst, a_iYRegSrc1, a_iYRegSrc2) \
+    do { uintptr_t const iYRegSrc1Tmp = (a_iYRegSrc1); \
+         uintptr_t const iYRegSrc2Tmp = (a_iYRegSrc2); \
+         (a_uYmmDst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc1Tmp].au64[0]; \
+         (a_uYmmDst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc1Tmp].au64[1]; \
+         (a_uYmmDst).uSrc1.au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc1Tmp].au64[0]; \
+         (a_uYmmDst).uSrc1.au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc1Tmp].au64[1]; \
+         (a_uYmmDst).uSrc2.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc2Tmp].au64[0]; \
+         (a_uYmmDst).uSrc2.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegSrc2Tmp].au64[1]; \
+         (a_uYmmDst).uSrc2.au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc2Tmp].au64[0]; \
+         (a_uYmmDst).uSrc2.au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrc2Tmp].au64[1]; \
     } while (0)
…
 # define IEM_MC_FETCH_MEM_YMM_ALIGN_AVX(a_YmmDst, a_iSeg, a_GCPtrMem) \
      IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU256AlignedAvx(pVCpu, &(a_YmmDst).ymm, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(a_uYmmDst, a_iYRegSrc1, a_iSeg2, a_GCPtrMem2) do { \
+        uintptr_t const a_iYRegSrc1Tmp = (a_iYRegSrc1); \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU256AlignedAvx(pVCpu, &(a_uYmmDst).uSrc2.ymm, (a_iSeg2), (a_GCPtrMem2))); \
+        (a_uYmmDst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[1]; \
+        (a_uYmmDst).uSrc1.au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[1]; \
+    } while (0)
+
 #else
 # define IEM_MC_FETCH_MEM_U256(a_u256Dst, a_iSeg, a_GCPtrMem) \
…
      iemMemFetchDataU256AlignedAvxJmp(pVCpu, &(a_YmmDst).ymm, (a_iSeg), (a_GCPtrMem))
 
+# define IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(a_uYmmDst, a_iYRegSrc1, a_iSeg2, a_GCPtrMem2) do { \
+        uintptr_t const a_iYRegSrc1Tmp = (a_iYRegSrc1); \
+        iemMemFetchDataU256AlignedAvxJmp(pVCpu, &(a_uYmmDst).uSrc2.ymm, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_uYmmDst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[1]; \
+        (a_uYmmDst).uSrc1.au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[1]; \
+    } while (0)
+
 # define IEM_MC_FETCH_MEM_FLAT_U256(a_u256Dst, a_GCPtrMem) \
      iemMemFlatFetchDataU256NoAcJmp(pVCpu, &(a_u256Dst), (a_GCPtrMem))
…
 # define IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX(a_YmmDst, a_GCPtrMem) \
      iemMemFlatFetchDataU256AlignedAvxJmp(pVCpu, &(a_YmmDst).ymm, (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX_AND_YREG_YMM(a_uYmmDst, a_iYRegSrc1, a_GCPtrMem2) do { \
+        uintptr_t const a_iYRegSrc1Tmp = (a_iYRegSrc1); \
+        iemMemFlatFetchDataU256AlignedAvxJmp(pVCpu, &(a_uYmmDst).uSrc2.ymm, (a_GCPtrMem2)); \
+        (a_uYmmDst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[a_iYRegSrc1Tmp].au64[1]; \
+        (a_uYmmDst).uSrc1.au64[2] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[0]; \
+        (a_uYmmDst).uSrc1.au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[a_iYRegSrc1Tmp].au64[1]; \
+    } while (0)
+
 #endif
 
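All three variants of the new fetch macro follow the same pattern: load the memory operand into uSrc2 with AVX alignment checking, then copy the register operand into uSrc1 from the legacy XMM and YmmHi halves of the extended state, leaving the worker one immutable two-operand snapshot. Stripped of the IEM plumbing, the shape is roughly this (stand-in types, not the real IEM declarations):

#include <stdint.h>
#include <string.h>

typedef struct YMMREG        { uint64_t au64[4]; }    YMMREG;        /* 256-bit value */
typedef struct MEDIAF2YMMSRC { YMMREG uSrc1, uSrc2; } MEDIAF2YMMSRC; /* both operands */

/* Snapshot the register operand and the (already validated) memory operand
 * into one local struct before invoking the worker. */
static void fetchPair(MEDIAF2YMMSRC *pDst, YMMREG const *pReg, void const *pvMem)
{
    pDst->uSrc1 = *pReg;                         /* register source     */
    memcpy(&pDst->uSrc2, pvMem, sizeof(YMMREG)); /* guest-memory source */
}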
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp (r105183 → r105283)

 #define IEM_MC_FETCH_YREG_U64(a_u64Value, a_iYRegSrc, a_iQWord) do { CHK_YREG_IDX(a_iYRegSrc); CHK_VAR(a_u64Value); (a_u64Value) = UINT64_MAX; CHK_TYPE(uint64_t, a_u64Value); (void)fAvxRead; (void)fMcBegin; } while (0)
 #define IEM_MC_FETCH_YREG_U32(a_u32Value, a_iYRegSrc) do { CHK_YREG_IDX(a_iYRegSrc); CHK_VAR(a_u32Value); (a_u32Value) = UINT32_MAX; CHK_TYPE(uint32_t, a_u32Value); (void)fAvxRead; (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_YREG_PAIR_YMM(a_uYmmDst, a_iYRegSrc1, a_iYRegSrc2) \
+    do { CHK_YREG_IDX(a_iYRegSrc1); CHK_YREG_IDX(a_iYRegSrc2); CHK_VAR(a_uYmmDst); (a_uYmmDst).uSrc1.au64[0] = (a_uYmmDst).uSrc1.au64[1] = (a_uYmmDst).uSrc1.au64[2] = (a_uYmmDst).uSrc1.au64[3] = (a_uYmmDst).uSrc2.au64[0] = (a_uYmmDst).uSrc2.au64[1] = (a_uYmmDst).uSrc2.au64[2] = (a_uYmmDst).uSrc2.au64[3] = 0; CHK_TYPE(IEMMEDIAF2YMMSRC, a_uYmmDst); (void)fAvxRead; (void)fMcBegin; } while (0)
 #define IEM_MC_STORE_YREG_U64(a_iYRegDst, a_iQword, a_u64Value) do { CHK_XREG_IDX(a_iYRegDst); CHK_VAR(a_u64Value); CHK_TYPE(uint64_t, a_u64Value); AssertCompile((a_iQword) < 4); (void)fAvxWrite; (void)fMcBegin; } while (0)
 #define IEM_MC_STORE_YREG_U128(a_iYRegDst, a_iDQword, a_u128Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_VAR(a_u128Value); CHK_TYPE(RTUINT128U, a_u128Value); (void)fAvxWrite; (void)fMcBegin; } while (0)
…
 # define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) \
     do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_VAR(a_GCPtrMem2); CHK_VAR(a_Dst); CHK_TYPE(IEMPCMPESTRXSRC, a_Dst); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(a_uYmmDst, a_iYRegSrc1, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iYRegSrc1); (void)fAvxRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_VAR(a_GCPtrMem2); CHK_VAR(a_uYmmDst); CHK_TYPE(IEMMEDIAF2YMMSRC, a_uYmmDst); (void)fMcBegin; } while (0)
 
 #define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value) do { CHK_SEG_IDX(a_iSeg); CHK_GCPTR(a_GCPtrMem); CHK_VAR(a_GCPtrMem); CHK_TYPE(uint8_t, a_u8Value); CHK_VAR(a_u8Value); CHK_SEG_IDX(a_iSeg); (void)fMcBegin; } while (0)