Changeset 95487 in vbox
- Timestamp: Jul 3, 2022, 2:02:39 PM
- svn:sync-xref-src-repo-rev: 152101
- Location: trunk/src/VBox/VMM
- Files: 9 edited
Legend:
- Unmodified lines are shown without a prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
--- r95482
+++ r95487
@@ -3719 +3719 @@
 ;
 
-BEGINPROC_FASTCALL iemAImpl_pshufw, 16
-        PROLOGUE_4_ARGS
+BEGINPROC_FASTCALL iemAImpl_pshufw_u64, 16
+        PROLOGUE_3_ARGS
         IEMIMPL_MMX_PROLOGUE
 
-        movq    mm0, [A1]
-        movq    mm1, [A2]
-        lea     T0, [A3 + A3*4]         ; sizeof(pshufw+ret) == 5
+        movq    mm1, [A1]
+        movq    mm0, mm0                ; paranoia!
+        lea     T0, [A2 + A2*4]         ; sizeof(pshufw+ret) == 5
         lea     T1, [.imm0 xWrtRIP]
         lea     T1, [T1 + T0]
         call    T1
-        movq    [A1], mm0
+        movq    [A0], mm0
 
         IEMIMPL_MMX_EPILOGUE
-        EPILOGUE_4_ARGS
+        EPILOGUE_3_ARGS
 %assign bImm 0
 %rep 256
@@ -3742 +3742 @@
 .immEnd:                                ; 256*5 == 0x500
         dw 0xfaff  + (.immEnd - .imm0)  ; will cause warning if entries are too big.
-        dw 0x104ff - (.immEnd - .imm0)  ; will cause warning if entries are small big.
-ENDPROC iemAImpl_pshufw
+        dw 0x104ff - (.immEnd - .imm0)  ; will cause warning if entries are too small.
+ENDPROC iemAImpl_pshufw_u64
 
 
 %macro IEMIMPL_MEDIA_SSE_PSHUFXX 1
-BEGINPROC_FASTCALL iemAImpl_ %+ %1, 16
-        PROLOGUE_4_ARGS
+BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u128, 16
+        PROLOGUE_3_ARGS
         IEMIMPL_SSE_PROLOGUE
 
-        movdqu  xmm0, [A1]
-        movdqu  xmm1, [A2]
+        movdqu  xmm1, [A1]
+        movdqu  xmm0, xmm1              ; paranoia!
         lea     T1, [.imm0 xWrtRIP]
-        lea     T0, [A3 + A3*2]         ; sizeof(pshufXX+ret) == 6: (A3 * 3) *2
+        lea     T0, [A2 + A2*2]         ; sizeof(pshufXX+ret) == 6: (A3 * 3) *2
         lea     T1, [T1 + T0*2]
         call    T1
-        movdqu  [A1], xmm0
+        movdqu  [A0], xmm0
 
         IEMIMPL_SSE_EPILOGUE
-        EPILOGUE_4_ARGS
+        EPILOGUE_3_ARGS
 %assign bImm 0
 %rep 256
@@ -3770 +3770 @@
 .immEnd:                                ; 256*6 == 0x600
         dw 0xf9ff  + (.immEnd - .imm0)  ; will cause warning if entries are too big.
-        dw 0x105ff - (.immEnd - .imm0)  ; will cause warning if entries are small big.
-ENDPROC iemAImpl_ %+ %1
+        dw 0x105ff - (.immEnd - .imm0)  ; will cause warning if entries are too small.
+ENDPROC iemAImpl_ %+ %1 %+ _u128
 %endmacro
 
@@ -3777 +3777 @@
 IEMIMPL_MEDIA_SSE_PSHUFXX pshuflw
 IEMIMPL_MEDIA_SSE_PSHUFXX pshufd
+
+
+%macro IEMIMPL_MEDIA_AVX_VPSHUFXX 1
+BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u256, 16
+        PROLOGUE_3_ARGS
+        IEMIMPL_SSE_PROLOGUE
+
+        vmovdqu ymm1, [A1]
+        vmovdqu ymm0, ymm1              ; paranoia!
+        lea     T1, [.imm0 xWrtRIP]
+        lea     T0, [A2 + A2*2]         ; sizeof(pshufXX+ret) == 6: (A3 * 3) *2
+        lea     T1, [T1 + T0*2]
+        call    T1
+        vmovdqu [A0], ymm0
+
+        IEMIMPL_SSE_EPILOGUE
+        EPILOGUE_3_ARGS
+ %assign bImm 0
+ %rep 256
+.imm %+ bImm:
+        %1      ymm0, ymm1, bImm
+        ret
+ %assign bImm bImm + 1
+ %endrep
+.immEnd:                                ; 256*6 == 0x600
+        dw 0xf9ff  + (.immEnd - .imm0)  ; will cause warning if entries are too big.
+        dw 0x105ff - (.immEnd - .imm0)  ; will cause warning if entries are too small.
+ENDPROC iemAImpl_ %+ %1 %+ _u256
+%endmacro
+
+IEMIMPL_MEDIA_AVX_VPSHUFXX vpshufhw
+IEMIMPL_MEDIA_AVX_VPSHUFXX vpshuflw
+IEMIMPL_MEDIA_AVX_VPSHUFXX vpshufd
 
 
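Note on the stub tables: the imm8 operand is baked into the instruction encoding, so the assembly stamps out all 256 variants and jumps to the right one. Each MMX entry is exactly 5 bytes (pshufw + ret), hence the A2 + A2*4 scaling; the SSE/AVX entries are 6 bytes, reached via (A2 + A2*2) * 2. A minimal C sketch of that address arithmetic (helper names are hypothetical; only the arithmetic comes from the source):

    #include <stdint.h>

    /* Entry i of the MMX table sits at .imm0 + i*5 (pshufw + ret == 5 bytes). */
    static uintptr_t PshufwStubAddr(uintptr_t uImm0, uint8_t bImm)
    {
        return uImm0 + (uintptr_t)bImm * 5;              /* lea T0,[A2+A2*4]; lea T1,[.imm0+T0] */
    }

    /* The SSE/AVX tables use 6-byte entries: .imm0 + i*6, computed as (i + i*2)*2. */
    static uintptr_t PshufXxStubAddr(uintptr_t uImm0, uint8_t bImm)
    {
        return uImm0 + (uintptr_t)(bImm + bImm * 2) * 2; /* lea T0,[A2+A2*2]; lea T1,[T1+T0*2] */
    }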
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
--- r95482
+++ r95487
@@ -8359 +8359 @@
 
 /*
- *
+ * PSHUFW, [V]PSHUFHW, [V]PSHUFLW, [V]PSHUFD
  */
 #ifdef IEM_WITHOUT_ASSEMBLY
 
-IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw,(PCX86FXSTATE pFpuState, uint64_t *pu64Dst, uint64_t const *pu64Src, uint8_t bEvil))
-{
-    RT_NOREF(pFpuState, pu64Dst, pu64Src, bEvil);
-    AssertReleaseFailed();
-}
-
-
-IEM_DECL_IMPL_DEF(void, iemAImpl_pshufhw,(PCX86FXSTATE pFpuState, PRTUINT128U pu128Dst, PCRTUINT128U pu128Src, uint8_t bEvil))
-{
-    RT_NOREF(pFpuState, pu128Dst, pu128Src, bEvil);
-    AssertReleaseFailed();
-}
-
-
-IEM_DECL_IMPL_DEF(void, iemAImpl_pshuflw,(PCX86FXSTATE pFpuState, PRTUINT128U pu128Dst, PCRTUINT128U pu128Src, uint8_t bEvil))
-{
-    RT_NOREF(pFpuState, pu128Dst, pu128Src, bEvil);
-    AssertReleaseFailed();
-}
-
-
-IEM_DECL_IMPL_DEF(void, iemAImpl_pshufd,(PCX86FXSTATE pFpuState, PRTUINT128U pu128Dst, PCRTUINT128U pu128Src, uint8_t bEvil))
-{
-    RT_NOREF(pFpuState, pu128Dst, pu128Src, bEvil);
-    AssertReleaseFailed();
-}
+IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil))
+{
+    uint64_t const uSrc = *puSrc;
+    ASMCompilerBarrier();
+    *puDst = RT_MAKE_U64_FROM_U16(uSrc >> (( bEvil       & 3) * 16),
+                                  uSrc >> (((bEvil >> 2) & 3) * 16),
+                                  uSrc >> (((bEvil >> 4) & 3) * 16),
+                                  uSrc >> (((bEvil >> 6) & 3) * 16));
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_pshufhw_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil))
+{
+    puDst->QWords.qw0 = puSrc->QWords.qw0;
+    uint64_t const uSrc = puSrc->QWords.qw1;
+    ASMCompilerBarrier();
+    puDst->QWords.qw1 = RT_MAKE_U64_FROM_U16(uSrc >> (( bEvil       & 3) * 16),
+                                             uSrc >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc >> (((bEvil >> 6) & 3) * 16));
+}
+
+#endif
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vpshufhw_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil))
+{
+    puDst->QWords.qw0 = puSrc->QWords.qw0;
+    uint64_t const uSrc1 = puSrc->QWords.qw1;
+    puDst->QWords.qw2 = puSrc->QWords.qw2;
+    uint64_t const uSrc3 = puSrc->QWords.qw3;
+    ASMCompilerBarrier();
+    puDst->QWords.qw1 = RT_MAKE_U64_FROM_U16(uSrc1 >> (( bEvil       & 3) * 16),
+                                             uSrc1 >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc1 >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc1 >> (((bEvil >> 6) & 3) * 16));
+    puDst->QWords.qw3 = RT_MAKE_U64_FROM_U16(uSrc3 >> (( bEvil       & 3) * 16),
+                                             uSrc3 >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc3 >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc3 >> (((bEvil >> 6) & 3) * 16));
+}
+
+#ifdef IEM_WITHOUT_ASSEMBLY
+IEM_DECL_IMPL_DEF(void, iemAImpl_pshuflw_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil))
+{
+    puDst->QWords.qw1 = puSrc->QWords.qw1;
+    uint64_t const uSrc = puSrc->QWords.qw0;
+    ASMCompilerBarrier();
+    puDst->QWords.qw0 = RT_MAKE_U64_FROM_U16(uSrc >> (( bEvil       & 3) * 16),
+                                             uSrc >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc >> (((bEvil >> 6) & 3) * 16));
+
+}
+#endif
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vpshuflw_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil))
+{
+    puDst->QWords.qw3 = puSrc->QWords.qw3;
+    uint64_t const uSrc2 = puSrc->QWords.qw2;
+    puDst->QWords.qw1 = puSrc->QWords.qw1;
+    uint64_t const uSrc0 = puSrc->QWords.qw0;
+    ASMCompilerBarrier();
+    puDst->QWords.qw0 = RT_MAKE_U64_FROM_U16(uSrc0 >> (( bEvil       & 3) * 16),
+                                             uSrc0 >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc0 >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc0 >> (((bEvil >> 6) & 3) * 16));
+    puDst->QWords.qw2 = RT_MAKE_U64_FROM_U16(uSrc2 >> (( bEvil       & 3) * 16),
+                                             uSrc2 >> (((bEvil >> 2) & 3) * 16),
+                                             uSrc2 >> (((bEvil >> 4) & 3) * 16),
+                                             uSrc2 >> (((bEvil >> 6) & 3) * 16));
+
+}
+
+
+#ifdef IEM_WITHOUT_ASSEMBLY
+IEM_DECL_IMPL_DEF(void, iemAImpl_pshufd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil))
+{
+    RTUINT128U const uSrc = *puSrc;
+    ASMCompilerBarrier();
+    puDst->au32[0] = uSrc.au32[bEvil & 3];
+    puDst->au32[1] = uSrc.au32[(bEvil >> 2) & 3];
+    puDst->au32[2] = uSrc.au32[(bEvil >> 4) & 3];
+    puDst->au32[3] = uSrc.au32[(bEvil >> 6) & 3];
+}
+#endif
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vpshufd_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil))
+{
+    RTUINT256U const uSrc = *puSrc;
+    ASMCompilerBarrier();
+    puDst->au128[0].au32[0] = uSrc.au128[0].au32[bEvil & 3];
+    puDst->au128[0].au32[1] = uSrc.au128[0].au32[(bEvil >> 2) & 3];
+    puDst->au128[0].au32[2] = uSrc.au128[0].au32[(bEvil >> 4) & 3];
+    puDst->au128[0].au32[3] = uSrc.au128[0].au32[(bEvil >> 6) & 3];
+    puDst->au128[1].au32[0] = uSrc.au128[1].au32[bEvil & 3];
+    puDst->au128[1].au32[1] = uSrc.au128[1].au32[(bEvil >> 2) & 3];
+    puDst->au128[1].au32[2] = uSrc.au128[1].au32[(bEvil >> 4) & 3];
+    puDst->au128[1].au32[3] = uSrc.au128[1].au32[(bEvil >> 6) & 3];
+}
+
+
+#ifdef IEM_WITHOUT_ASSEMBLY
 
 /* PUNPCKHxxx */
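The C fallbacks make the immediate's encoding easy to read: bits 2i and 2i+1 of bEvil select which source lane feeds destination lane i. A self-contained model of the 64-bit case (plain stdint types instead of the IEM declarations; the shift/mask logic mirrors iemAImpl_pshufw_u64 above):

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t PshufW(uint64_t uSrc, uint8_t bEvil)
    {
        uint64_t uDst = 0;
        for (unsigned i = 0; i < 4; i++)  /* lane i <- source lane (bEvil >> 2i) & 3 */
            uDst |= ((uSrc >> (((bEvil >> (i * 2)) & 3) * 16)) & 0xffff) << (i * 16);
        return uDst;
    }

    int main(void)
    {
        /* 0x1b == 00 01 10 11b: selectors 3,2,1,0 -> reverses the four words. */
        printf("%016llx\n", (unsigned long long)PshufW(UINT64_C(0x0004000300020001), 0x1b));
        /* prints 0001000200030004 */
        return 0;
    }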
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
--- r95482
+++ r95487
@@ -357 +357 @@
     'RM_REG':       ( 'ModR/M', [ 'reg', 'rm' ],          '11 mr/reg',  ),
     'RM_MEM':       ( 'ModR/M', [ 'reg', 'rm' ],          '!11 mr/reg', ),
+    'RMI':          ( 'ModR/M', [ 'reg', 'rm', 'imm' ],   '',           ),
+    'RMI_REG':      ( 'ModR/M', [ 'reg', 'rm', 'imm' ],   '11 mr/reg',  ),
+    'RMI_MEM':      ( 'ModR/M', [ 'reg', 'rm', 'imm' ],   '!11 mr/reg', ),
     'MR':           ( 'ModR/M', [ 'rm', 'reg' ],          '',           ),
     'MR_REG':       ( 'ModR/M', [ 'rm', 'reg' ],          '11 mr/reg',  ),
@@ -535 +538 @@
     'forced_32_op_size_x86': 'DISOPTYPE_FORCED_32_OP_SIZE_X86',  ##< Forced 32 bits operand size; regardless of prefix bytes
                                                                  ##  (only in 16 & 32 bits mode!)
-    'sse':               'DISOPTYPE_SSE',        ##< SSE,SSE2,SSE3,AVX,++ instruction. Not implemented yet!
+    'avx':               'DISOPTYPE_AVX',        ##< AVX,AVX2,++ instruction. Not implemented yet!
+    'sse':               'DISOPTYPE_SSE',        ##< SSE,SSE2,SSE3,++ instruction. Not implemented yet!
     'mmx':               'DISOPTYPE_MMX',        ##< MMX,MMXExt,3DNow,++ instruction. Not implemented yet!
     'fpu':               'DISOPTYPE_FPU',        ##< FPU instruction. Not implemented yet!
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
--- r95482
+++ r95487
@@ -4051 +4051 @@
 FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
 {
-    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
+    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
@@ -4067 +4067 @@
         IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
         IEM_MC_PREPARE_FPU_USAGE();
-        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
-        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
+        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
+        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
+        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
+        IEM_MC_FPU_TO_MMX_MODE();
         IEM_MC_ADVANCE_RIP();
         IEM_MC_END();
@@ -4092 +4094 @@
         IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
         IEM_MC_PREPARE_FPU_USAGE();
-        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
-}
-
-/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
-FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
-{
-    IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
+        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
+        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
+        IEM_MC_FPU_TO_MMX_MODE();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/**
+ * Common worker for SSE2 instructions on the forms:
+ *      pshufd      xmm1, xmm2/mem128, imm8
+ *      pshufhw     xmm1, xmm2/mem128, imm8
+ *      pshuflw     xmm1, xmm2/mem128, imm8
+ *
+ * Proper alignment of the 128-bit operand is enforced.
+ * Exceptions type 4. SSE2 cpuid checks.
+ */
+FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
+{
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
@@ -4115 +4127 @@
 
         IEM_MC_BEGIN(3, 0);
-        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
+        IEM_MC_ARG(PRTUINT128U,         puDst, 0);
+        IEM_MC_ARG(PCRTUINT128U,        puSrc, 1);
         IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
         IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
         IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
         IEM_MC_ADVANCE_RIP();
         IEM_MC_END();
@@ -4132 +4144 @@
          */
         IEM_MC_BEGIN(3, 2);
-        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
+        IEM_MC_ARG(PRTUINT128U,                 puDst,      0);
         IEM_MC_LOCAL(RTUINT128U,                uSrc);
-        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
+        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
         IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
@@ -4145 +4157 @@
         IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
         IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
-}
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
+}
+
 
 /** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
 FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
 {
-    IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-    {
-        /*
-         * Register, register.
-         */
-        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-        IEM_MC_BEGIN(3, 0);
-        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
-        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    else
-    {
-        /*
-         * Register, memory.
-         */
-        IEM_MC_BEGIN(3, 2);
-        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
-        IEM_MC_LOCAL(RTUINT128U,                uSrc);
-        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
-        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-
-        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
-}
+    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
+}
+
 
 /** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
 FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
 {
-    IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-    {
-        /*
-         * Register, register.
-         */
-        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-        IEM_MC_BEGIN(3, 0);
-        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
-        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    else
-    {
-        /*
-         * Register, memory.
-         */
-        IEM_MC_BEGIN(3, 2);
-        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
-        IEM_MC_LOCAL(RTUINT128U,                uSrc);
-        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
-        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-
-        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
+    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
 }
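The rewrite above folds three near-identical decode bodies into one worker that receives the arithmetic helper as a function pointer; each opcode handler shrinks to a one-line tail call. A simplified stand-in for the pattern (plain C instead of the IEM_MC microcode DSL; names are illustrative):

    #include <stdint.h>

    typedef struct { uint32_t au32[4]; } U128;
    typedef void (*PFNPSHUFU128)(U128 *puDst, const U128 *puSrc, uint8_t bEvil);

    /* Same lane-selection rule as iemAImpl_pshufd_u128; copy the source
     * first so puDst == puSrc still works. */
    static void PshufD(U128 *puDst, const U128 *puSrc, uint8_t bEvil)
    {
        U128 const uSrc = *puSrc;
        for (unsigned i = 0; i < 4; i++)
            puDst->au32[i] = uSrc.au32[(bEvil >> (i * 2)) & 3];
    }

    /* The decode/dispatch logic lives once; the real worker also handles
     * the register vs. memory forms and exception checks elided here. */
    static int CommonPshufXX(PFNPSHUFU128 pfnWorker, U128 *puDst, const U128 *puSrc, uint8_t bEvil)
    {
        pfnWorker(puDst, puSrc, bEvil);
        return 0; /* VINF_SUCCESS */
    }

    static int OpPshufd(U128 *puDst, const U128 *puSrc, uint8_t bEvil)
    {
        return CommonPshufXX(PshufD, puDst, puSrc, bEvil); /* mirrors FNIEMOP_CALL_1 */
    }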
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
--- r95482
+++ r95487
@@ -2506 +2506 @@
 /*  Opcode VEX.0F 0x70 - invalid */
 
+
+/**
+ * Common worker for AVX/AVX2 instructions on the forms:
+ *     - vpxxx    xmm0, xmm2/mem128, imm8
+ *     - vpxxx    ymm0, ymm2/mem256, imm8
+ *
+ * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
+ */
+FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        if (pVCpu->iem.s.uVexLength)
+        {
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
+            IEM_MC_BEGIN(3, 2);
+            IEM_MC_LOCAL(RTUINT256U,            uDst);
+            IEM_MC_LOCAL(RTUINT256U,            uSrc);
+            IEM_MC_ARG_LOCAL_REF(PRTUINT256U,   puDst, uDst, 0);
+            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);
+            IEM_MC_ARG_CONST(uint8_t,           bEvilArg, /*=*/ bEvil, 2);
+            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
+            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
+            IEM_MC_BEGIN(3, 0);
+            IEM_MC_ARG(PRTUINT128U,             puDst, 0);
+            IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
+            IEM_MC_ARG_CONST(uint8_t,           bEvilArg, /*=*/ bEvil, 2);
+            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
+            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        if (pVCpu->iem.s.uVexLength)
+        {
+            IEM_MC_BEGIN(3, 3);
+            IEM_MC_LOCAL(RTUINT256U,            uDst);
+            IEM_MC_LOCAL(RTUINT256U,            uSrc);
+            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
+            IEM_MC_ARG_LOCAL_REF(PRTUINT256U,   puDst, uDst, 0);
+            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
+            IEM_MC_ARG_CONST(uint8_t,           bEvilArg, /*=*/ bEvil, 2);
+            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
+            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(3, 1);
+            IEM_MC_LOCAL(RTUINT128U,            uSrc);
+            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
+            IEM_MC_ARG(PRTUINT128U,             puDst, 0);
+            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  puSrc, uSrc, 1);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
+            IEM_MC_ARG_CONST(uint8_t,           bEvilArg, /*=*/ bEvil, 2);
+            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
+            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
+
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+    }
+    return VINF_SUCCESS;
+}
+
+
 /** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
-FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib);
-//FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
-//{
-//    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//
-//        IEM_MC_BEGIN(3, 0);
-//        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-//        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
-//        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Register, memory.
-//         */
-//        IEM_MC_BEGIN(3, 2);
-//        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
-//        IEM_MC_LOCAL(RTUINT128U,                uSrc);
-//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
-//        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//
-//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
+    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
+
+}
+
 
 /** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
-FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib);
-//FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
-//{
-//    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//
-//        IEM_MC_BEGIN(3, 0);
-//        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-//        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
-//        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Register, memory.
-//         */
-//        IEM_MC_BEGIN(3, 2);
-//        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
-//        IEM_MC_LOCAL(RTUINT128U,                uSrc);
-//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
-//        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//
-//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
+    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
+
+}
+
 
 /** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
-FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib);
-//FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
-//{
-//    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
-//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-//    {
-//        /*
-//         * Register, register.
-//         */
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//
-//        IEM_MC_BEGIN(3, 0);
-//        IEM_MC_ARG(PRTUINT128U,         pDst, 0);
-//        IEM_MC_ARG(PCRTUINT128U,        pSrc, 1);
-//        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    else
-//    {
-//        /*
-//         * Register, memory.
-//         */
-//        IEM_MC_BEGIN(3, 2);
-//        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
-//        IEM_MC_LOCAL(RTUINT128U,                uSrc);
-//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
-//        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-//
-//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-//        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
-//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-//
-//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-//        IEM_MC_PREPARE_SSE_USAGE();
-//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
-//
-//        IEM_MC_ADVANCE_RIP();
-//        IEM_MC_END();
-//    }
-//    return VINF_SUCCESS;
-//}
+FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
+    return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
+}
 
 
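Each VEX wrapper passes two helpers: the 128-bit worker and, via IEM_SELECT_HOST_OR_FALLBACK, either the assembly _u256 routine (when the host CPU can execute it) or the portable _u256_fallback. A hedged stand-in for that selection (the real macro resolves against the host feature set; the names below are illustrative):

    #include <stdint.h>

    typedef void (*PFNPSHUFU256)(void *puDst, const void *puSrc, uint8_t bEvil);

    static PFNPSHUFU256 SelectU256Worker(int fHostHasAvx2,
                                         PFNPSHUFU256 pfnAsm, PFNPSHUFU256 pfnFallback)
    {
        return fHostHasAvx2 ? pfnAsm : pfnFallback;
    }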
trunk/src/VBox/VMM/include/IEMInternal.h
--- r95483
+++ r95487
@@ -935 +935 @@
 /** ModR/M: reg, r/m (memory) */
 #define IEMOPFORM_RM_MEM                (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
+/** ModR/M: reg, r/m */
+#define IEMOPFORM_RMI                   1
+/** ModR/M: reg, r/m (register) */
+#define IEMOPFORM_RMI_REG               (IEMOPFORM_RM | IEMOPFORM_MOD3)
+/** ModR/M: reg, r/m (memory) */
+#define IEMOPFORM_RMI_MEM               (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
 /** ModR/M: r/m, reg */
-#define IEMOPFORM_MR                    1
+#define IEMOPFORM_MR                    2
 /** ModR/M: r/m (register), reg */
 #define IEMOPFORM_MR_REG                (IEMOPFORM_MR | IEMOPFORM_MOD3)
@@ -942 +948 @@
 #define IEMOPFORM_MR_MEM                (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
 /** ModR/M: r/m only */
-#define IEMOPFORM_M                     2
+#define IEMOPFORM_M                     3
 /** ModR/M: r/m only (register). */
 #define IEMOPFORM_M_REG                 (IEMOPFORM_M | IEMOPFORM_MOD3)
@@ -948 +954 @@
 #define IEMOPFORM_M_MEM                 (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
 /** ModR/M: reg only */
-#define IEMOPFORM_R                     3
+#define IEMOPFORM_R                     4
 
 /** VEX+ModR/M: reg, r/m */
-#define IEMOPFORM_VEX_RM                4
+#define IEMOPFORM_VEX_RM                8
 /** VEX+ModR/M: reg, r/m (register) */
 #define IEMOPFORM_VEX_RM_REG            (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
@@ -957 +963 @@
 #define IEMOPFORM_VEX_RM_MEM            (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: r/m, reg */
-#define IEMOPFORM_VEX_MR                5
+#define IEMOPFORM_VEX_MR                9
 /** VEX+ModR/M: r/m (register), reg */
 #define IEMOPFORM_VEX_MR_REG            (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
@@ -963 +969 @@
 #define IEMOPFORM_VEX_MR_MEM            (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: r/m only */
-#define IEMOPFORM_VEX_M                 6
+#define IEMOPFORM_VEX_M                 10
 /** VEX+ModR/M: r/m only (register). */
 #define IEMOPFORM_VEX_M_REG             (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
@@ -969 +975 @@
 #define IEMOPFORM_VEX_M_MEM             (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: reg only */
-#define IEMOPFORM_VEX_R                 7
+#define IEMOPFORM_VEX_R                 11
 /** VEX+ModR/M: reg, vvvv, r/m */
-#define IEMOPFORM_VEX_RVM               8
+#define IEMOPFORM_VEX_RVM               12
 /** VEX+ModR/M: reg, vvvv, r/m (register). */
 #define IEMOPFORM_VEX_RVM_REG           (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
@@ -977 +983 @@
 #define IEMOPFORM_VEX_RVM_MEM           (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: reg, r/m, vvvv */
-#define IEMOPFORM_VEX_RMV               9
+#define IEMOPFORM_VEX_RMV               13
 /** VEX+ModR/M: reg, r/m, vvvv (register). */
 #define IEMOPFORM_VEX_RMV_REG           (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
@@ -983 +989 @@
 #define IEMOPFORM_VEX_RMV_MEM           (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: reg, r/m, imm8 */
-#define IEMOPFORM_VEX_RMI               10
+#define IEMOPFORM_VEX_RMI               14
 /** VEX+ModR/M: reg, r/m, imm8 (register). */
 #define IEMOPFORM_VEX_RMI_REG           (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
@@ -989 +995 @@
 #define IEMOPFORM_VEX_RMI_MEM           (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M: r/m, vvvv, reg */
-#define IEMOPFORM_VEX_MVR               11
+#define IEMOPFORM_VEX_MVR               15
 /** VEX+ModR/M: r/m, vvvv, reg (register) */
 #define IEMOPFORM_VEX_MVR_REG           (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
@@ -995 +1001 @@
 #define IEMOPFORM_VEX_MVR_MEM           (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
 /** VEX+ModR/M+/n: vvvv, r/m */
-#define IEMOPFORM_VEX_VM                12
+#define IEMOPFORM_VEX_VM                16
 /** VEX+ModR/M+/n: vvvv, r/m (register) */
 #define IEMOPFORM_VEX_VM_REG            (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
@@ -1002 +1008 @@
 
 /** Fixed register instruction, no R/M. */
-#define IEMOPFORM_FIXED                 16
+#define IEMOPFORM_FIXED                 32
 
 /** The r/m is a register. */
@@ -1835 +1841 @@
 /** @name Media (SSE/MMX/AVX) operation: Packed Shuffle Stuff (evil)
  * @{ */
-typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUF,(PCX86FXSTATE pFpuState, PRTUINT128U pu128Dst,
-                                                       PCRTUINT128U pu128Src, uint8_t bEvil));
-typedef FNIEMAIMPLMEDIAPSHUF *PFNIEMAIMPLMEDIAPSHUF;
-FNIEMAIMPLMEDIAPSHUF iemAImpl_pshufhw, iemAImpl_pshuflw, iemAImpl_pshufd;
-IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw,(PCX86FXSTATE pFpuState, uint64_t *pu64Dst, uint64_t const *pu64Src, uint8_t bEvil));
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
+typedef FNIEMAIMPLMEDIAPSHUFU128 *PFNIEMAIMPLMEDIAPSHUFU128;
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
+typedef FNIEMAIMPLMEDIAPSHUFU256 *PFNIEMAIMPLMEDIAPSHUFU256;
+IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil));
+FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_pshufhw_u128, iemAImpl_pshuflw_u128, iemAImpl_pshufd_u128;
+#ifndef IEM_WITHOUT_ASSEMBLY
+FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256, iemAImpl_vpshuflw_u256, iemAImpl_vpshufd_u256;
+#endif
+FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256_fallback, iemAImpl_vpshuflw_u256_fallback, iemAImpl_vpshufd_u256_fallback;
 /** @} */
trunk/src/VBox/VMM/include/IEMMc.h
--- r95483
+++ r95487
@@ -390 +390 @@
 #define IEM_MC_REF_MREG_U32_CONST(a_pu32Dst, a_iMReg) \
         (a_pu32Dst) = ((uint32_t const *)&pVCpu->cpum.GstCtx.XState.x87.aRegs[(a_iMReg)].mmx)
+#define IEM_MC_MODIFIED_MREG(a_iMReg) \
+    do { pVCpu->cpum.GstCtx.XState.x87.aRegs[(a_iMReg)].au32[2] = 0xffff; } while (0)
+#define IEM_MC_MODIFIED_MREG_BY_REF(a_pu64Dst) \
+    do { ((uint32_t *)(a_pu64Dst))[2] = 0xffff; } while (0)
 
 #define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) \
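The new IEM_MC_MODIFIED_MREG* macros implement the x87/MMX aliasing rule: a write to an MMX register must set the aliased FPU register's exponent/sign field (bits 64..79) to all ones, which is what storing 0xffff into au32[2] does. A sketch with simplified types (the union layout here is an assumption for illustration):

    #include <stdint.h>

    typedef union
    {
        uint32_t au32[4];  /* 80-bit x87 register padded to 16 bytes */
        uint64_t mmx;      /* low 64 bits: mantissa == the MMX value */
    } X87REGSKETCH;

    static void WriteMmxReg(X87REGSKETCH *pReg, uint64_t uValue)
    {
        pReg->mmx     = uValue;
        pReg->au32[2] = 0xffff; /* exponent + sign := all ones, as the macro does */
    }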
trunk/src/VBox/VMM/include/IEMOpHlp.h
--- r95441
+++ r95487
@@ -415 +415 @@
         { /* likely */ } \
         else \
-            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
+            return IEMOP_RAISE_INVALID_OPCODE(); \
     } while (0)
@@ -432 +432 @@
         { /* likely */ } \
         else \
-            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
+            return IEMOP_RAISE_INVALID_OPCODE(); \
     } while (0)
@@ -448 +448 @@
         { /* likely */ } \
         else \
-            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
+            return IEMOP_RAISE_INVALID_OPCODE(); \
     } while (0)
@@ -466 +466 @@
         { /* likely */ } \
         else \
-            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
+            return IEMOP_RAISE_INVALID_OPCODE(); \
+    } while (0)
+
+/**
+ * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
+ * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
+ * register 0, if in real or v8086 mode, or if the a_fFeature is present in the
+ * guest CPU.
+ */
+#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature) \
+    do \
+    { \
+        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
+                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
+                      && !pVCpu->iem.s.uVex3rdReg \
+                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
+                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
+        { /* likely */ } \
+        else \
+            return IEMOP_RAISE_INVALID_OPCODE(); \
     } while (0)
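The new macro accepts decoding only when no lock/repz/repnz/operand-size/rex prefixes are present, VEX.vvvv encodes register 0, the CPU is not in real or v8086 mode, and the guest CPU reports the given feature (fAvx or fAvx2 above); otherwise it raises #UD. The predicate's shape as a plain function (stand-in flag values; the real code uses the IEM_OP_PRF_* constants and IEMOP_RAISE_INVALID_OPCODE):

    #include <stdbool.h>
    #include <stdint.h>

    #define PRF_LOCK    UINT32_C(0x01)
    #define PRF_REPZ    UINT32_C(0x02)
    #define PRF_REPNZ   UINT32_C(0x04)
    #define PRF_SIZE_OP UINT32_C(0x08)
    #define PRF_REX     UINT32_C(0x10)

    static bool IsVexNoVvvvDecodeValid(uint32_t fPrefixes, uint8_t uVex3rdReg,
                                       bool fRealOrV86Mode, bool fGuestHasFeature)
    {
        return !(fPrefixes & (PRF_LOCK | PRF_REPZ | PRF_REPNZ | PRF_SIZE_OP | PRF_REX))
            && uVex3rdReg == 0     /* VEX.vvvv must encode register 0 */
            && !fRealOrV86Mode     /* VEX is invalid in real/v8086 mode */
            && fGuestHasFeature;   /* e.g. fAvx or fAvx2 */
    }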
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
--- r95473
+++ r95487
@@ -133 +133 @@
 #define IEMOP_HLP_DONE_VEX_DECODING_L0()                            do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV()                       do { } while (0)
+#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature)          do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV()                do { } while (0)
 #define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES()    do { } while (0)
@@ -330 +331 @@
 #define iemCImpl_FarJmp                 NULL
 
-#define iemAImpl_pshufhw                NULL
-#define iemAImpl_pshuflw                NULL
-#define iemAImpl_pshufd                 NULL
+#define iemAImpl_pshufhw_u128           NULL
+#define iemAImpl_pshuflw_u128           NULL
+#define iemAImpl_pshufd_u128            NULL
 
 /** @} */
@@ -539 +540 @@
 #define IEM_MC_REF_MREG_U64_CONST(a_pu64Dst, a_iMReg)   do { (a_pu64Dst) = (uint64_t const *)((uintptr_t)0); CHK_PTYPE(uint64_t const *, a_pu64Dst); (void)fFpuWrite; (void)fMcBegin; } while (0)
 #define IEM_MC_REF_MREG_U32_CONST(a_pu32Dst, a_iMReg)   do { (a_pu32Dst) = (uint32_t const *)((uintptr_t)0); CHK_PTYPE(uint32_t const *, a_pu32Dst); (void)fFpuWrite; (void)fMcBegin; } while (0)
+#define IEM_MC_MODIFIED_MREG(a_iMReg)                   do { (void)fFpuWrite; (void)fMcBegin; } while (0)
+#define IEM_MC_MODIFIED_MREG_BY_REF(a_pu64Dst)          do { AssertCompile(sizeof(*a_pu64Dst) <= sizeof(uint64_t)); (void)fFpuWrite; (void)fMcBegin; } while (0)
 
 #define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg)    do { (a_u128Value) = g_u128Zero; CHK_TYPE(RTUINT128U, a_u128Value); (void)fSseRead; (void)fMcBegin; } while (0)