Changeset 105295 in vbox
Timestamp:  Jul 12, 2024 11:07:20 AM (5 months ago)
Location:   trunk/src/VBox/VMM
Files:      7 edited
Legend (diff markup used below):
  +  added line
  -  removed line
     unchanged line; "…" marks skipped unchanged lines
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r105283 → r105295

 IEMIMPL_MEDIA_OPT_F2 sha256msg2, 0


+;;
 ; Media instruction working on one full sized and one half sized register (lower half).
…
 ;
 ; @param    1       The instruction
+; @param    2       Flag whether to add a 256-bit variant (1) or not (0).
 ;
 ; @param    A0      Pointer to the destination media register size operand (output).
…
 ; @param    A2      Pointer to the second source media register size operand (input).
 ;
-%macro IEMIMPL_MEDIA_OPT_F3 1
+%macro IEMIMPL_MEDIA_OPT_F3 2
 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u128, 12
         PROLOGUE_3_ARGS
…
 ENDPROC iemAImpl_ %+ %1 %+ _u128

+%if %2 == 1
 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u256, 12
         PROLOGUE_3_ARGS
…
         EPILOGUE_3_ARGS
 ENDPROC iemAImpl_ %+ %1 %+ _u256
-%endmacro
-
-IEMIMPL_MEDIA_OPT_F3 vpshufb
-IEMIMPL_MEDIA_OPT_F3 vpand
-IEMIMPL_MEDIA_OPT_F3 vpminub
-IEMIMPL_MEDIA_OPT_F3 vpminuw
-IEMIMPL_MEDIA_OPT_F3 vpminud
-IEMIMPL_MEDIA_OPT_F3 vpminsb
-IEMIMPL_MEDIA_OPT_F3 vpminsw
-IEMIMPL_MEDIA_OPT_F3 vpminsd
-IEMIMPL_MEDIA_OPT_F3 vpmaxub
-IEMIMPL_MEDIA_OPT_F3 vpmaxuw
-IEMIMPL_MEDIA_OPT_F3 vpmaxud
-IEMIMPL_MEDIA_OPT_F3 vpmaxsb
-IEMIMPL_MEDIA_OPT_F3 vpmaxsw
-IEMIMPL_MEDIA_OPT_F3 vpmaxsd
-IEMIMPL_MEDIA_OPT_F3 vpandn
-IEMIMPL_MEDIA_OPT_F3 vpor
-IEMIMPL_MEDIA_OPT_F3 vpxor
-IEMIMPL_MEDIA_OPT_F3 vpcmpeqb
-IEMIMPL_MEDIA_OPT_F3 vpcmpeqw
-IEMIMPL_MEDIA_OPT_F3 vpcmpeqd
-IEMIMPL_MEDIA_OPT_F3 vpcmpeqq
-IEMIMPL_MEDIA_OPT_F3 vpcmpgtb
-IEMIMPL_MEDIA_OPT_F3 vpcmpgtw
-IEMIMPL_MEDIA_OPT_F3 vpcmpgtd
-IEMIMPL_MEDIA_OPT_F3 vpcmpgtq
-IEMIMPL_MEDIA_OPT_F3 vpaddb
-IEMIMPL_MEDIA_OPT_F3 vpaddw
-IEMIMPL_MEDIA_OPT_F3 vpaddd
-IEMIMPL_MEDIA_OPT_F3 vpaddq
-IEMIMPL_MEDIA_OPT_F3 vpsubb
-IEMIMPL_MEDIA_OPT_F3 vpsubw
-IEMIMPL_MEDIA_OPT_F3 vpsubd
-IEMIMPL_MEDIA_OPT_F3 vpsubq
-IEMIMPL_MEDIA_OPT_F3 vpacksswb
-IEMIMPL_MEDIA_OPT_F3 vpackssdw
-IEMIMPL_MEDIA_OPT_F3 vpackuswb
-IEMIMPL_MEDIA_OPT_F3 vpackusdw
-IEMIMPL_MEDIA_OPT_F3 vpmullw
-IEMIMPL_MEDIA_OPT_F3 vpmulld
-IEMIMPL_MEDIA_OPT_F3 vpmulhw
-IEMIMPL_MEDIA_OPT_F3 vpmulhuw
-IEMIMPL_MEDIA_OPT_F3 vpavgb
-IEMIMPL_MEDIA_OPT_F3 vpavgw
-IEMIMPL_MEDIA_OPT_F3 vpsignb
-IEMIMPL_MEDIA_OPT_F3 vpsignw
-IEMIMPL_MEDIA_OPT_F3 vpsignd
-IEMIMPL_MEDIA_OPT_F3 vphaddw
-IEMIMPL_MEDIA_OPT_F3 vphaddd
-IEMIMPL_MEDIA_OPT_F3 vphsubw
-IEMIMPL_MEDIA_OPT_F3 vphsubd
-IEMIMPL_MEDIA_OPT_F3 vphaddsw
-IEMIMPL_MEDIA_OPT_F3 vphsubsw
-IEMIMPL_MEDIA_OPT_F3 vpmaddubsw
-IEMIMPL_MEDIA_OPT_F3 vpmulhrsw
-IEMIMPL_MEDIA_OPT_F3 vpsadbw
-IEMIMPL_MEDIA_OPT_F3 vpmuldq
-IEMIMPL_MEDIA_OPT_F3 vpmuludq
-IEMIMPL_MEDIA_OPT_F3 vunpcklps
-IEMIMPL_MEDIA_OPT_F3 vunpcklpd
-IEMIMPL_MEDIA_OPT_F3 vunpckhps
-IEMIMPL_MEDIA_OPT_F3 vunpckhpd
-IEMIMPL_MEDIA_OPT_F3 vpsubsb
-IEMIMPL_MEDIA_OPT_F3 vpsubsw
-IEMIMPL_MEDIA_OPT_F3 vpsubusb
-IEMIMPL_MEDIA_OPT_F3 vpsubusw
-IEMIMPL_MEDIA_OPT_F3 vpaddusb
-IEMIMPL_MEDIA_OPT_F3 vpaddusw
-IEMIMPL_MEDIA_OPT_F3 vpaddsb
-IEMIMPL_MEDIA_OPT_F3 vpaddsw
-IEMIMPL_MEDIA_OPT_F3 vpermilps
-IEMIMPL_MEDIA_OPT_F3 vpermilpd
-IEMIMPL_MEDIA_OPT_F3 vpmaddwd
-IEMIMPL_MEDIA_OPT_F3 vpsrlvd
-IEMIMPL_MEDIA_OPT_F3 vpsrlvq
-IEMIMPL_MEDIA_OPT_F3 vpsravd
-IEMIMPL_MEDIA_OPT_F3 vpsllvd
-IEMIMPL_MEDIA_OPT_F3 vpsllvq
+%endif
+%endmacro
+
+IEMIMPL_MEDIA_OPT_F3 vpshufb, 1
+IEMIMPL_MEDIA_OPT_F3 vpand, 1
+IEMIMPL_MEDIA_OPT_F3 vpminub, 1
+IEMIMPL_MEDIA_OPT_F3 vpminuw, 1
+IEMIMPL_MEDIA_OPT_F3 vpminud, 1
+IEMIMPL_MEDIA_OPT_F3 vpminsb, 1
+IEMIMPL_MEDIA_OPT_F3 vpminsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpminsd, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxub, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxuw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxud, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxsb, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaxsd, 1
+IEMIMPL_MEDIA_OPT_F3 vpandn, 1
+IEMIMPL_MEDIA_OPT_F3 vpor, 1
+IEMIMPL_MEDIA_OPT_F3 vpxor, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpeqb, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpeqw, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpeqd, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpeqq, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpgtb, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpgtw, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpgtd, 1
+IEMIMPL_MEDIA_OPT_F3 vpcmpgtq, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddb, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddw, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddd, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddq, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubb, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubw, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubq, 1
+IEMIMPL_MEDIA_OPT_F3 vpacksswb, 1
+IEMIMPL_MEDIA_OPT_F3 vpackssdw, 1
+IEMIMPL_MEDIA_OPT_F3 vpackuswb, 1
+IEMIMPL_MEDIA_OPT_F3 vpackusdw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmullw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmulld, 1
+IEMIMPL_MEDIA_OPT_F3 vpmulhw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmulhuw, 1
+IEMIMPL_MEDIA_OPT_F3 vpavgb, 1
+IEMIMPL_MEDIA_OPT_F3 vpavgw, 1
+IEMIMPL_MEDIA_OPT_F3 vpsignb, 1
+IEMIMPL_MEDIA_OPT_F3 vpsignw, 1
+IEMIMPL_MEDIA_OPT_F3 vpsignd, 1
+IEMIMPL_MEDIA_OPT_F3 vphaddw, 1
+IEMIMPL_MEDIA_OPT_F3 vphaddd, 1
+IEMIMPL_MEDIA_OPT_F3 vphsubw, 1
+IEMIMPL_MEDIA_OPT_F3 vphsubd, 1
+IEMIMPL_MEDIA_OPT_F3 vphaddsw, 1
+IEMIMPL_MEDIA_OPT_F3 vphsubsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaddubsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmulhrsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpsadbw, 1
+IEMIMPL_MEDIA_OPT_F3 vpmuldq, 1
+IEMIMPL_MEDIA_OPT_F3 vpmuludq, 1
+IEMIMPL_MEDIA_OPT_F3 vunpcklps, 1
+IEMIMPL_MEDIA_OPT_F3 vunpcklpd, 1
+IEMIMPL_MEDIA_OPT_F3 vunpckhps, 1
+IEMIMPL_MEDIA_OPT_F3 vunpckhpd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubsb, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubusb, 1
+IEMIMPL_MEDIA_OPT_F3 vpsubusw, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddusb, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddusw, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddsb, 1
+IEMIMPL_MEDIA_OPT_F3 vpaddsw, 1
+IEMIMPL_MEDIA_OPT_F3 vpermilps, 1
+IEMIMPL_MEDIA_OPT_F3 vpermilpd, 1
+IEMIMPL_MEDIA_OPT_F3 vpmaddwd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsrlvd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsrlvq, 1
+IEMIMPL_MEDIA_OPT_F3 vpsravd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsllvd, 1
+IEMIMPL_MEDIA_OPT_F3 vpsllvq, 1
+
+IEMIMPL_MEDIA_OPT_F3 vaesenc, 0
+IEMIMPL_MEDIA_OPT_F3 vaesenclast, 0
+IEMIMPL_MEDIA_OPT_F3 vaesdec, 0
+IEMIMPL_MEDIA_OPT_F3 vaesdeclast, 0
+
+
+;;
+; VAESIMC instruction.
+;
+; @param    A0      Pointer to the first media register size operand (output).
+; @param    A1      Pointer to the second media register size operand (input).
+;
+BEGINPROC_FASTCALL iemAImpl_vaesimc_u128, 8
+        PROLOGUE_2_ARGS
+        IEMIMPL_SSE_PROLOGUE
+
+        movdqu  xmm0, [A0]
+        movdqu  xmm1, [A1]
+        vaesimc xmm0, xmm1
+        movdqu  [A0], xmm0
+
+        IEMIMPL_SSE_EPILOGUE
+        EPILOGUE_2_ARGS
+ENDPROC iemAImpl_vaesimc_u128
+
+
+;;
+; VAESKEYGENASSIST instruction.
+;
+; @param    A0      Pointer to the first media register size operand (output).
+; @param    A1      Pointer to the second media register size operand (input).
+; @param    A2      8-bit immediate for the round constant.
+;
+BEGINPROC_FASTCALL iemAImpl_vaeskeygenassist_u128, 16
+        PROLOGUE_3_ARGS
+        IEMIMPL_AVX_PROLOGUE
+
+        movzx   A2, A2_8                ; must clear top bits
+        movdqu  xmm0, [A0]
+        movdqu  xmm1, [A1]
+        IEMIMPL_CALL_JUMP_TABLE_TARGET T1, A2, 8
+        movdqu  [A0], xmm0
+
+        IEMIMPL_AVX_EPILOGUE
+        EPILOGUE_3_ARGS
+%assign bImm 0
+%rep 256
+.imm %+ bImm:
+        IBT_ENDBRxx_WITHOUT_NOTRACK
+        vaeskeygenassist xmm0, xmm1, bImm
+        ret
+        int3
+%assign bImm bImm + 1
+%endrep
+.immEnd:
+ENDPROC iemAImpl_vaeskeygenassist_u128
+

 ;;
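The iemAImpl_vaeskeygenassist_u128 helper above cannot receive the round constant in a register, because AESKEYGENASSIST encodes it as an instruction immediate; that is why the %rep 256 block emits one stub per possible immediate value and IEMIMPL_CALL_JUMP_TABLE_TARGET jumps to the entry selected by A2. The same restriction shows up in C, where the corresponding intrinsic only accepts a compile-time constant. The snippet below is an illustration only, not part of the changeset; it assumes a host with AES-NI and a compiler flag such as -maes (GCC/Clang).

    /* Prints the AESKEYGENASSIST result for RCON = 0x01. */
    #include <stdio.h>
    #include <stdint.h>
    #include <wmmintrin.h>                  /* AES-NI intrinsics */

    int main(void)
    {
        __m128i uKey = _mm_set_epi32(0x0f0e0d0c, 0x0b0a0908, 0x07060504, 0x03020100);

        /* The round constant must be a literal immediate; it cannot come from a variable. */
        __m128i uAssist = _mm_aeskeygenassist_si128(uKey, 0x01);

        uint32_t au32[4];
        _mm_storeu_si128((__m128i *)au32, uAssist);
        printf("%08x %08x %08x %08x\n", au32[3], au32[2], au32[1], au32[0]);
        return 0;
    }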
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r105283 → r105295



+IEM_DECL_IMPL_DEF(void, iemAImpl_vaeskeygenassist_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm))
+{
+    iemAImpl_aeskeygenassist_u128_fallback(puDst, puSrc, bImm);
+}
+
+
 /**
  * [V]AESIMC
  */
 IEM_DECL_IMPL_DEF(void, iemAImpl_aesimc_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc))
+{
+    *puDst = iemAImpl_aes_inv_mix_col(puSrc); /* Src = Key. */
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vaesimc_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc))
 {
     *puDst = iemAImpl_aes_inv_mix_col(puSrc); /* Src = Key. */
…


+IEM_DECL_IMPL_DEF(void, iemAImpl_vaesenc_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2))
+{
+    RTUINT128U uTmp;
+
+    uTmp = iemAImpl_aes_shift_rows(puSrc1, iemAImpl_aes_shift_rows_tbl); /* Dst = state. */
+    uTmp = iemAImpl_aes_sub_bytes(&uTmp, iemAImpl_aes_sbox);
+    uTmp = iemAImpl_aes_mix_col(&uTmp);
+    uTmp.au64[0] ^= puSrc2->au64[0]; /* Src = Round Key. */
+    uTmp.au64[1] ^= puSrc2->au64[1];
+
+    *puDst = uTmp;
+}
+
+
 /**
  * [V]AESENCLAST
…
     uTmp.au64[0] ^= puSrc->au64[0]; /* Src = Round Key. */
     uTmp.au64[1] ^= puSrc->au64[1];
+
+    *puDst = uTmp;
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vaesenclast_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2))
+{
+    RTUINT128U uTmp;
+
+    uTmp = iemAImpl_aes_shift_rows(puSrc1, iemAImpl_aes_shift_rows_tbl); /* Dst = state. */
+    uTmp = iemAImpl_aes_sub_bytes(&uTmp, iemAImpl_aes_sbox);
+    uTmp.au64[0] ^= puSrc2->au64[0]; /* Src = Round Key. */
+    uTmp.au64[1] ^= puSrc2->au64[1];

     *puDst = uTmp;
…


+IEM_DECL_IMPL_DEF(void, iemAImpl_vaesdec_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2))
+{
+    RTUINT128U uTmp;
+
+    uTmp = iemAImpl_aes_shift_rows(puSrc1, iemAImpl_aes_inv_shift_rows_tbl); /* Dst = state. */
+    uTmp = iemAImpl_aes_sub_bytes(&uTmp, iemAImpl_aes_inv_sbox);
+    uTmp = iemAImpl_aes_inv_mix_col(&uTmp);
+    uTmp.au64[0] ^= puSrc2->au64[0]; /* Src = Round Key. */
+    uTmp.au64[1] ^= puSrc2->au64[1];
+
+    *puDst = uTmp;
+}
+
+
 /**
  * [V]AESDECLAST
…
     *puDst = uTmp;
 }
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_vaesdeclast_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2))
+{
+    RTUINT128U uTmp;
+
+    uTmp = iemAImpl_aes_shift_rows(puSrc1, iemAImpl_aes_inv_shift_rows_tbl); /* Dst = state. */
+    uTmp = iemAImpl_aes_sub_bytes(&uTmp, iemAImpl_aes_inv_sbox);
+    uTmp.au64[0] ^= puSrc2->au64[0]; /* Src = Round Key. */
+    uTmp.au64[1] ^= puSrc2->au64[1];
+
+    *puDst = uTmp;
+}
+

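The new C fallbacks above follow the textbook AES round structure: vaesenc/vaesdec apply ShiftRows (or its inverse), SubBytes, MixColumns and an XOR with the round key, while the *last variants leave out the MixColumns step. On a host with AES-NI each of these rounds is a single instruction, which is what the native helpers in IEMAllAImpl.asm use. The stand-alone snippet below is illustrative only (not VirtualBox code); it assumes an AES-NI capable host and a compiler flag such as -maes.

    /* One AES encryption round and one final round on a fixed state/round key. */
    #include <stdio.h>
    #include <stdint.h>
    #include <wmmintrin.h>

    int main(void)
    {
        __m128i uState    = _mm_set_epi64x(0x8899aabbccddeeffULL, 0x0011223344556677ULL);
        __m128i uRoundKey = _mm_set_epi64x(0x0f0e0d0c0b0a0908ULL, 0x0706050403020100ULL);

        /* Middle round: ShiftRows, SubBytes, MixColumns, then XOR with the round key. */
        __m128i uEnc     = _mm_aesenc_si128(uState, uRoundKey);
        /* Final round: same, but without MixColumns (matches the *last fallbacks above). */
        __m128i uEncLast = _mm_aesenclast_si128(uState, uRoundKey);

        uint64_t au64[2];
        _mm_storeu_si128((__m128i *)au64, uEnc);
        printf("aesenc:     %016llx %016llx\n", (unsigned long long)au64[1], (unsigned long long)au64[0]);
        _mm_storeu_si128((__m128i *)au64, uEncLast);
        printf("aesenclast: %016llx %016llx\n", (unsigned long long)au64[1], (unsigned long long)au64[0]);
        return 0;
    }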
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h
r104784 → r105295

  * @{
  */
+
+/**
+ * Common worker for AESNI/AVX instructions on the forms:
+ *     - vaesxxx    xmm0, xmm1, xmm2/mem128
+ *
+ * Exceptions type 4. AVX and AESNI cpuid check for 128-bit operation.
+ */
+FNIEMOP_DEF_1(iemOpCommonAvxAesNi_Vx_Hx_Wx, PFNIEMAIMPLMEDIAOPTF3U128, pfnU128)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_LOCAL(RTUINT128U, uDst);
+        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
+        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
+        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
+        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
+        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_LOCAL(RTUINT128U, uDst);
+        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
+        IEM_MC_LOCAL(RTUINT128U, uSrc2);
+        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
+        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
+        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
+
+        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
+        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
+        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
+        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+

 /* Opcode VEX.0F38 0x00 - invalid. */
…
 /* Opcode VEX.66.0F38 0xd9 - invalid. */
 /* Opcode VEX.66.0F38 0xda - invalid. */
+
+
 /** Opcode VEX.66.0F38 0xdb. */
-FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_vaesimc_Vdq_Wdq)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
+        IEM_MC_ARG(PRTUINT128U, puDst, 0);
+        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
+        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_ARG(PRTUINT128U, puDst, 0);
+        IEM_MC_LOCAL(RTUINT128U, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
+        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+
+
 /** Opcode VEX.66.0F38 0xdc. */
-FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_vaesenc_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC3(VEX_RVM, VAESENC, vaesenc, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
+    return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
+}
+
+
 /** Opcode VEX.66.0F38 0xdd. */
-FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_vaesenclast_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC3(VEX_RVM, VAESENCLAST, vaesenclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
+    return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
+}
+
+
 /** Opcode VEX.66.0F38 0xde. */
-FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_vaesdec_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC3(VEX_RVM, VAESDEC, vaesdec, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
+    return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
+}
+
+
 /** Opcode VEX.66.0F38 0xdf. */
-FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);
+FNIEMOP_DEF(iemOp_vaesdeclast_Vdq_Wdq)
+{
+    IEMOP_MNEMONIC3(VEX_RVM, VAESDECLAST, vaesdeclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
+    return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
+                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
+}
+

 /* Opcode VEX.66.0F38 0xe0 - invalid. */
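The replacement opcode handlers above select their worker once, via IEM_SELECT_HOST_OR_FALLBACK(fAesNi, ...), and pass it as pfnU128 to the shared iemOpCommonAvxAesNi_Vx_Hx_Wx body. The sketch below only shows the general shape of that technique; the names and the XOR placeholder bodies are hypothetical and are not the VirtualBox macro or helpers.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct MYU128 { uint64_t au64[2]; } MYU128;
    typedef void FNMEDIAOPTF3(MYU128 *puDst, const MYU128 *puSrc1, const MYU128 *puSrc2);

    /* Placeholder workers: the real ones perform the AES round (assembly helper on
       AES-NI hosts, C fallback otherwise); a plain XOR keeps this sketch runnable. */
    static void workerHost(MYU128 *puDst, const MYU128 *puSrc1, const MYU128 *puSrc2)
    {
        puDst->au64[0] = puSrc1->au64[0] ^ puSrc2->au64[0];
        puDst->au64[1] = puSrc1->au64[1] ^ puSrc2->au64[1];
    }
    static void workerFallback(MYU128 *puDst, const MYU128 *puSrc1, const MYU128 *puSrc2)
    {
        puDst->au64[0] = puSrc1->au64[0] ^ puSrc2->au64[0];
        puDst->au64[1] = puSrc1->au64[1] ^ puSrc2->au64[1];
    }

    /* The choice is made while decoding; the common body only ever sees the pointer. */
    static FNMEDIAOPTF3 *selectWorker(bool fHostHasAesNi)
    {
        return fHostHasAesNi ? workerHost : workerFallback;
    }

    int main(void)
    {
        MYU128 uDst, uSrc1 = {{ 1, 2 }}, uSrc2 = {{ 3, 4 }};
        FNMEDIAOPTF3 *pfnU128 = selectWorker(false);    /* pretend the host lacks AES-NI */
        pfnU128(&uDst, &uSrc1, &uSrc2);
        printf("%llx %llx\n", (unsigned long long)uDst.au64[1], (unsigned long long)uDst.au64[0]);
        return 0;
    }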
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h
r105279 → r105295

 /* Opcode VEX.66.0F3A 0xdd - invalid */
 /* Opcode VEX.66.0F3A 0xde - invalid */
+
+
 /* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
-FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
+FNIEMOP_DEF(iemOp_vaeskeygen_Vdq_Wdq_Ib)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_ARG(PRTUINT128U, puDst, 0);
+        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
+                                 puDst, puSrc, bImmArg);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
+
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+        IEM_MC_ARG(PRTUINT128U, puDst, 0);
+        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+        IEM_MC_LOCAL(RTUINT128U, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
+                                 puDst, puSrc, bImmArg);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}


trunk/src/VBox/VMM/include/IEMInternal.h
r105291 → r105295


 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback;
-FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback;
-FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
-FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback;
-FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;
+FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback;
+FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
+FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback;
+FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;

 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback;

-FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;
+FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;

 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback;
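The header fix above moves the four two-source VEX helpers from the two-operand function-pointer type to the three-operand one, and vaeskeygenassist from the three-operand immediate type back to the two-operand immediate type. The typedef sketch below is hypothetical (not the VirtualBox declarations) and only illustrates why the arity differs: legacy AESENC xmm1, xmm2/m128 overwrites its first source, VEX VAESENC xmm1, xmm2, xmm3/m128 has a separate destination and two sources, and VAESKEYGENASSIST takes a single source plus an 8-bit immediate.

    #include <stdint.h>

    typedef struct MYU128 { uint64_t au64[2]; } MYU128;

    /* Two pointers: destination plus one source (for destructive forms the destination is also read). */
    typedef void FNMEDIAOPTF2U128(MYU128 *puDst, const MYU128 *puSrc);
    /* Three pointers: separate destination, two read-only sources (non-destructive VEX forms). */
    typedef void FNMEDIAOPTF3U128(MYU128 *puDst, const MYU128 *puSrc1, const MYU128 *puSrc2);
    /* Two pointers plus an 8-bit immediate, as used for [V]AESKEYGENASSIST. */
    typedef void FNMEDIAOPTF2U128IMM8(MYU128 *puDst, const MYU128 *puSrc, uint8_t bImm);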
trunk/src/VBox/VMM/include/IEMOpHlp.h
r104784 → r105295

  * a_fFeature is not present in the guest CPU.
  */
+#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(a_fFeature, a_fFeature2) \
+    do \
+    { \
+        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
+                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
+                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
+                      && pVCpu->iem.s.uVexLength == 0 \
+                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
+                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
+        { /* likely */ } \
+        else \
+            IEMOP_RAISE_INVALID_OPCODE_RET(); \
+    } while (0)
+
+
+/**
+ * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
+ * repnz or size prefixes are present, or if in real or v8086 mode, or if the
+ * a_fFeature is not present in the guest CPU.
+ */
 #define IEMOP_HLP_DONE_VEX_DECODING_L1_EX(a_fFeature) \
     do \
…
                       && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                       && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
+        { /* likely */ } \
+        else \
+            IEMOP_RAISE_INVALID_OPCODE_RET(); \
+    } while (0)
+
+/**
+ * Done decoding VEX, no V, L=0.
+ * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
+ * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature or a_fFeature2
+ * is not present in the guest CPU.
+ */
+#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(a_fFeature, a_fFeature2) \
+    do \
+    { \
+        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
+                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
+                      && pVCpu->iem.s.uVexLength == 0 \
+                      && pVCpu->iem.s.uVex3rdReg == 0 \
+                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
+                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
+                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
         { /* likely */ } \
         else \
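The two *_EX_2 macros added above extend the existing decode checks so that two guest CPUID features (here fAvx and fAesNi) must both be present, on top of the usual prefix, mode and VEX.L restrictions (plus VEX.vvvv for the NO_VVVV variant); anything else raises #UD. The function below is a hypothetical, simplified rendering of that predicate with invented field and flag names, not the real macro.

    #include <stdbool.h>
    #include <stdint.h>

    typedef struct DECODESTATE
    {
        uint32_t fPrefixes;         /* decoded legacy/REX prefix mask                 */
        uint8_t  uVexLength;        /* VEX.L                                          */
        uint8_t  uVex3rdReg;        /* VEX.vvvv, normalised so that 0 means "unused"  */
        bool     fRealOrV86Mode;
        bool     fGuestHasAvx;
        bool     fGuestHasAesNi;
    } DECODESTATE;

    #define MY_PRF_ILLEGAL_MASK UINT32_C(0x1f)  /* stand-in for LOCK/REPZ/REPNZ/OPSIZE/REX */

    static bool isVexAesFormLegal(const DECODESTATE *pState)
    {
        return (pState->fPrefixes & MY_PRF_ILLEGAL_MASK) == 0
            && pState->uVexLength == 0           /* 128-bit form only          */
            && pState->uVex3rdReg == 0           /* VEX.vvvv must be unused    */
            && !pState->fRealOrV86Mode
            && pState->fGuestHasAvx              /* both features are required */
            && pState->fGuestHasAesNi;
    }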
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r105283 → r105295

 #define IEMOP_HLP_DONE_VEX_DECODING_EX(a_fFeature) do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeature) do { } while (0)
+#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(a_fFeature, a_fFeature2) do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L1_EX(a_fFeature) do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L0() do { } while (0)
…
 #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV() do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) do { } while (0)
+#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(a_fFeature, a_fFeature2) do { } while (0)
 #define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) do { } while (0)
 #define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() do { } while (0)