Changeset 66965 in vbox for trunk/src/VBox
- Timestamp:
- May 19, 2017 9:38:05 AM (8 years ago)
- Location:
- trunk/src/VBox/VMM
- Files:
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r66950 r66965 1381 1381 } 1382 1382 1383 #ifdef IEM_WITH_VEX 1383 1384 1384 1385 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovsldup_256_rr,(PX86XSAVEAREA pXState, uint8_t iYRegDst, uint8_t iYRegSrc)) … … 1407 1408 } 1408 1409 1410 #endif /* IEM_WITH_VEX */ 1411 1409 1412 1410 1413 IEM_DECL_IMPL_DEF(void, iemAImpl_movshdup,(PCX86FXSTATE pFpuState, PRTUINT128U puDst, PCRTUINT128U puSrc)) … … 1425 1428 } 1426 1429 1427 1430 #ifdef IEM_WITH_VEX 1431 1432 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovddup_256_rr,(PX86XSAVEAREA pXState, uint8_t iYRegDst, uint8_t iYRegSrc)) 1433 { 1434 pXState->x87.aXMM[iYRegDst].au64[0] = pXState->x87.aXMM[iYRegSrc].au64[0]; 1435 pXState->x87.aXMM[iYRegDst].au64[1] = pXState->x87.aXMM[iYRegSrc].au64[0]; 1436 pXState->u.YmmHi.aYmmHi[iYRegDst].au64[0] = pXState->u.YmmHi.aYmmHi[iYRegSrc].au64[0]; 1437 pXState->u.YmmHi.aYmmHi[iYRegDst].au64[1] = pXState->u.YmmHi.aYmmHi[iYRegSrc].au64[0]; 1438 } 1439 1440 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovddup_256_rm,(PX86XSAVEAREA pXState, uint8_t iYRegDst, PCRTUINT256U pSrc)) 1441 { 1442 pXState->x87.aXMM[iYRegDst].au64[0] = pSrc->au64[0]; 1443 pXState->x87.aXMM[iYRegDst].au64[1] = pSrc->au64[0]; 1444 pXState->u.YmmHi.aYmmHi[iYRegDst].au64[0] = pSrc->au64[2]; 1445 pXState->u.YmmHi.aYmmHi[iYRegDst].au64[1] = pSrc->au64[2]; 1446 } 1447 1448 #endif /* IEM_WITH_VEX */ 1449 -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
r66957 r66965 847 847 848 848 /** 849 * @ opcode 0x12 850 * @ oppfx 0xf2 851 * @ opcpuid sse3 852 * @ opgroup og_sse3_pcksclr_datamove 853 * @ opxcpttype 5 854 * @ optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 -> 855 * op1=0x22222222111111112222222211111111 849 * @opcode 0x12 850 * @oppfx 0xf2 851 * @opcpuid avx 852 * @opgroup og_avx_pcksclr_datamove 853 * @opxcpttype 5 854 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111 855 * -> op1=0x22222222111111112222222211111111 856 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111 857 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111 858 * @oponly 856 859 */ 857 FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); 858 //FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx) 859 //{ 860 // IEMOP_MNEMONIC2(RM, VMOVDDUP, vmovddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 861 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 862 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 863 // { 864 // /* 865 // * Register, register. 866 // */ 867 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 868 // IEM_MC_BEGIN(2, 0); 869 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 870 // IEM_MC_ARG(uint64_t, uSrc, 1); 871 // 872 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 873 // IEM_MC_PREPARE_SSE_USAGE(); 874 // 875 // IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 876 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 877 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 878 // 879 // IEM_MC_ADVANCE_RIP(); 880 // IEM_MC_END(); 881 // } 882 // else 883 // { 884 // /* 885 // * Register, memory. 
886 // */ 887 // IEM_MC_BEGIN(2, 2); 888 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 889 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 890 // IEM_MC_ARG(uint64_t, uSrc, 1); 891 // 892 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 893 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 894 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 895 // IEM_MC_PREPARE_SSE_USAGE(); 896 // 897 // IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 898 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 899 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 900 // 901 // IEM_MC_ADVANCE_RIP(); 902 // IEM_MC_END(); 903 // } 904 // return VINF_SUCCESS; 905 //} 860 FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx) 861 { 862 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 864 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 865 { 866 /* 867 * Register, register. 
868 */ 869 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV(); 870 if (pVCpu->iem.s.uVexLength == 0) 871 { 872 IEM_MC_BEGIN(2, 0); 873 IEM_MC_ARG(PRTUINT128U, puDst, 0); 874 IEM_MC_ARG(uint64_t, uSrc, 1); 875 876 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 877 IEM_MC_PREPARE_AVX_USAGE(); 878 879 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 880 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 881 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 882 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 883 884 IEM_MC_ADVANCE_RIP(); 885 IEM_MC_END(); 886 } 887 else 888 { 889 IEM_MC_BEGIN(3, 0); 890 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS(); 891 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1); 892 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2); 893 894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 895 IEM_MC_PREPARE_AVX_USAGE(); 896 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc); 897 898 IEM_MC_ADVANCE_RIP(); 899 IEM_MC_END(); 900 } 901 } 902 else 903 { 904 /* 905 * Register, memory. 
906 */ 907 if (pVCpu->iem.s.uVexLength == 0) 908 { 909 IEM_MC_BEGIN(2, 2); 910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 911 IEM_MC_ARG(PRTUINT128U, puDst, 0); 912 IEM_MC_ARG(uint64_t, uSrc, 1); 913 914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 915 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV(); 916 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 917 IEM_MC_PREPARE_AVX_USAGE(); 918 919 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 920 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 921 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 922 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 923 924 IEM_MC_ADVANCE_RIP(); 925 IEM_MC_END(); 926 } 927 else 928 { 929 IEM_MC_BEGIN(3, 2); 930 IEM_MC_LOCAL(RTUINT256U, uSrc); 931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 932 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS(); 933 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1); 934 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2); 935 936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 937 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV(); 938 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 939 IEM_MC_PREPARE_AVX_USAGE(); 940 941 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 942 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc); 943 944 IEM_MC_ADVANCE_RIP(); 945 IEM_MC_END(); 946 } 947 } 948 return VINF_SUCCESS; 949 } 950 906 951 907 952 -
trunk/src/VBox/VMM/include/IEMInternal.h
r66957 r66965 1643 1643 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovsldup_256_rr,(PX86XSAVEAREA pXState, uint8_t iYRegDst, uint8_t iYRegSrc)); 1644 1644 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovsldup_256_rm,(PX86XSAVEAREA pXState, uint8_t iYRegDst, PCRTUINT256U pSrc)); 1645 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovddup_256_rr,(PX86XSAVEAREA pXState, uint8_t iYRegDst, uint8_t iYRegSrc)); 1646 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovddup_256_rm,(PX86XSAVEAREA pXState, uint8_t iYRegDst, PCRTUINT256U pSrc)); 1645 1647 1646 1648 /** @} */
Note:
See TracChangeset
for help on using the changeset viewer.