Changeset 104369 in vbox
- Timestamp: Apr 19, 2024 7:23:34 AM
- File: 1 edited
Legend:
- Lines prefixed with '+' were added
- Lines prefixed with '-' were removed
- Unprefixed lines are unmodified
trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h
r104113 → r104369:

@@ old line 728 → new lines 728-770 @@

 /** Opcode VEX.66.0F3A 0x17. */
-FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
+FNIEMOP_DEF(iemOp_vextractps_Ed_Vdq_Ib)
+{
+    //IEMOP_MNEMONIC3(VEX_MRI_REG, VEXTRACTPS, vextractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * greg32, XMM, imm8.
+         */
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(uint32_t, uSrc);
+
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
+        IEM_MC_STORE_GREG_U32( IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * [mem32], XMM, imm8.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(uint32_t, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
+        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}

@@ old line 891 → new lines 933-983 @@

 /** Opcode VEX.66.0F3A 0x21, */
-FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
+FNIEMOP_DEF(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib)
+{
+    //IEMOP_MNEMONIC4(VEX_RVMR_REG, VINSERTPS, vinsertps, Vdq, Hdq, UdqMd, Ib, DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO); /// @todo
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * XMM, XMM, XMM, imm8.
+         */
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(RTUINT128U, uSrc1);
+        IEM_MC_LOCAL(uint32_t, uSrc2);
+
+        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
+        IEM_MC_FETCH_XREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
+        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
+        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
+        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * XMM, XMM, [mem32], imm8.
+         */
+        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_LOCAL(RTUINT128U, uSrc1);
+        IEM_MC_LOCAL(uint32_t, uSrc2);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
+        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
+        IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
+        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
+        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
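For readers unfamiliar with the IEM microcode macros, the following standalone C sketch mirrors the operand semantics the two new handlers encode: how the imm8 byte selects the source dword for VEXTRACTPS, and how VINSERTPS combines its source-select, destination-select and zero-mask fields. The XREG128 type and the Ref* helper names are illustrative stand-ins, not VirtualBox APIs.

#include <stdint.h>

/* Illustrative 128-bit XMM value modelled as four dwords (stand-in for RTUINT128U). */
typedef struct { uint32_t au32[4]; } XREG128;

/* VEXTRACTPS: return dword (bImm & 3) of the XMM source; the handler above then
 * stores it to a 32-bit general register or to [mem32]. */
uint32_t RefVExtractPs(XREG128 const *pSrc, uint8_t bImm)
{
    return pSrc->au32[bImm & 3];
}

/* VINSERTPS, register form: pick dword (bImm >> 6) & 3 of uSrc2, copy uSrc1 into
 * the destination, overwrite dword (bImm >> 4) & 3 with the picked value, then
 * zero every dword whose bit is set in the low nibble of bImm; this follows the
 * FETCH / STORE / CLEAR_XREG_U32_MASK sequence in the changeset. For the memory
 * form the 32-bit operand is used directly instead of the (bImm >> 6) selection. */
XREG128 RefVInsertPs(XREG128 const *pSrc1, XREG128 const *pSrc2, uint8_t bImm)
{
    uint32_t const uDword = pSrc2->au32[(bImm >> 6) & 3];
    XREG128        Dst    = *pSrc1;
    Dst.au32[(bImm >> 4) & 3] = uDword;
    for (unsigned i = 0; i < 4; i++)
        if (bImm & (1u << i))
            Dst.au32[i] = 0;
    return Dst;
}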