Changeset 65754 in vbox for trunk/src/VBox
Timestamp: Feb 13, 2017 9:14:10 AM
svn:sync-xref-src-repo-rev: 113469
File: 1 edited
Legend: lines marked "-" were removed, lines marked "+" were added; unmarked lines are unchanged context.
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
--- trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h (r65753)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h (r65754)

-/** Opcode 0x0f 0x7e.*/
-FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
+/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
+FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE */
-            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
-            else
-                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /* greg, XMM */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 1);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                {
-                    IEM_MC_LOCAL(uint64_t, u64Tmp);
-                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
-                }
-                else
-                {
-                    IEM_MC_LOCAL(uint32_t, u32Tmp);
-                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
-                }
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /* [mem], XMM */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                {
-                    IEM_MC_LOCAL(uint64_t, u64Tmp);
-                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
-                }
-                else
-                {
-                    IEM_MC_LOCAL(uint32_t, u32Tmp);
-                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
-                }
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        case 0: /* MMX */
-            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
-            else
-                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /* greg, MMX */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 1);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
-                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                {
-                    IEM_MC_LOCAL(uint64_t, u64Tmp);
-                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
-                }
-                else
-                {
-                    IEM_MC_LOCAL(uint32_t, u32Tmp);
-                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
-                }
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /* [mem], MMX */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
-                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-                {
-                    IEM_MC_LOCAL(uint64_t, u64Tmp);
-                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
-                }
-                else
-                {
-                    IEM_MC_LOCAL(uint32_t, u32Tmp);
-                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
-                }
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
-}
+    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
+    else
+        IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /* greg, MMX */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 1);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_LOCAL(uint64_t, u64Tmp);
+            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
+        }
+        else
+        {
+            IEM_MC_LOCAL(uint32_t, u32Tmp);
+            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
+        }
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /* [mem], MMX */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_LOCAL(uint64_t, u64Tmp);
+            IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
+        }
+        else
+        {
+            IEM_MC_LOCAL(uint32_t, u32Tmp);
+            IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
+        }
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
+FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
+    else
+        IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /* greg, XMM */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 1);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_LOCAL(uint64_t, u64Tmp);
+            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
+        }
+        else
+        {
+            IEM_MC_LOCAL(uint32_t, u32Tmp);
+            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
+        }
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /* [mem], XMM */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_LOCAL(uint64_t, u64Tmp);
+            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
+        }
+        else
+        {
+            IEM_MC_LOCAL(uint32_t, u32Tmp);
+            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
+        }
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
+FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
+/*  Opcode 0xf2 0x0f 0x7e - invalid */

…

     /* 0x7c */  iemOp_InvalidNeedRM,  iemOp_vhaddpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,  iemOp_vhaddps_Vps_Hps_Wps,
     /* 0x7d */  iemOp_InvalidNeedRM,  iemOp_vhsubpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,  iemOp_vhsubps_Vps_Hps_Wps,
-    /* 0x7e */  IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
+    /* 0x7e */  iemOp_movd_q_Ey_Pd,   iemOp_vmovd_q_Ey_Vy,        iemOp_vmovq_Vq_Wq,    iemOp_InvalidNeedRM,
     /* 0x7f */  IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
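In short, r65754 splits the combined 0x0f 0x7e handler, which previously switched on the mandatory prefix internally, into one handler per prefix column (none, 0x66, 0xF3, 0xF2) and lists them individually in the opcode dispatch table instead of repeating the same handler four times via IEMOP_X4. The sketch below is a minimal, self-contained illustration of that table-dispatch pattern; it is not VBox code, and every name in it (PFNOPHANDLER, g_apfnOp0f7e, the Handle* functions) is invented for illustration.

    /*
     * Standalone sketch of per-prefix table dispatch (NOT VBox code; all
     * names invented).  A two-byte opcode such as 0x0F 0x7E decodes to one
     * of four forms, selected by the mandatory prefix: none/0x66/0xF3/0xF2.
     */
    #include <stdio.h>

    typedef int (*PFNOPHANDLER)(void);

    static int HandleMovdqEyPd(void)  { return puts("movd/movq Ey,Pd  (MMX, no prefix)"); }
    static int HandleVMovdqEyVy(void) { return puts("movd/movq Ey,Vy  (SSE2, 0x66)"); }
    static int HandleVMovqVqWq(void)  { return puts("movq Vq,Wq       (0xF3)"); }
    static int HandleInvalid(void)    { return puts("#UD              (0xF2 is invalid)"); }

    /* Before this changeset, one combined handler filled all four columns
     * (the IEMOP_X4 pattern) and switched on the prefix internally.  After
     * it, each prefix column carries its own handler, so the prefix switch
     * disappears from the handler body. */
    static PFNOPHANDLER const g_apfnOp0f7e[4] =
    {
        /* none */ HandleMovdqEyPd,
        /* 0x66 */ HandleVMovdqEyVy,
        /* 0xF3 */ HandleVMovqVqWq,
        /* 0xF2 */ HandleInvalid,
    };

    int main(void)
    {
        /* A real decoder would compute the column index from the last
           mandatory prefix seen; here we simply exercise all four columns. */
        for (unsigned idxPrefix = 0; idxPrefix < 4; idxPrefix++)
            g_apfnOp0f7e[idxPrefix]();
        return 0;
    }

One practical consequence visible in the diff: with per-column entries, an invalid encoding such as 0xF2 0x0F 0x7E is rejected by a shared table entry (iemOp_InvalidNeedRM) rather than by a default case inside the combined handler.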