Changeset 65755 in vbox
- Timestamp: Feb 13, 2017 9:17:43 AM (8 years ago)
- svn:sync-xref-src-repo-rev: 113470
- File: 1 edited
Legend:
- Unmodified (context lines, prefixed with a space below)
- Added (prefixed with +)
- Removed (prefixed with -)
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
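This revision splits the combined handler for opcode 0x0f 0x7f, which previously selected among its MMX and SSE forms by switching on the mandatory prefix inside the function body, into one decoder function per prefix form: movq Qq,Pq (no prefix), vmovdqa Wx,Vx (0x66), vmovdqu Wx,Vx (0xf3), and invalid (0xf2). The four-column two-byte opcode table is updated to name the new functions directly.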
r65754 → r65755

@@ -3964,100 +3964,130 @@
-/** Opcode 0x0f 0x7f. */
-FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    bool fAligned = false;
-    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
-            fAligned = true;
-            /* fall thru */
-        case IEM_OP_PRF_REPZ: /* SSE unaligned */
-            if (fAligned)
-                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
-            else
-                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 0);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
-                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(uint128_t, u128Tmp);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-
-                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                if (fAligned)
-                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
-                else
-                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        case 0: /* MMX */
-            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
-
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
-                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 1);
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
-                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
-
-                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
-}
+/** Opcode 0x0f 0x7f - movq Qq, Pq */
+FNIEMOP_DEF(iemOp_movq_Qq_Pq)
+{
+    IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 1);
+        IEM_MC_LOCAL(uint64_t, u64Tmp);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t, u64Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
+
+        IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
+FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
+{
+    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint128_t, u128Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+
+        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
+FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint128_t, u128Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+
+        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/* Opcode 0xf2 0x0f 0x7f - invalid */

@@ -8293,5 +8323,5 @@
 /* 0x7d */ iemOp_InvalidNeedRM,  iemOp_vhsubpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,  iemOp_vhsubps_Vps_Hps_Wps,
 /* 0x7e */ iemOp_movd_q_Ey_Pd,   iemOp_vmovd_q_Ey_Vy,        iemOp_vmovq_Vq_Wq,    iemOp_InvalidNeedRM,
-/* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
+/* 0x7f */ iemOp_movq_Qq_Pq,     iemOp_vmovdqa_Wx_Vx,        iemOp_vmovdqu_Wx_Vx,  iemOp_InvalidNeedRM,

 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
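Every handler in the diff starts with the same test, (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT), which asks whether the ModRM byte's mod field is 3, i.e. whether r/m names a register rather than a memory operand. The standalone C++ sketch below spells out the field layout behind that test; the constants encode the architectural mod/reg/rm bit positions and are assumed to match what the X86_MODRM_* macros express, and main() is only an illustrative driver.

    #include <cstdint>
    #include <cstdio>

    /* ModRM field layout per the x86 spec: mod = bits 7:6, reg = bits 5:3,
       rm = bits 2:0.  Written out here for illustration; assumed to match
       the X86_MODRM_* macros used in the diff. */
    static const uint8_t  MODRM_MOD_MASK  = 0xc0; /* cf. X86_MODRM_MOD_MASK  */
    static const unsigned MODRM_MOD_SHIFT = 6;    /* cf. X86_MODRM_MOD_SHIFT */
    static const unsigned MODRM_REG_SHIFT = 3;    /* cf. X86_MODRM_REG_SHIFT */
    static const uint8_t  MODRM_REG_SMASK = 0x07; /* reg mask after shifting */
    static const uint8_t  MODRM_RM_MASK   = 0x07; /* cf. X86_MODRM_RM_MASK   */

    int main()
    {
        uint8_t const bRm = 0xfa; /* binary 11 111 010: mod=3, reg=7, rm=2 */

        unsigned const uReg = (bRm >> MODRM_REG_SHIFT) & MODRM_REG_SMASK;
        unsigned const uRm  = bRm & MODRM_RM_MASK;

        if ((bRm & MODRM_MOD_MASK) == (3 << MODRM_MOD_SHIFT))
            /* mod == 3: r/m names a register, the "register, register"
               branch in the handlers above. */
            std::printf("register form: reg=%u rm=%u\n", uReg, uRm);
        else
            /* mod != 3: r/m (plus SIB/displacement bytes) encodes a memory
               operand, resolved by IEM_MC_CALC_RM_EFF_ADDR above. */
            std::printf("memory form: reg=%u\n", uReg);
        return 0;
    }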
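The IEM_MC_* statements make each handler read like straight-line microcode: open a block, declare locals, raise any pending exceptions, move data, advance RIP, close the block. In an interpreter build such statements can be thought of as thin macros over a plain C block; the expansion below is a hypothetical illustration of that style, not VBox's actual IEMMc.h definitions.

    #include <cstdint>
    #include <cstdio>

    /* Hypothetical stand-ins for a few IEM_MC_* statements, illustrating the
       "macro DSL over a C block" style; not VBox's real definitions. */
    #define MY_MC_BEGIN()               {
    #define MY_MC_LOCAL(a_Type, a_Name) a_Type a_Name
    #define MY_MC_FETCH(a_Dst, a_Src)   (a_Dst) = (a_Src)
    #define MY_MC_STORE(a_Dst, a_Src)   (a_Dst) = (a_Src)
    #define MY_MC_END()                 }

    static uint64_t g_au64MmxRegs[8]; /* toy MMX register file */

    int main()
    {
        g_au64MmxRegs[7] = 0x1122334455667788ULL;

        MY_MC_BEGIN()
            MY_MC_LOCAL(uint64_t, u64Tmp);
            MY_MC_FETCH(u64Tmp, g_au64MmxRegs[7]); /* like IEM_MC_FETCH_MREG_U64 */
            MY_MC_STORE(g_au64MmxRegs[2], u64Tmp); /* like IEM_MC_STORE_MREG_U64 */
        MY_MC_END()

        std::printf("mm2 = %#llx\n", (unsigned long long)g_au64MmxRegs[2]);
        return 0;
    }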
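The table change in the second hunk is the structural point of the revision: each two-byte opcode row holds four handlers, one per mandatory-prefix column (none, 0x66, 0xf3, 0xf2), so the prefix dispatch that the old handler performed with a switch on fPrefixes now happens in the table lookup itself. A minimal sketch of that scheme follows, with hypothetical handler and table names rather than VBox's:

    #include <cstdio>

    /* One column per mandatory prefix, mirroring the four-entry rows in the
       new table.  All names here are illustrative only. */
    enum PrefixCol { kCol_None = 0, kCol_66 = 1, kCol_F3 = 2, kCol_F2 = 3 };

    typedef int (*PFNOPHANDLER)(void);

    static int opMovqQqPq(void)   { std::puts("movq Qq,Pq (MMX)");        return 0; }
    static int opMovdqaWxVx(void) { std::puts("movdqa Wx,Vx (aligned)");   return 0; }
    static int opMovdquWxVx(void) { std::puts("movdqu Wx,Vx (unaligned)"); return 0; }
    static int opInvalid(void)    { std::puts("#UD");                      return -1; }

    /* Row for opcode 0x0f 0x7f after the split: the decoder picks the column
       from the already-decoded prefix, so no handler needs a prefix switch. */
    static const PFNOPHANDLER g_apfnRow7f[4] =
    { opMovqQqPq, opMovdqaWxVx, opMovdquWxVx, opInvalid };

    int main()
    {
        PrefixCol const enmCol = kCol_66; /* e.g. a 0x66 prefix was decoded */
        return g_apfnRow7f[enmCol]();     /* dispatches to the movdqa handler */
    }

Compared with the old in-handler switch, this keeps each instruction form in its own function and makes invalid prefix combinations, such as the 0xf2 column here, table-driven.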