Changeset 65751 in vbox
- Timestamp: Feb 13, 2017 8:25:42 AM
- svn:sync-xref-src-repo-rev: 113466
- File: 1 edited
--- trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h (r65750)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h (r65751)
@@ -3128,100 +3128,128 @@
 
 
-/** Opcode 0x0f 0x6f. */
-FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
-{
-    bool fAligned = false;
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
-            fAligned = true;
-            /* fall thru */
-        case IEM_OP_PRF_REPZ: /* SSE unaligned */
-            if (fAligned)
-                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
-            else
-                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 0);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
-                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(uint128_t, u128Tmp);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-                if (fAligned)
-                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-                else
-                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        case 0: /* MMX */
-            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
-                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(0, 1);
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
-                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
-                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(0, 2);
-                IEM_MC_LOCAL(uint64_t, u64Tmp);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
-                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
+/** Opcode 0x0f 0x6f - movq Pq, Qq */
+FNIEMOP_DEF(iemOp_movq_Pq_Qq)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 1);
+        IEM_MC_LOCAL(uint64_t, u64Tmp);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
+        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t, u64Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
+FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
+                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint128_t, u128Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
+FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
+                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint128_t, u128Tmp);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
 }
 
@@ -8125,5 +8153,5 @@
     /* 0x6d */ iemOp_InvalidNeedRM,  iemOp_vpunpckhqdq_Vx_Hx_W,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
     /* 0x6e */ iemOp_movd_q_Pd_Ey,   iemOp_vmovd_q_Vy_Ey,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
-    /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
+    /* 0x6f */ iemOp_movq_Pq_Qq,     iemOp_vmovdqa_Vx_Wx,        iemOp_vmovdqu_Vx_Wx,  iemOp_InvalidNeedRM,
 
     /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
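In short, the old combined 0x0f 0x6f handler chose between movq, movdqa and movdqu at runtime by switching on the operand-size/REPZ prefix flags, while the new code gives each mandatory-prefix form its own handler and lets the four-column opcode table (no prefix, 0x66, 0xF3, 0xF2) do the selection, with the unused 0xF2 column decoding to #UD via iemOp_InvalidNeedRM. Below is a minimal, self-contained sketch of that table-dispatch pattern; all names and the simplified prefix index are hypothetical illustrations, not the VirtualBox IEM API:

/* Hypothetical illustration of four-column mandatory-prefix dispatch.
 * Build with: cc -o dispatch dispatch.c */
#include <stdio.h>

typedef int (*PFNOPHANDLER)(void);

static int OpMovqPqQq(void)   { puts("movq Pq,Qq   (no prefix, MMX)");       return 0; }
static int OpMovdqaVxWx(void) { puts("movdqa Vx,Wx (0x66, SSE, aligned)");   return 0; }
static int OpMovdquVxWx(void) { puts("movdqu Vx,Wx (0xF3, SSE, unaligned)"); return 0; }
static int OpInvalid(void)    { puts("#UD (no 0xF2 form of this opcode)");   return -1; }

/* One row of the two-byte opcode map: columns are the mandatory prefix
 * (none, 0x66, 0xF3, 0xF2), mirroring the new 0x6f table row above. */
static const PFNOPHANDLER g_apfnOp6f[4] =
{ OpMovqPqQq, OpMovdqaVxWx, OpMovdquVxWx, OpInvalid };

/* idxPrefix: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
static int DispatchOp6f(unsigned idxPrefix)
{
    return g_apfnOp6f[idxPrefix & 3]();
}

int main(void)
{
    for (unsigned i = 0; i < 4; i++)
        DispatchOp6f(i);
    return 0;
}

Compared with the old per-opcode switch, the table makes prefix decoding uniform across opcodes and turns an unsupported prefix combination into an explicit invalid entry rather than a default case buried in each handler.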