Changeset 65752 in vbox for trunk/src/VBox/VMM
- Timestamp:
- Feb 13, 2017 8:56:57 AM (8 years ago)
- File:
- 1 edited
Legend:
- Unmodified (shown with a leading space)
- Added (shown with a leading +)
- Removed (shown with a leading -)
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
--- r65751
+++ r65752

@@ -3255,131 +3255,214 @@
 
 
-/** Opcode 0x0f 0x70. The immediate here is evil! */
-FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
-{
+/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
+FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
+{
+    IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE */
-        case IEM_OP_PRF_REPNZ: /* SSE */
-        case IEM_OP_PRF_REPZ: /* SSE */
-        {
-            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
-            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-            {
-                case IEM_OP_PRF_SIZE_OP:
-                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
-                    pfnAImpl = iemAImpl_pshufd;
-                    break;
-                case IEM_OP_PRF_REPNZ:
-                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
-                    pfnAImpl = iemAImpl_pshuflw;
-                    break;
-                case IEM_OP_PRF_REPZ:
-                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
-                    pfnAImpl = iemAImpl_pshufhw;
-                    break;
-                IEM_NOT_REACHED_DEFAULT_CASE_RET();
-            }
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-                IEM_MC_BEGIN(3, 0);
-                IEM_MC_ARG(uint128_t *, pDst, 0);
-                IEM_MC_ARG(uint128_t const *, pSrc, 1);
-                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_PREPARE_SSE_USAGE();
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(3, 2);
-                IEM_MC_ARG(uint128_t *, pDst, 0);
-                IEM_MC_LOCAL(uint128_t, uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-
-                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-                IEM_MC_PREPARE_SSE_USAGE();
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-        }
-
-        case 0: /* MMX Extension */
-            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-                IEM_MC_BEGIN(3, 0);
-                IEM_MC_ARG(uint64_t *, pDst, 0);
-                IEM_MC_ARG(uint64_t const *, pSrc, 1);
-                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
-                IEM_MC_PREPARE_FPU_USAGE();
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
-                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(3, 2);
-                IEM_MC_ARG(uint64_t *, pDst, 0);
-                IEM_MC_LOCAL(uint64_t, uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
-                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
-
-                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-                IEM_MC_PREPARE_FPU_USAGE();
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+
+        IEM_MC_BEGIN(3, 0);
+        IEM_MC_ARG(uint64_t *, pDst, 0);
+        IEM_MC_ARG(uint64_t const *, pSrc, 1);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
+        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_ARG(uint64_t *, pDst, 0);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
+
+        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+
+        IEM_MC_BEGIN(3, 0);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_ARG(uint128_t const *, pSrc, 1);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_LOCAL(uint128_t, uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+
+        IEM_MC_BEGIN(3, 0);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_ARG(uint128_t const *, pSrc, 1);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_LOCAL(uint128_t, uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+
+        IEM_MC_BEGIN(3, 0);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_ARG(uint128_t const *, pSrc, 1);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(3, 2);
+        IEM_MC_ARG(uint128_t *, pDst, 0);
+        IEM_MC_LOCAL(uint128_t, uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
+        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
 }
 
@@ -8155,5 +8238,5 @@
     /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
 
-    /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
+    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
     /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
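Background for readers of the diff: all four instructions dispatched at opcode 0x0f 0x70 interpret the trailing imm8 the same way, with each 2-bit field selecting one source element (16-bit words for pshufw, pshuflw and pshufhw, 32-bit dwords for pshufd). The immediate is "evil" because it is encoded after the ModR/M bytes, which is why the memory forms above fetch bEvil only after IEM_MC_CALC_RM_EFF_ADDR has consumed the displacement. Below is a minimal plain-C sketch of those shuffle semantics as given in the Intel SDM; it is not part of this changeset or of VBox, and the Ref* names are made up for illustration.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical reference models, not VBox code: each 2-bit field of
   the imm8 picks one source element. */

/* pshufw mm, mm/m64, imm8: shuffles all four words of a 64-bit value. */
static uint64_t RefPshufW(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uDst = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned iSel = (bImm >> (2 * i)) & 3;              /* word to select */
        uDst |= ((uSrc >> (16 * iSel)) & 0xffff) << (16 * i);
    }
    return uDst;
}

/* pshufd xmm, xmm/m128, imm8: the same pattern over four dwords. */
static void RefPshufD(uint32_t auDst[4], uint32_t const auSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (2 * i)) & 3];
}

/* pshuflw: shuffles the low four words, copies the high quadword through. */
static void RefPshufLW(uint16_t auDst[8], uint16_t const auSrc[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (2 * i)) & 3];
    memcpy(&auDst[4], &auSrc[4], 4 * sizeof(uint16_t));
}

/* pshufhw: copies the low quadword through, shuffles the high four words. */
static void RefPshufHW(uint16_t auDst[8], uint16_t const auSrc[8], uint8_t bImm)
{
    memcpy(&auDst[0], &auSrc[0], 4 * sizeof(uint16_t));
    for (unsigned i = 0; i < 4; i++)
        auDst[4 + i] = auSrc[4 + ((bImm >> (2 * i)) & 3)];
}

int main(void)
{
    /* 0x1b = 0b00011011: selects elements 3,2,1,0, i.e. reverses them. */
    uint64_t u = RefPshufW(UINT64_C(0x0004000300020001), 0x1b);
    printf("%016llx\n", (unsigned long long)u);           /* 0001000200030004 */

    uint32_t auIn[4] = { 1, 2, 3, 4 }, auOut[4];
    RefPshufD(auOut, auIn, 0x1b);
    printf("%u %u %u %u\n", auOut[0], auOut[1], auOut[2], auOut[3]); /* 4 3 2 1 */
    return 0;
}

Built with any C99 compiler, the demo prints 0001000200030004 and 4 3 2 1, showing how imm8 0x1b reverses the element order in both the word and dword variants.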
Note: See TracChangeset for help on using the changeset viewer.