Changeset 67004 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp:
- May 22, 2017 10:20:28 AM (8 years ago)
- Location:
- trunk/src/VBox/VMM/VMMAll
- Files:
- 2 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
r67003 r67004 263 263 'Pq': ( 'IDX_UseModRM', 'reg', '%Pq', 'Pq', ), 264 264 'Pq_WO': ( 'IDX_UseModRM', 'reg', '%Pq', 'Pq', ), 265 'VdZx_WO': ( 'IDX_UseModRM', 'reg', '%Vd', 'Vd', ), 265 266 'Vss': ( 'IDX_UseModRM', 'reg', '%Vss', 'Vss', ), 266 267 'Vss_WO': ( 'IDX_UseModRM', 'reg', '%Vss', 'Vss', ), -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
r67003 r67004 3465 3465 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff 3466 3466 */ 3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_O P_SIZES);3467 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 3468 3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3469 3469 { … … 3517 3517 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff 3518 3518 */ 3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_O P_SIZES);3519 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 3520 3520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3521 3521 { … … 3563 3563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3564 3564 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3565 IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq"); 3566 else 3567 IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed"); 3568 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3569 { 3570 /* XMM, greg*/ 3571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3572 IEM_MC_BEGIN(0, 1); 3573 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3574 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3575 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3565 { 3566 /** 3567 * @opcode 0x6e 3568 * @opcodesub rex.w=1 3569 * @oppfx 0x66 3570 * @opcpuid sse2 3571 * @opgroup og_sse2_simdint_datamov 3572 * @opxcpttype 5 3573 * @optest 64-bit / op1=1 op2=2 -> op1=2 3574 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 3575 * @oponly 3576 */ 3577 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 3578 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3576 3579 { 3580 /* XMM, greg64 */ 3581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3582 IEM_MC_BEGIN(0, 1); 3577 3583 IEM_MC_LOCAL(uint64_t, u64Tmp); 3584 3585 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3587 3578 3588 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & 
X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3579 3589 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3590 3591 IEM_MC_ADVANCE_RIP(); 3592 IEM_MC_END(); 3580 3593 } 3581 3594 else 3582 3595 { 3596 /* XMM, [mem64] */ 3597 IEM_MC_BEGIN(0, 2); 3598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3599 IEM_MC_LOCAL(uint64_t, u64Tmp); 3600 3601 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3604 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3605 3606 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3607 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3608 3609 IEM_MC_ADVANCE_RIP(); 3610 IEM_MC_END(); 3611 } 3612 } 3613 else 3614 { 3615 /** 3616 * @opdone 3617 * @opcode 0x6e 3618 * @opcodesub rex.w=0 3619 * @oppfx 0x66 3620 * @opcpuid sse2 3621 * @opgroup og_sse2_simdint_datamov 3622 * @opxcpttype 5 3623 * @opfunction iemOp_movd_q_Vy_Ey 3624 * @optest op1=1 op2=2 -> op1=2 3625 * @optest op1=0 op2=-42 -> op1=-42 3626 * @oponly 3627 */ 3628 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX); 3629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3630 { 3631 /* XMM, greg32 */ 3632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3633 IEM_MC_BEGIN(0, 1); 3583 3634 IEM_MC_LOCAL(uint32_t, u32Tmp); 3635 3636 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3637 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3638 3584 3639 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3585 3640 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3586 } 3587 IEM_MC_ADVANCE_RIP(); 3588 IEM_MC_END(); 3589 } 3590 else 3591 { 3592 /* XMM, [mem] */ 3593 IEM_MC_BEGIN(0, 2); 3594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3595 
IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3598 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3599 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3600 { 3601 IEM_MC_LOCAL(uint64_t, u64Tmp); 3602 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3603 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3641 3642 IEM_MC_ADVANCE_RIP(); 3643 IEM_MC_END(); 3604 3644 } 3605 3645 else 3606 3646 { 3647 /* XMM, [mem32] */ 3648 IEM_MC_BEGIN(0, 2); 3649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3607 3650 IEM_MC_LOCAL(uint32_t, u32Tmp); 3651 3652 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3655 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3656 3608 3657 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3609 3658 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3659 3660 IEM_MC_ADVANCE_RIP(); 3661 IEM_MC_END(); 3610 3662 } 3611 IEM_MC_ADVANCE_RIP();3612 IEM_MC_END();3613 3663 } 3614 3664 return VINF_SUCCESS;
Note:
See TracChangeset
for help on using the changeset viewer.