- Timestamp: Feb 13, 2017 9:07:02 AM
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
--- r65752
+++ r65753
@@ -3639,132 +3639,162 @@
 
 /**
- * Common worker for SSE2 and MMX instructions on the forms:
+ * Common worker for MMX instructions on the form:
  *      pxxx    mm1, mm2/mem64
+ */
+FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(2, 0);
+        IEM_MC_ARG(uint64_t *,          pDst, 0);
+        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
+        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(2, 2);
+        IEM_MC_ARG(uint64_t *,                  pDst,       0);
+        IEM_MC_LOCAL(uint64_t,                  uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/**
+ * Common worker for SSE2 instructions on the forms:
  *      pxxx    xmm1, xmm2/mem128
  *
  * Proper alignment of the 128-bit operand is enforced.
- * Exceptions type 4. SSE2 and MMX cpuid checks.
+ * Exceptions type 4. SSE2 cpuid checks.
  */
-FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
+FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
-    {
-        case IEM_OP_PRF_SIZE_OP: /* SSE */
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(2, 0);
-                IEM_MC_ARG(uint128_t *,          pDst, 0);
-                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_PREPARE_SSE_USAGE();
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(2, 2);
-                IEM_MC_ARG(uint128_t *,                 pDst,       0);
-                IEM_MC_LOCAL(uint128_t,                 uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-                IEM_MC_PREPARE_SSE_USAGE();
-                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        case 0: /* MMX */
-            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-            {
-                /*
-                 * Register, register.
-                 */
-                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
-                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_BEGIN(2, 0);
-                IEM_MC_ARG(uint64_t *,          pDst, 0);
-                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_PREPARE_FPU_USAGE();
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
-                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            else
-            {
-                /*
-                 * Register, memory.
-                 */
-                IEM_MC_BEGIN(2, 2);
-                IEM_MC_ARG(uint64_t *,                  pDst,       0);
-                IEM_MC_LOCAL(uint64_t,                  uSrc);
-                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
-                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
-
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-                IEM_MC_PREPARE_FPU_USAGE();
-                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-
-                IEM_MC_ADVANCE_RIP();
-                IEM_MC_END();
-            }
-            return VINF_SUCCESS;
-
-        default:
-            return IEMOP_RAISE_INVALID_OPCODE();
-    }
-}
-
-
-/** Opcode 0x0f 0x74. */
-FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(2, 0);
+        IEM_MC_ARG(uint128_t *,          pDst, 0);
+        IEM_MC_ARG(uint128_t const *,    pSrc, 1);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(2, 2);
+        IEM_MC_ARG(uint128_t *,                 pDst,       0);
+        IEM_MC_LOCAL(uint128_t,                 uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
+FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
 {
     IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
-    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
-}
-
-
-/** Opcode 0x0f 0x75. */
-FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
+    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
+}
+
+/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
+FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
+{
+    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
+}
+
+/* Opcode 0xf3 0x0f 0x74 - invalid */
+/* Opcode 0xf2 0x0f 0x74 - invalid */
+
+
+/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
+FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
 {
     IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
-    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
-}
-
-
-/** Opcode 0x0f 0x76. */
-FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
+    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
+}
+
+/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
+FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
+{
+    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
+}
+
+/* Opcode 0xf3 0x0f 0x75 - invalid */
+/* Opcode 0xf2 0x0f 0x75 - invalid */
+
+
+/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
+FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
 {
     IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
-    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
-}
+    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
+}
+
+/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
+FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
+{
+    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
+}
+
+/* Opcode 0xf3 0x0f 0x76 - invalid */
+/* Opcode 0xf2 0x0f 0x76 - invalid */
 
 
@@ -7994,10 +8024,18 @@
 
 
-/** Opcode 0x0f 0xef. */
-FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
+/** Opcode 0x0f 0xef - pxor Pq, Qq */
+FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 {
     IEMOP_MNEMONIC(pxor, "pxor");
-    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
-}
+    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor);
+}
+
+/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
+FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
+{
+    IEMOP_MNEMONIC(vpxor, "vpxor");
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
+}
+
 /* Opcode 0xf3 0x0f 0xef - invalid */
 /* Opcode 0xf2 0x0f 0xef - invalid */
@@ -8242,7 +8280,7 @@
     /* 0x72 */  IEMOP_X4(iemOp_Grp13),
     /* 0x73 */  IEMOP_X4(iemOp_Grp14),
-    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
-    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
-    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,  iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
+    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,  iemOp_vpcmpeqb_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
+    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,  iemOp_vpcmpeqw_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
+    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,  iemOp_vpcmpeqd_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
     /* 0x77 */  iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
 
@@ -8373,5 +8411,5 @@
     /* 0xed */  iemOp_paddsw_Pq_Qq,   iemOp_vpaddsw_Vx_Hx_Wx,   iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
     /* 0xee */  iemOp_pmaxsw_Pq_Qq,   iemOp_vpmaxsw_Vx_Hx_W,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
-    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
+    /* 0xef */  iemOp_pxor_Pq_Qq,     iemOp_vpxor_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
 
     /* 0xf0 */  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,  iemOp_vlddqu_Vx_Mx,
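The four columns in the dispatch-table rows above correspond to the mandatory prefix (none, 0x66, 0xF3, 0xF2), which is why the combined MMX/SSE2 workers are being split: the prefix no longer needs to be re-examined inside the worker. Each worker then only inspects the ModR/M byte to choose the register,register or register,memory form. As a rough illustration of that field extraction, here is a minimal standalone C sketch; the MODRM_* constants are hypothetical stand-ins for the X86_MODRM_* definitions in the VBox headers (the real code masks with a pre-shifted X86_MODRM_REG_SMASK after shifting, which is equivalent):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the X86_MODRM_* constants used in the diff. */
#define MODRM_MOD_MASK   0xc0 /* bits 7:6 - addressing mode */
#define MODRM_MOD_SHIFT  6
#define MODRM_REG_MASK   0x38 /* bits 5:3 - register operand */
#define MODRM_REG_SHIFT  3
#define MODRM_RM_MASK    0x07 /* bits 2:0 - register or memory operand */

int main(void)
{
    uint8_t bRm = 0xd1; /* example ModR/M byte: mod=3, reg=2, rm=1 */

    /* mod == 3 selects the register,register form, as in the workers above. */
    if ((bRm & MODRM_MOD_MASK) == (3 << MODRM_MOD_SHIFT))
        printf("register form: reg=%u rm=%u\n",
               (unsigned)((bRm & MODRM_REG_MASK) >> MODRM_REG_SHIFT),
               (unsigned)(bRm & MODRM_RM_MASK));
    else
        printf("memory form: reg=%u, rm selects the effective-address encoding\n",
               (unsigned)((bRm & MODRM_REG_MASK) >> MODRM_REG_SHIFT));
    return 0;
}

Note the asymmetry visible in the diff: the MMX worker uses the reg/rm indices directly (the @todo comments question whether REX.B/REX.R really are ignored there), while the SSE2 worker ORs in pVCpu->iem.s.uRexB / uRexReg so that XMM8 through XMM15 are reachable in 64-bit mode.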