Changeset 98918 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h
- Timestamp:
- Mar 12, 2023 3:23:20 AM (2 years ago)
- svn:sync-xref-src-repo-rev:
- 156254
- Files:
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h
r98916 r98918 3581 3581 3582 3582 /** 3583 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via 3584 * iemOp_Grp1_Eb_Ib_80. 3585 */ 3586 #define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \ 3587 if (IEM_IS_MODRM_REG_MODE(bRm)) \ 3588 { \ 3589 /* register target */ \ 3590 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 3591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \ 3592 IEM_MC_BEGIN(3, 0); \ 3593 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \ 3594 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \ 3595 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 3596 \ 3597 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 3598 IEM_MC_REF_EFLAGS(pEFlags); \ 3599 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \ 3600 \ 3601 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3602 IEM_MC_END(); \ 3603 } \ 3604 else \ 3605 { \ 3606 /* memory target */ \ 3607 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \ 3608 { \ 3609 IEM_MC_BEGIN(3, 2); \ 3610 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \ 3611 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3613 \ 3614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 3615 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 3616 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \ 3617 IEMOP_HLP_DONE_DECODING(); \ 3618 \ 3619 IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3620 IEM_MC_FETCH_EFLAGS(EFlags); \ 3621 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \ 3622 \ 3623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \ 3624 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3625 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3626 IEM_MC_END(); \ 3627 } \ 3628 else \ 3629 { \ 3630 (void)0 3631 3632 #define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \ 3633 IEMOP_HLP_DONE_DECODING(); \ 3634 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \ 3635 } \ 3636 } \ 3637 (void)0 3638 3639 #define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \ 3640 IEM_MC_BEGIN(3, 2); \ 3641 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \ 
3642 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3644 \ 3645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 3646 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 3647 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \ 3648 IEMOP_HLP_DONE_DECODING(); \ 3649 \ 3650 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3651 IEM_MC_FETCH_EFLAGS(EFlags); \ 3652 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \ 3653 \ 3654 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \ 3655 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3656 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3657 IEM_MC_END(); \ 3658 } \ 3659 } \ 3660 (void)0 3661 3662 3663 /** 3664 * @opmaps grp1_80,grp1_83 3665 * @opcode /0 3666 */ 3667 FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm) 3668 { 3669 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); 3670 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_add_u8, IEM_ACCESS_DATA_RW); 3671 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked); 3672 } 3673 3674 3675 /** 3676 * @opmaps grp1_80,grp1_83 3677 * @opcode /1 3678 */ 3679 FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm) 3680 { 3681 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); 3682 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_or_u8, IEM_ACCESS_DATA_RW); 3683 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked); 3684 } 3685 3686 3687 /** 3688 * @opmaps grp1_80,grp1_83 3689 * @opcode /2 3690 */ 3691 FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm) 3692 { 3693 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); 3694 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW); 3695 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked); 3696 } 3697 3698 3699 /** 3700 * @opmaps grp1_80,grp1_83 3701 * @opcode /3 3702 */ 3703 FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm) 3704 { 3705 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); 3706 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW); 3707 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked); 3708 } 3709 3710 3711 
/** 3712 * @opmaps grp1_80,grp1_83 3713 * @opcode /4 3714 */ 3715 FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm) 3716 { 3717 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); 3718 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_and_u8, IEM_ACCESS_DATA_RW); 3719 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked); 3720 } 3721 3722 3723 /** 3724 * @opmaps grp1_80,grp1_83 3725 * @opcode /5 3726 */ 3727 FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm) 3728 { 3729 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); 3730 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW); 3731 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked); 3732 } 3733 3734 3735 /** 3736 * @opmaps grp1_80,grp1_83 3737 * @opcode /6 3738 */ 3739 FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm) 3740 { 3741 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); 3742 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW); 3743 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked); 3744 } 3745 3746 3747 /** 3748 * @opmaps grp1_80,grp1_83 3749 * @opcode /7 3750 */ 3751 FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm) 3752 { 3753 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); 3754 IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R); 3755 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK(); 3756 } 3757 3758 3759 /** 3583 3760 * @opcode 0x80 3584 3761 */ … … 3588 3765 switch (IEM_GET_MODRM_REG_8(bRm)) 3589 3766 { 3590 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break; 3591 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break; 3592 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break; 3593 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break; 3594 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break; 3595 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break; 3596 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break; 3597 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break; 3598 } 3599 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)]; 3600 3601 if (IEM_IS_MODRM_REG_MODE(bRm)) 3602 { 3603 /* register target */ 3604 
uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3606 IEM_MC_BEGIN(3, 0); 3607 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 3608 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); 3609 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3610 3611 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3612 IEM_MC_REF_EFLAGS(pEFlags); 3613 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags); 3614 3615 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3616 IEM_MC_END(); 3617 } 3618 else 3619 { 3620 /* memory target */ 3621 uint32_t fAccess; 3622 if (pImpl->pfnLockedU8) 3623 fAccess = IEM_ACCESS_DATA_RW; 3624 else /* CMP */ 3625 fAccess = IEM_ACCESS_DATA_R; 3626 IEM_MC_BEGIN(3, 2); 3627 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 3628 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3630 3631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 3632 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 3633 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); 3634 if (pImpl->pfnLockedU8) 3635 IEMOP_HLP_DONE_DECODING(); 3636 else 3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3638 3639 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 3640 IEM_MC_FETCH_EFLAGS(EFlags); 3641 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 3642 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags); 3643 else 3644 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags); 3645 3646 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess); 3647 IEM_MC_COMMIT_EFLAGS(EFlags); 3648 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3649 IEM_MC_END(); 3650 } 3767 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm); 3768 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm); 3769 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm); 3770 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm); 3771 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm); 3772 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm); 3773 case 
6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm); 3774 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm); 3775 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3776 } 3777 } 3778 3779 3780 /** 3781 * Body for a group 1 binary operator. 3782 */ 3783 #define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \ 3784 if (IEM_IS_MODRM_REG_MODE(bRm)) \ 3785 { \ 3786 /* register target */ \ 3787 switch (pVCpu->iem.s.enmEffOpSize) \ 3788 { \ 3789 case IEMMODE_16BIT: \ 3790 { \ 3791 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \ 3792 IEMOP_HLP_DONE_DECODING(); \ 3793 IEM_MC_BEGIN(3, 0); \ 3794 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 3795 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \ 3796 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 3797 \ 3798 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 3799 IEM_MC_REF_EFLAGS(pEFlags); \ 3800 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \ 3801 \ 3802 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3803 IEM_MC_END(); \ 3804 break; \ 3805 } \ 3806 \ 3807 case IEMMODE_32BIT: \ 3808 { \ 3809 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \ 3810 IEMOP_HLP_DONE_DECODING(); \ 3811 IEM_MC_BEGIN(3, 0); \ 3812 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 3813 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \ 3814 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 3815 \ 3816 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 3817 IEM_MC_REF_EFLAGS(pEFlags); \ 3818 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \ 3819 if (a_fRW == IEM_ACCESS_DATA_RW) \ 3820 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \ 3821 \ 3822 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3823 IEM_MC_END(); \ 3824 break; \ 3825 } \ 3826 \ 3827 case IEMMODE_64BIT: \ 3828 { \ 3829 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \ 3830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \ 3831 IEM_MC_BEGIN(3, 0); \ 3832 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 3833 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \ 3834 
IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 3835 \ 3836 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 3837 IEM_MC_REF_EFLAGS(pEFlags); \ 3838 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \ 3839 \ 3840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3841 IEM_MC_END(); \ 3842 break; \ 3843 } \ 3844 \ 3845 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 3846 } \ 3847 } \ 3848 else \ 3849 { \ 3850 /* memory target */ \ 3851 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \ 3852 { \ 3853 switch (pVCpu->iem.s.enmEffOpSize) \ 3854 { \ 3855 case IEMMODE_16BIT: \ 3856 { \ 3857 IEM_MC_BEGIN(3, 2); \ 3858 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 3859 IEM_MC_ARG(uint16_t, u16Src, 1); \ 3860 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3862 \ 3863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \ 3864 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \ 3865 IEM_MC_ASSIGN(u16Src, u16Imm); \ 3866 IEMOP_HLP_DONE_DECODING(); \ 3867 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3868 IEM_MC_FETCH_EFLAGS(EFlags); \ 3869 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \ 3870 \ 3871 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \ 3872 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3873 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3874 IEM_MC_END(); \ 3875 break; \ 3876 } \ 3877 \ 3878 case IEMMODE_32BIT: \ 3879 { \ 3880 IEM_MC_BEGIN(3, 2); \ 3881 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 3882 IEM_MC_ARG(uint32_t, u32Src, 1); \ 3883 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3885 \ 3886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \ 3887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \ 3888 IEM_MC_ASSIGN(u32Src, u32Imm); \ 3889 IEMOP_HLP_DONE_DECODING(); \ 3890 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3891 IEM_MC_FETCH_EFLAGS(EFlags); \ 3892 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \ 3893 \ 3894 
IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \ 3895 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3896 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3897 IEM_MC_END(); \ 3898 break; \ 3899 } \ 3900 \ 3901 case IEMMODE_64BIT: \ 3902 { \ 3903 IEM_MC_BEGIN(3, 2); \ 3904 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 3905 IEM_MC_ARG(uint64_t, u64Src, 1); \ 3906 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3908 \ 3909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \ 3910 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \ 3911 IEMOP_HLP_DONE_DECODING(); \ 3912 IEM_MC_ASSIGN(u64Src, u64Imm); \ 3913 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3914 IEM_MC_FETCH_EFLAGS(EFlags); \ 3915 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \ 3916 \ 3917 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \ 3918 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3919 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3920 IEM_MC_END(); \ 3921 break; \ 3922 } \ 3923 \ 3924 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 3925 } \ 3926 } \ 3927 else \ 3928 { \ 3929 (void)0 3930 3931 #define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \ 3932 IEMOP_HLP_DONE_DECODING(); \ 3933 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \ 3934 } \ 3935 } \ 3936 (void)0 3937 3938 #define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \ 3939 switch (pVCpu->iem.s.enmEffOpSize) \ 3940 { \ 3941 case IEMMODE_16BIT: \ 3942 { \ 3943 IEM_MC_BEGIN(3, 2); \ 3944 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 3945 IEM_MC_ARG(uint16_t, u16Src, 1); \ 3946 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3948 \ 3949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \ 3950 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \ 3951 IEM_MC_ASSIGN(u16Src, u16Imm); \ 3952 IEMOP_HLP_DONE_DECODING(); \ 3953 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3954 IEM_MC_FETCH_EFLAGS(EFlags); \ 3955 
IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \ 3956 \ 3957 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \ 3958 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3959 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3960 IEM_MC_END(); \ 3961 break; \ 3962 } \ 3963 \ 3964 case IEMMODE_32BIT: \ 3965 { \ 3966 IEM_MC_BEGIN(3, 2); \ 3967 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 3968 IEM_MC_ARG(uint32_t, u32Src, 1); \ 3969 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3971 \ 3972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \ 3973 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \ 3974 IEM_MC_ASSIGN(u32Src, u32Imm); \ 3975 IEMOP_HLP_DONE_DECODING(); \ 3976 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 3977 IEM_MC_FETCH_EFLAGS(EFlags); \ 3978 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \ 3979 \ 3980 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \ 3981 IEM_MC_COMMIT_EFLAGS(EFlags); \ 3982 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 3983 IEM_MC_END(); \ 3984 break; \ 3985 } \ 3986 \ 3987 case IEMMODE_64BIT: \ 3988 { \ 3989 IEM_MC_BEGIN(3, 2); \ 3990 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 3991 IEM_MC_ARG(uint64_t, u64Src, 1); \ 3992 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 3994 \ 3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \ 3996 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \ 3997 IEMOP_HLP_DONE_DECODING(); \ 3998 IEM_MC_ASSIGN(u64Src, u64Imm); \ 3999 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4000 IEM_MC_FETCH_EFLAGS(EFlags); \ 4001 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \ 4002 \ 4003 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \ 4004 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4005 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4006 IEM_MC_END(); \ 4007 break; \ 4008 } \ 4009 \ 4010 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 
4011 } \ 4012 } \ 4013 } \ 4014 (void)0 4015 4016 4017 /** 4018 * @opmaps grp1_81 4019 * @opcode /0 4020 */ 4021 FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm) 4022 { 4023 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); 4024 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW); 4025 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked); 4026 } 4027 4028 4029 /** 4030 * @opmaps grp1_81 4031 * @opcode /1 4032 */ 4033 FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm) 4034 { 4035 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); 4036 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW); 4037 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked); 4038 } 4039 4040 4041 /** 4042 * @opmaps grp1_81 4043 * @opcode /2 4044 */ 4045 FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm) 4046 { 4047 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); 4048 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW); 4049 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked); 4050 } 4051 4052 4053 /** 4054 * @opmaps grp1_81 4055 * @opcode /3 4056 */ 4057 FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm) 4058 { 4059 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); 4060 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW); 4061 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked); 4062 } 4063 4064 4065 /** 4066 * @opmaps grp1_81 4067 * @opcode /4 4068 */ 4069 FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm) 4070 { 4071 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); 4072 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW); 4073 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, 
iemAImpl_and_u64_locked); 4074 } 4075 4076 4077 /** 4078 * @opmaps grp1_81 4079 * @opcode /5 4080 */ 4081 FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm) 4082 { 4083 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); 4084 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW); 4085 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked); 4086 } 4087 4088 4089 /** 4090 * @opmaps grp1_81 4091 * @opcode /6 4092 */ 4093 FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm) 4094 { 4095 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); 4096 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW); 4097 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked); 4098 } 4099 4100 4101 /** 4102 * @opmaps grp1_81 4103 * @opcode /7 4104 */ 4105 FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm) 4106 { 4107 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); 4108 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R); 4109 IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK(); 3651 4110 } 3652 4111 … … 3660 4119 switch (IEM_GET_MODRM_REG_8(bRm)) 3661 4120 { 3662 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break; 3663 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break; 3664 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break; 3665 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break; 3666 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break; 3667 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break; 3668 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break; 3669 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break; 3670 } 3671 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)]; 3672 3673 switch (pVCpu->iem.s.enmEffOpSize) 3674 { 3675 case IEMMODE_16BIT: 3676 { 3677 if (IEM_IS_MODRM_REG_MODE(bRm)) 3678 { 3679 /* register target */ 3680 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); 
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3682 IEM_MC_BEGIN(3, 0); 3683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); 3685 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3686 3687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3688 IEM_MC_REF_EFLAGS(pEFlags); 3689 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3690 3691 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3692 IEM_MC_END(); 3693 } 3694 else 3695 { 3696 /* memory target */ 3697 uint32_t fAccess; 3698 if (pImpl->pfnLockedU16) 3699 fAccess = IEM_ACCESS_DATA_RW; 3700 else /* CMP, TEST */ 3701 fAccess = IEM_ACCESS_DATA_R; 3702 IEM_MC_BEGIN(3, 2); 3703 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3704 IEM_MC_ARG(uint16_t, u16Src, 1); 3705 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3707 3708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); 3709 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); 3710 IEM_MC_ASSIGN(u16Src, u16Imm); 3711 if (pImpl->pfnLockedU16) 3712 IEMOP_HLP_DONE_DECODING(); 3713 else 3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3715 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 3716 IEM_MC_FETCH_EFLAGS(EFlags); 3717 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 3718 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3719 else 3720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 3721 3722 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess); 3723 IEM_MC_COMMIT_EFLAGS(EFlags); 3724 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3725 IEM_MC_END(); 3726 } 3727 break; 3728 } 3729 3730 case IEMMODE_32BIT: 3731 { 3732 if (IEM_IS_MODRM_REG_MODE(bRm)) 3733 { 3734 /* register target */ 3735 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); 3736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3737 IEM_MC_BEGIN(3, 0); 3738 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3739 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); 3740 IEM_MC_ARG(uint32_t *, 
pEFlags, 2); 3741 3742 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3743 IEM_MC_REF_EFLAGS(pEFlags); 3744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 3745 if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */ 3746 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 3747 3748 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3749 IEM_MC_END(); 3750 } 3751 else 3752 { 3753 /* memory target */ 3754 uint32_t fAccess; 3755 if (pImpl->pfnLockedU32) 3756 fAccess = IEM_ACCESS_DATA_RW; 3757 else /* CMP, TEST */ 3758 fAccess = IEM_ACCESS_DATA_R; 3759 IEM_MC_BEGIN(3, 2); 3760 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3761 IEM_MC_ARG(uint32_t, u32Src, 1); 3762 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3764 3765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); 3766 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); 3767 IEM_MC_ASSIGN(u32Src, u32Imm); 3768 if (pImpl->pfnLockedU32) 3769 IEMOP_HLP_DONE_DECODING(); 3770 else 3771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3772 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 3773 IEM_MC_FETCH_EFLAGS(EFlags); 3774 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 3775 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 3776 else 3777 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 3778 3779 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess); 3780 IEM_MC_COMMIT_EFLAGS(EFlags); 3781 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3782 IEM_MC_END(); 3783 } 3784 break; 3785 } 3786 3787 case IEMMODE_64BIT: 3788 { 3789 if (IEM_IS_MODRM_REG_MODE(bRm)) 3790 { 3791 /* register target */ 3792 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); 3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3794 IEM_MC_BEGIN(3, 0); 3795 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 3796 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); 3797 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3798 3799 IEM_MC_REF_GREG_U64(pu64Dst, 
IEM_GET_MODRM_RM(pVCpu, bRm)); 3800 IEM_MC_REF_EFLAGS(pEFlags); 3801 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 3802 3803 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3804 IEM_MC_END(); 3805 } 3806 else 3807 { 3808 /* memory target */ 3809 uint32_t fAccess; 3810 if (pImpl->pfnLockedU64) 3811 fAccess = IEM_ACCESS_DATA_RW; 3812 else /* CMP */ 3813 fAccess = IEM_ACCESS_DATA_R; 3814 IEM_MC_BEGIN(3, 2); 3815 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 3816 IEM_MC_ARG(uint64_t, u64Src, 1); 3817 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3819 3820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); 3821 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); 3822 if (pImpl->pfnLockedU64) 3823 IEMOP_HLP_DONE_DECODING(); 3824 else 3825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3826 IEM_MC_ASSIGN(u64Src, u64Imm); 3827 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 3828 IEM_MC_FETCH_EFLAGS(EFlags); 3829 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 3830 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 3831 else 3832 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 3833 3834 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess); 3835 IEM_MC_COMMIT_EFLAGS(EFlags); 3836 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3837 IEM_MC_END(); 3838 } 3839 break; 3840 } 3841 4121 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm); 4122 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm); 4123 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm); 4124 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm); 4125 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm); 4126 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm); 4127 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm); 4128 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm); 3842 4129 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3843 4130 } … … 3858 4145 3859 4146 /** 4147 * Body for group 1 
instruction (binary) w/ byte imm operand, dispatched via 4148 * iemOp_Grp1_Ev_Ib. 4149 */ 4150 #define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \ 4151 if (IEM_IS_MODRM_REG_MODE(bRm)) \ 4152 { \ 4153 /* \ 4154 * Register target \ 4155 */ \ 4156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \ 4157 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4158 switch (pVCpu->iem.s.enmEffOpSize) \ 4159 { \ 4160 case IEMMODE_16BIT: \ 4161 { \ 4162 IEM_MC_BEGIN(3, 0); \ 4163 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 4164 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \ 4165 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 4166 \ 4167 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 4168 IEM_MC_REF_EFLAGS(pEFlags); \ 4169 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \ 4170 \ 4171 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4172 IEM_MC_END(); \ 4173 break; \ 4174 } \ 4175 \ 4176 case IEMMODE_32BIT: \ 4177 { \ 4178 IEM_MC_BEGIN(3, 0); \ 4179 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 4180 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \ 4181 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 4182 \ 4183 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 4184 IEM_MC_REF_EFLAGS(pEFlags); \ 4185 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \ 4186 if ((a_fRW) != IEM_ACCESS_DATA_R) \ 4187 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \ 4188 \ 4189 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4190 IEM_MC_END(); \ 4191 break; \ 4192 } \ 4193 \ 4194 case IEMMODE_64BIT: \ 4195 { \ 4196 IEM_MC_BEGIN(3, 0); \ 4197 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 4198 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \ 4199 IEM_MC_ARG(uint32_t *, pEFlags, 2); \ 4200 \ 4201 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \ 4202 IEM_MC_REF_EFLAGS(pEFlags); \ 4203 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \ 4204 \ 4205 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4206 IEM_MC_END(); \ 4207 break; \ 4208 } 
\ 4209 \ 4210 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 4211 } \ 4212 } \ 4213 else \ 4214 { \ 4215 /* \ 4216 * Memory target. \ 4217 */ \ 4218 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \ 4219 { \ 4220 switch (pVCpu->iem.s.enmEffOpSize) \ 4221 { \ 4222 case IEMMODE_16BIT: \ 4223 { \ 4224 IEM_MC_BEGIN(3, 2); \ 4225 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 4226 IEM_MC_ARG(uint16_t, u16Src, 1); \ 4227 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4229 \ 4230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4231 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4232 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \ 4233 IEMOP_HLP_DONE_DECODING(); \ 4234 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4235 IEM_MC_FETCH_EFLAGS(EFlags); \ 4236 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \ 4237 \ 4238 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \ 4239 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4241 IEM_MC_END(); \ 4242 break; \ 4243 } \ 4244 \ 4245 case IEMMODE_32BIT: \ 4246 { \ 4247 IEM_MC_BEGIN(3, 2); \ 4248 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 4249 IEM_MC_ARG(uint32_t, u32Src, 1); \ 4250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4252 \ 4253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4254 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4255 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \ 4256 IEMOP_HLP_DONE_DECODING(); \ 4257 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4258 IEM_MC_FETCH_EFLAGS(EFlags); \ 4259 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \ 4260 \ 4261 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \ 4262 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4263 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4264 IEM_MC_END(); \ 4265 break; \ 4266 } \ 4267 \ 4268 case IEMMODE_64BIT: \ 4269 { \ 4270 IEM_MC_BEGIN(3, 2); \ 4271 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 4272 
IEM_MC_ARG(uint64_t, u64Src, 1); \ 4273 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4275 \ 4276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4277 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4278 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \ 4279 IEMOP_HLP_DONE_DECODING(); \ 4280 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4281 IEM_MC_FETCH_EFLAGS(EFlags); \ 4282 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \ 4283 \ 4284 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \ 4285 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4286 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4287 IEM_MC_END(); \ 4288 break; \ 4289 } \ 4290 \ 4291 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 4292 } \ 4293 } \ 4294 else \ 4295 { \ 4296 (void)0 4297 4298 #define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \ 4299 IEMOP_HLP_DONE_DECODING(); \ 4300 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \ 4301 } \ 4302 } \ 4303 (void)0 4304 4305 #define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \ 4306 switch (pVCpu->iem.s.enmEffOpSize) \ 4307 { \ 4308 case IEMMODE_16BIT: \ 4309 { \ 4310 IEM_MC_BEGIN(3, 2); \ 4311 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \ 4312 IEM_MC_ARG(uint16_t, u16Src, 1); \ 4313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4315 \ 4316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4317 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4318 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \ 4319 IEMOP_HLP_DONE_DECODING(); \ 4320 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4321 IEM_MC_FETCH_EFLAGS(EFlags); \ 4322 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \ 4323 \ 4324 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \ 4325 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4326 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4327 IEM_MC_END(); \ 4328 break; \ 4329 } \ 4330 \ 4331 case IEMMODE_32BIT: \ 4332 { \ 4333 
IEM_MC_BEGIN(3, 2); \ 4334 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \ 4335 IEM_MC_ARG(uint32_t, u32Src, 1); \ 4336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4338 \ 4339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4340 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4341 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \ 4342 IEMOP_HLP_DONE_DECODING(); \ 4343 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4344 IEM_MC_FETCH_EFLAGS(EFlags); \ 4345 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \ 4346 \ 4347 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \ 4348 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4349 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4350 IEM_MC_END(); \ 4351 break; \ 4352 } \ 4353 \ 4354 case IEMMODE_64BIT: \ 4355 { \ 4356 IEM_MC_BEGIN(3, 2); \ 4357 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \ 4358 IEM_MC_ARG(uint64_t, u64Src, 1); \ 4359 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \ 4360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \ 4361 \ 4362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \ 4363 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \ 4364 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \ 4365 IEMOP_HLP_DONE_DECODING(); \ 4366 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \ 4367 IEM_MC_FETCH_EFLAGS(EFlags); \ 4368 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \ 4369 \ 4370 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \ 4371 IEM_MC_COMMIT_EFLAGS(EFlags); \ 4372 IEM_MC_ADVANCE_RIP_AND_FINISH(); \ 4373 IEM_MC_END(); \ 4374 break; \ 4375 } \ 4376 \ 4377 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 4378 } \ 4379 } \ 4380 } \ 4381 (void)0 4382 4383 /** 4384 * @opmaps grp1_83 4385 * @opcode /0 4386 */ 4387 FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm) 4388 { 4389 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); 4390 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 
IEM_ACCESS_DATA_RW); 4391 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked); 4392 } 4393 4394 4395 /** 4396 * @opmaps grp1_83 4397 * @opcode /1 4398 */ 4399 FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm) 4400 { 4401 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); 4402 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW); 4403 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked); 4404 } 4405 4406 4407 /** 4408 * @opmaps grp1_83 4409 * @opcode /2 4410 */ 4411 FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm) 4412 { 4413 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); 4414 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW); 4415 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked); 4416 } 4417 4418 4419 /** 4420 * @opmaps grp1_83 4421 * @opcode /3 4422 */ 4423 FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm) 4424 { 4425 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); 4426 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW); 4427 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked); 4428 } 4429 4430 4431 /** 4432 * @opmaps grp1_83 4433 * @opcode /4 4434 */ 4435 FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm) 4436 { 4437 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); 4438 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW); 4439 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked); 4440 } 4441 4442 4443 /** 4444 * @opmaps grp1_83 4445 * @opcode /5 4446 */ 4447 FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm) 4448 { 4449 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); 4450 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 
IEM_ACCESS_DATA_RW); 4451 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked); 4452 } 4453 4454 4455 /** 4456 * @opmaps grp1_83 4457 * @opcode /6 4458 */ 4459 FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm) 4460 { 4461 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); 4462 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW); 4463 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked); 4464 } 4465 4466 4467 /** 4468 * @opmaps grp1_83 4469 * @opcode /7 4470 */ 4471 FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm) 4472 { 4473 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); 4474 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R); 4475 IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK(); 4476 } 4477 4478 4479 /** 3860 4480 * @opcode 0x83 3861 4481 */ 3862 4482 FNIEMOP_DEF(iemOp_Grp1_Ev_Ib) 3863 4483 { 3864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);3865 switch (IEM_GET_MODRM_REG_8(bRm))3866 {3867 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;3868 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;3869 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;3870 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;3871 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;3872 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;3873 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;3874 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;3875 }3876 4484 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior 3877 4485 to the 386 even if absent in the intel reference manuals and some 3878 4486 3rd party opcode listings. 
*/ 3879 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)]; 3880 3881 if (IEM_IS_MODRM_REG_MODE(bRm)) 3882 { 3883 /* 3884 * Register target 3885 */ 3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3887 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 3888 switch (pVCpu->iem.s.enmEffOpSize) 3889 { 3890 case IEMMODE_16BIT: 3891 { 3892 IEM_MC_BEGIN(3, 0); 3893 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3894 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); 3895 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3896 3897 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3898 IEM_MC_REF_EFLAGS(pEFlags); 3899 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3900 3901 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3902 IEM_MC_END(); 3903 break; 3904 } 3905 3906 case IEMMODE_32BIT: 3907 { 3908 IEM_MC_BEGIN(3, 0); 3909 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3910 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); 3911 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3912 3913 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3914 IEM_MC_REF_EFLAGS(pEFlags); 3915 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 3916 if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */ 3917 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 3918 3919 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3920 IEM_MC_END(); 3921 break; 3922 } 3923 3924 case IEMMODE_64BIT: 3925 { 3926 IEM_MC_BEGIN(3, 0); 3927 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 3928 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); 3929 IEM_MC_ARG(uint32_t *, pEFlags, 2); 3930 3931 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); 3932 IEM_MC_REF_EFLAGS(pEFlags); 3933 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 3934 3935 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3936 IEM_MC_END(); 3937 break; 3938 } 3939 3940 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 3941 } 3942 } 3943 else 3944 { 3945 /* 3946 * Memory target. 
3947 */ 3948 uint32_t fAccess; 3949 if (pImpl->pfnLockedU16) 3950 fAccess = IEM_ACCESS_DATA_RW; 3951 else /* CMP */ 3952 fAccess = IEM_ACCESS_DATA_R; 3953 3954 switch (pVCpu->iem.s.enmEffOpSize) 3955 { 3956 case IEMMODE_16BIT: 3957 { 3958 IEM_MC_BEGIN(3, 2); 3959 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 3960 IEM_MC_ARG(uint16_t, u16Src, 1); 3961 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3963 3964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 3965 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 3966 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); 3967 if (pImpl->pfnLockedU16) 3968 IEMOP_HLP_DONE_DECODING(); 3969 else 3970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3971 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 3972 IEM_MC_FETCH_EFLAGS(EFlags); 3973 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 3974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 3975 else 3976 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 3977 3978 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess); 3979 IEM_MC_COMMIT_EFLAGS(EFlags); 3980 IEM_MC_ADVANCE_RIP_AND_FINISH(); 3981 IEM_MC_END(); 3982 break; 3983 } 3984 3985 case IEMMODE_32BIT: 3986 { 3987 IEM_MC_BEGIN(3, 2); 3988 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 3989 IEM_MC_ARG(uint32_t, u32Src, 1); 3990 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 3991 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 3992 3993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 3994 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 3995 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); 3996 if (pImpl->pfnLockedU32) 3997 IEMOP_HLP_DONE_DECODING(); 3998 else 3999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4000 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 4001 IEM_MC_FETCH_EFLAGS(EFlags); 4002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 4003 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 4004 else 4005 
IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 4006 4007 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess); 4008 IEM_MC_COMMIT_EFLAGS(EFlags); 4009 IEM_MC_ADVANCE_RIP_AND_FINISH(); 4010 IEM_MC_END(); 4011 break; 4012 } 4013 4014 case IEMMODE_64BIT: 4015 { 4016 IEM_MC_BEGIN(3, 2); 4017 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 4018 IEM_MC_ARG(uint64_t, u64Src, 1); 4019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 4020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4021 4022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 4023 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); 4024 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); 4025 if (pImpl->pfnLockedU64) 4026 IEMOP_HLP_DONE_DECODING(); 4027 else 4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4029 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 4030 IEM_MC_FETCH_EFLAGS(EFlags); 4031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 4032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 4033 else 4034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 4035 4036 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess); 4037 IEM_MC_COMMIT_EFLAGS(EFlags); 4038 IEM_MC_ADVANCE_RIP_AND_FINISH(); 4039 IEM_MC_END(); 4040 break; 4041 } 4042 4043 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 4044 } 4487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4488 switch (IEM_GET_MODRM_REG_8(bRm)) 4489 { 4490 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm); 4491 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm); 4492 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm); 4493 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm); 4494 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm); 4495 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm); 4496 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm); 4497 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm); 4498 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 4045 4499 } 4046 4500 }
Note: See TracChangeset for help on using the changeset viewer.