Changeset 37084 in vbox for trunk/src/VBox
- Timestamp: May 13, 2011 7:53:02 PM (14 years ago)
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
(r37079 → r37084)

In iemRaisePageFault() the bare assertion is replaced by one that reports the faulting address, the access flags and the status code:

 static VBOXSTRICTRC iemRaisePageFault(PIEMCPU pIemCpu, RTGCPTR GCPtrWhere, uint32_t fAccess, int rc)
 {
-    AssertFailed(/** @todo implement this */);
+    /** @todo implement this */
+    AssertMsgFailed(("GCPtrWhere=%RGp fAccess=%#x rc=%Rrc\n", GCPtrWhere, fAccess, rc));
     return VERR_NOT_IMPLEMENTED;
 }
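For readers unfamiliar with IPRT's AssertMsgFailed: it takes its printf-style argument list inside an extra pair of parentheses, and the format string understands VBox-specific conversions such as %RGv (guest-context pointer) and %Rrc (status code). A minimal, self-contained sketch of the pattern; the helper name below is invented for illustration and is not part of the changeset:

#include <iprt/assert.h>   /* AssertMsgFailed */
#include <iprt/err.h>      /* VERR_NOT_IMPLEMENTED */
#include <iprt/types.h>    /* RTGCPTR */

/* Hypothetical helper, for illustration only: assert with a formatted
   message, then fail with a not-implemented status, mirroring the hunk above. */
static int exampleUnimplementedPath(RTGCPTR GCPtrWhere, uint32_t fAccess, int rc)
{
    /* %RGv formats a guest-context pointer, %Rrc a VBox/IPRT status code. */
    AssertMsgFailed(("GCPtrWhere=%RGv fAccess=%#x rc=%Rrc\n", GCPtrWhere, fAccess, rc));
    return VERR_NOT_IMPLEMENTED;
}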
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
(r37056 → r37084)

A block of new assembly helpers for XADD (lines 569–673) is added:

;
; XADD for memory operands.
;
; Each function takes three arguments, first the pointer to the
; memory/register, then the pointer to the register, and finally a pointer to
; eflags. They all return void.
;
BEGINCODE
BEGINPROC iemAImpl_xadd_u8
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_8, [A1]
        xadd    [A0], T0_8
        mov     [A1], T0_8
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u8

BEGINPROC iemAImpl_xadd_u16
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_16, [A1]
        xadd    [A0], T0_16
        mov     [A1], T0_16
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u16

BEGINPROC iemAImpl_xadd_u32
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_32, [A1]
        xadd    [A0], T0_32
        mov     [A1], T0_32
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u32

BEGINPROC iemAImpl_xadd_u64
%ifdef RT_ARCH_AMD64
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0, [A1]
        xadd    [A0], T0
        mov     [A1], T0
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
%else
        int3
%endif
ENDPROC iemAImpl_xadd_u64

BEGINPROC iemAImpl_xadd_u8_locked
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_8, [A1]
        lock xadd [A0], T0_8
        mov     [A1], T0_8
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u8_locked

BEGINPROC iemAImpl_xadd_u16_locked
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_16, [A1]
        lock xadd [A0], T0_16
        mov     [A1], T0_16
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u16_locked

BEGINPROC iemAImpl_xadd_u32_locked
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0_32, [A1]
        lock xadd [A0], T0_32
        mov     [A1], T0_32
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
ENDPROC iemAImpl_xadd_u32_locked

BEGINPROC iemAImpl_xadd_u64_locked
%ifdef RT_ARCH_AMD64
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        mov     T0, [A1]
        lock xadd [A0], T0
        mov     [A1], T0
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
        ret
%else
        int3
%endif
ENDPROC iemAImpl_xadd_u64_locked

The block is inserted just ahead of the existing unary-operator macro section:

;;
; Macro for implementing a unary operator.
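As the header comment in the new block says, each helper takes a pointer to the destination (memory or register), a pointer to the source register and a pointer to the EFLAGS word, and returns nothing. The following C sketch shows what the unlocked 8-bit helper computes; the function name and plain-pointer prototype are illustrative only (the real declarations live in the IEM headers), and the EFLAGS recalculation done by IEM_SAVE_FLAGS is only hinted at:

#include <stdint.h>

/* Illustrative C equivalent of iemAImpl_xadd_u8 (unlocked variant):
   XADD exchanges the operands and stores their sum in the destination. */
static void xaddU8Sketch(uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags)
{
    uint8_t const u8OldDst = *pu8Dst;   /* original destination value          */
    *pu8Dst = u8OldDst + *pu8Reg;       /* destination <- destination + source */
    *pu8Reg = u8OldDst;                 /* source register <- old destination  */
    (void)pEFlags;                      /* the real helper recomputes OF/SF/ZF/AF/PF/CF from the addition */
}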
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
(r37056 → r37084)

The stubs for opcodes 0x0f 0xc0 and 0x0f 0xc1 are removed:

-FNIEMOP_STUB(iemOp_xadd_Eb_Gb);
-FNIEMOP_STUB(iemOp_xadd_Ev_Gv);

and replaced with full implementations:

/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Reg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

The new code sits directly ahead of the unchanged stub for the next opcode:

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

Further down in the same file, two reminder comments are added to the memory path of another instruction's implementation:

         * We're accessing memory.
         */
+       /** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
…
        switch (pIemCpu->enmEffOpSize)
        {
+           /** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
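Taken together, the decoder above and the new assembly helpers let the emulator handle both forms of XADD: the register form (mod == 3) works directly on two guest registers, while the memory form maps the guest operand read/write, runs the plain or LOCK-prefixed helper on the mapping and a local register copy, then commits the mapping, EFLAGS and the register copy. The instruction being emulated is the usual atomic fetch-and-add primitive; a small host-side illustration of what a guest's lock xadd means, written with GCC-style inline assembly and not part of the changeset, is:

#include <stdint.h>

/* Atomically add uValue to *pAddend and return the value it had before.
   This is exactly the "lock xadd" sequence the new memory path emulates. */
static inline uint32_t fetchAndAddU32(volatile uint32_t *pAddend, uint32_t uValue)
{
    uint32_t uOld = uValue;
    __asm__ __volatile__("lock; xaddl %0, %1"
                         : "+r" (uOld), "+m" (*pAddend)
                         :
                         : "cc", "memory");
    return uOld;   /* value of *pAddend before the addition */
}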