Changeset 36815 in vbox for trunk/src/VBox
- Timestamp: Apr 22, 2011 2:13:52 PM (14 years ago)
- svn:sync-xref-src-repo-rev: 71372
- Location: trunk/src/VBox/VMM
- Files: 4 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r36813 → r36815

Adds 16-, 32- and 64-bit indirect call implementations (iemCImpl_call_16/32/64) alongside the existing relative call implementations, renames the OldPC/NewPC locals of the latter to uOldPC/uNewPC, and corrects a segment register assertion.

New indirect call implementations:

    /**
     * Implements an indirect call.
     *
     * @param   uNewPC          The new program counter (RIP) value (loaded from the
     *                          operand).
     * @param   enmEffOpSize    The effective operand size.
     */
    IEM_CIMPL_DEF_1(iemCImpl_call_16, uint16_t, uNewPC)
    {
        PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
        uint16_t uOldPC = pCtx->ip + cbInstr;
        if (uNewPC > pCtx->csHid.u32Limit)
            return iemRaiseGeneralProtectionFault0(pIemCpu);

        VBOXSTRICTRC rcStrict = iemMemStackPushU16(pIemCpu, uOldPC);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        pCtx->rip = uNewPC;
        return VINF_SUCCESS;
    }


    /**
     * Implements a 32-bit indirect call.
     *
     * @param   uNewPC          The new program counter (RIP) value (loaded from the
     *                          operand).
     * @param   enmEffOpSize    The effective operand size.
     */
    IEM_CIMPL_DEF_1(iemCImpl_call_32, uint32_t, uNewPC)
    {
        PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
        uint32_t uOldPC = pCtx->eip + cbInstr;
        if (uNewPC > pCtx->csHid.u32Limit)
            return iemRaiseGeneralProtectionFault0(pIemCpu);

        VBOXSTRICTRC rcStrict = iemMemStackPushU32(pIemCpu, uOldPC);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        pCtx->rip = uNewPC;
        return VINF_SUCCESS;
    }


    /**
     * Implements a 64-bit indirect call.
     *
     * @param   uNewPC          The new program counter (RIP) value (loaded from the
     *                          operand).
     * @param   enmEffOpSize    The effective operand size.
     */
    IEM_CIMPL_DEF_1(iemCImpl_call_64, uint64_t, uNewPC)
    {
        PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
        uint64_t uOldPC = pCtx->rip + cbInstr;
        if (!IEM_IS_CANONICAL(uNewPC))
            return iemRaiseGeneralProtectionFault0(pIemCpu);

        VBOXSTRICTRC rcStrict = iemMemStackPushU64(pIemCpu, uOldPC);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;

        pCtx->rip = uNewPC;
        return VINF_SUCCESS;
    }

The existing relative call implementations (iemCImpl_call_rel_16/32/64) are unchanged apart from the OldPC/NewPC → uOldPC/uNewPC renaming; iemCImpl_call_rel_64 still raises iemRaiseNotCanonical() for a non-canonical target, whereas the new iemCImpl_call_64 raises iemRaiseGeneralProtectionFault0().

Assertion fix in the segment register helper:

         PCPUMSELREGHID pHid = iemSRegGetHid(pIemCpu, iSegReg);

    -    Assert(iSegReg <  X86_SREG_GS && iSegReg != X86_SREG_CS);
    +    Assert(iSegReg <= X86_SREG_GS && iSegReg != X86_SREG_CS);
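As background, the guest-visible behaviour these helpers emulate is: compute the return address (next instruction), validate the target, push the return address, then transfer control. The following is a minimal standalone sketch of that flow for the 16-bit near call, not VirtualBox code; MiniCpu, push16, mini_call16 and the flat stack array are invented for illustration.

    #include <stdint.h>
    #include <stdbool.h>

    /* Hypothetical minimal CPU model: IP, SP, a code segment limit and a tiny stack. */
    typedef struct {
        uint16_t ip, sp;
        uint32_t cs_limit;
        uint8_t  stack[64];
    } MiniCpu;

    static void push16(MiniCpu *cpu, uint16_t val)
    {
        cpu->sp -= 2;                            /* the stack grows downwards */
        cpu->stack[cpu->sp]     = (uint8_t)val;  /* little endian */
        cpu->stack[cpu->sp + 1] = (uint8_t)(val >> 8);
    }

    /* 16-bit near call; 'target' is either next-ip + displacement (relative form)
       or the value loaded from the r/m operand (indirect form). */
    static bool mini_call16(MiniCpu *cpu, uint16_t target, uint16_t cbInstr)
    {
        uint16_t ret_ip = cpu->ip + cbInstr;     /* address of the following instruction */
        if (target > cpu->cs_limit)
            return false;                        /* the real code raises #GP(0) here */
        push16(cpu, ret_ip);                     /* push the return address */
        cpu->ip = target;                        /* transfer control */
        return true;
    }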
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r36768 → r36815

Adds assembly helpers for XCHG with a memory operand:

    ;
    ; XCHG for memory operands.  This implies locking.  No flag changes.
    ;
    ; Each function takes two arguments, first the pointer to the memory,
    ; then the pointer to the register.  They all return void.
    ;
    BEGINPROC iemAImpl_xchg_u8
            PROLOGUE_2_ARGS
            mov     T0_8, [A1]
            xchg    [A0], T0_8
            mov     [A1], T0_8
            EPILOGUE_2_ARGS
            ret
    ENDPROC iemAImpl_xchg_u8

    BEGINPROC iemAImpl_xchg_u16
            PROLOGUE_2_ARGS
            mov     T0_16, [A1]
            xchg    [A0], T0_16
            mov     [A1], T0_16
            EPILOGUE_2_ARGS
            ret
    ENDPROC iemAImpl_xchg_u16

    BEGINPROC iemAImpl_xchg_u32
            PROLOGUE_2_ARGS
            mov     T0_32, [A1]
            xchg    [A0], T0_32
            mov     [A1], T0_32
            EPILOGUE_2_ARGS
            ret
    ENDPROC iemAImpl_xchg_u32

    BEGINPROC iemAImpl_xchg_u64
    %ifdef RT_ARCH_AMD64
            PROLOGUE_2_ARGS
            mov     T0, [A1]
            xchg    [A0], T0
            mov     [A1], T0
            EPILOGUE_2_ARGS
            ret
    %else
            int3
    %endif
    ENDPROC iemAImpl_xchg_u64
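For readers coming from the C side: xchg with a memory operand is implicitly locked on x86, so the helpers above perform an atomic memory-with-register swap. A rough C equivalent of that semantics using the GCC/Clang __atomic builtins is sketched below; it only illustrates the contract, it is not how IEM implements it, and the function name is made up.

    #include <stdint.h>

    /* Atomically swap the value at pMem with *pReg; only the memory side needs atomicity. */
    static void xchg_u32_sketch(uint32_t *pMem, uint32_t *pReg)
    {
        /* __atomic_exchange_n stores *pReg into *pMem and returns the previous
           memory value in one atomic step, mirroring `xchg [mem], reg`. */
        *pReg = __atomic_exchange_n(pMem, *pReg, __ATOMIC_SEQ_CST);
    }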
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r36812 → r36815

Implements three previously stubbed opcodes – movzx Gv,Ew (0x0f 0xb7), xchg Eb,Gb (0x86) and xchg Ev,Gv (0x87) – as well as the group 5 near indirect call (iemOp_Grp5_calln_Ev, previously an AssertFailed stub returning VERR_NOT_IMPLEMENTED), and replaces an AssertFailedReturn(VERR_INTERNAL_ERROR_3) default case in the relative call decoder with IEM_NOT_REACHED_DEFAULT_CASE_RET().

    /** Opcode 0x0f 0xb7. */
    FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
    {
        IEMOP_MNEMONIC("movzx Gv,Ew");

        uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        /** @todo Not entirely sure how the operand size prefix is handled here,
         *        assuming that it will be ignored. Would be nice to have a few
         *        test for this. */
        /*
         * If rm is denoting a register, no more instruction bytes.
         */
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
            {
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
            {
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
        }
        return VINF_SUCCESS;
    }


    /** Opcode 0x86. */
    FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
        IEMOP_MNEMONIC("xchg Eb,Gb");

        /*
         * If rm is denoting a register, no more instruction bytes.
         */
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint8_t, uTmp1);
            IEM_MC_LOCAL(uint8_t, uTmp2);

            IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
            IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're accessing memory.
             */
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint8_t *, pu8Mem, 0);
            IEM_MC_ARG(uint8_t *, pu8Reg, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }


    /** Opcode 0x87. */
    FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
    {
        IEMOP_MNEMONIC("xchg Ev,Gv");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);

        /*
         * If rm is denoting a register, no more instruction bytes.
         */
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_HLP_NO_LOCK_PREFIX();

            switch (pIemCpu->enmEffOpSize)
            {
                case IEMMODE_16BIT:
                    IEM_MC_BEGIN(0, 2);
                    IEM_MC_LOCAL(uint16_t, uTmp1);
                    IEM_MC_LOCAL(uint16_t, uTmp2);

                    IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                case IEMMODE_32BIT:
                    IEM_MC_BEGIN(0, 2);
                    IEM_MC_LOCAL(uint32_t, uTmp1);
                    IEM_MC_LOCAL(uint32_t, uTmp2);

                    IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                case IEMMODE_64BIT:
                    IEM_MC_BEGIN(0, 2);
                    IEM_MC_LOCAL(uint64_t, uTmp1);
                    IEM_MC_LOCAL(uint64_t, uTmp2);

                    IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
        }
        else
        {
            /*
             * We're accessing memory.
             */
            switch (pIemCpu->enmEffOpSize)
            {
                case IEMMODE_16BIT:
                    IEM_MC_BEGIN(2, 2);
                    IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                    IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                    IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                    IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                case IEMMODE_32BIT:
                    IEM_MC_BEGIN(2, 2);
                    IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                    IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                    IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                    IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                case IEMMODE_64BIT:
                    IEM_MC_BEGIN(2, 2);
                    IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                    IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                    IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                    IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                    IEM_MC_ADVANCE_RIP();
                    IEM_MC_END();
                    return VINF_SUCCESS;

                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
        }
    }

Cleanup in the relative call decoder's operand size switch:

             return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
         }
    -    default:
    -        AssertFailedReturn(VERR_INTERNAL_ERROR_3);
    +
    +    IEM_NOT_REACHED_DEFAULT_CASE_RET();
     }

New group 5 near call implementation (replacing the AssertFailed stub):

    FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
    {
        IEMOP_MNEMONIC("calln Ev");
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
        IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* The new RIP is taken from a register. */
            switch (pIemCpu->enmEffOpSize)
            {
                case IEMMODE_16BIT:
                    IEM_MC_BEGIN(1, 0);
                    IEM_MC_ARG(uint16_t, u16Target, 0);
                    IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                case IEMMODE_32BIT:
                    IEM_MC_BEGIN(1, 0);
                    IEM_MC_ARG(uint32_t, u32Target, 0);
                    IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                case IEMMODE_64BIT:
                    IEM_MC_BEGIN(1, 0);
                    IEM_MC_ARG(uint64_t, u64Target, 0);
                    IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
        }
        else
        {
            /* The new RIP is taken from a register. */
            switch (pIemCpu->enmEffOpSize)
            {
                case IEMMODE_16BIT:
                    IEM_MC_BEGIN(1, 1);
                    IEM_MC_ARG(uint16_t, u16Target, 0);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                    IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                case IEMMODE_32BIT:
                    IEM_MC_BEGIN(1, 1);
                    IEM_MC_ARG(uint32_t, u32Target, 0);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                    IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                case IEMMODE_64BIT:
                    IEM_MC_BEGIN(1, 1);
                    IEM_MC_ARG(uint64_t, u64Target, 0);
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                    IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                    IEM_MC_END()
                    return VINF_SUCCESS;

                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
        }
    }

The change also drops a stray blank line before a closing brace further down in the file.
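All of the decoder fragments above pick apart the ModR/M byte the same way before dispatching to a register or memory path. The split itself is standard x86 decoding; the sketch below uses plain shifts and masks instead of the VBox X86_MODRM_* macros, and the ModRM/decode_modrm names are purely illustrative.

    #include <stdint.h>

    /* ModR/M byte layout: mod (bits 7-6), reg (bits 5-3), rm (bits 2-0). */
    typedef struct {
        uint8_t mod;   /* 3 = register operand, 0..2 = memory operand forms */
        uint8_t reg;   /* register or opcode-extension field */
        uint8_t rm;    /* register, or base of the effective address */
    } ModRM;

    static ModRM decode_modrm(uint8_t bRm)
    {
        ModRM m;
        m.mod = (bRm >> 6) & 0x3;
        m.reg = (bRm >> 3) & 0x7;   /* extended to 4 bits by REX.R in long mode */
        m.rm  =  bRm       & 0x7;   /* extended by REX.B */
        return m;
    }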
trunk/src/VBox/VMM/include/IEMInternal.h
r36794 → r36815

Adds declarations for the new XCHG assembly helpers, inserted ahead of the existing signed multiplication group:

    /** @name Exchange memory with register operations.
     * @{ */
    IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8, (uint8_t  *pu8Mem,  uint8_t  *pu8Reg));
    IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16,(uint16_t *pu16Mem, uint16_t *pu16Reg));
    IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32,(uint32_t *pu32Mem, uint32_t *pu32Reg));
    IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64,(uint64_t *pu64Mem, uint64_t *pu64Reg));
    /** @} */
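The contract of these helpers is simply "swap the memory operand with the register shadow the caller passes in". A hypothetical stand-in with the same shape, called directly outside the IEM_MC machinery purely to show the in/out behaviour (xchg_u32_like is not a real VBox function):

    #include <stdint.h>
    #include <assert.h>

    /* Same shape as iemAImpl_xchg_u32; the real helper lives in IEMAllAImpl.asm. */
    static void xchg_u32_like(uint32_t *pu32Mem, uint32_t *pu32Reg)
    {
        uint32_t uTmp = *pu32Mem;
        *pu32Mem = *pu32Reg;
        *pu32Reg = uTmp;
    }

    int main(void)
    {
        uint32_t uMem = 0x11111111, uReg = 0x22222222;
        xchg_u32_like(&uMem, &uReg);    /* after the call the two values are swapped */
        assert(uMem == 0x22222222 && uReg == 0x11111111);
        return 0;
    }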