Changeset 102394 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
- Timestamp: Nov 30, 2023 1:28:53 PM (14 months ago)
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
--- r102385
+++ r102394
…
 # pragma GCC diagnostic ignored "-Wunused-function"
 #endif
-
-
-/*********************************************************************************************************************************
-*   Defined Constants And Macros                                                                                                 *
-*********************************************************************************************************************************/
-/** Always count instructions for now. */
-#define IEMNATIVE_WITH_INSTRUCTION_COUNTING
 
 
…
 IEM_DECL_NATIVE_HLP_DEF(int, iemNativeHlpExecRaiseGp0,(PVMCPUCC pVCpu, uint8_t idxInstr))
 {
-    pVCpu->iem.s.cInstructions += idxInstr;
+#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
+    pVCpu->iem.s.idxTbCurInstr = idxInstr;
+#else
+    RT_NOREF(idxInstr);
+#endif
     iemRaiseGeneralProtectionFault0Jmp(pVCpu);
 #ifndef _MSC_VER
…
 /**
  * Used by TB code to load unsigned 8-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
+{
     return iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 16-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
+{
     return iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 32-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
+{
     return iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 64-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
+{
     return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
 
 
+/**
+ * Used by TB code to store unsigned 8-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value))
+{
+    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+}
+
+
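The raise-helper change above is the core of the new scheme: rather than threading idxInstr through every helper that might fail, a helper that can longjmp out of the TB records the current instruction index in the VCPU state first, so the unwind path can attribute the fault. A minimal, self-contained sketch of that record-before-longjmp pattern follows; the MiniVCpu struct and helper names are invented for illustration and are not the real VMCPUCC layout.

    #include <csetjmp>
    #include <cstdint>
    #include <cstdio>

    // Invented stand-in for the per-VCPU state; only what the sketch needs.
    struct MiniVCpu
    {
        std::jmp_buf JmpBuf;        // where the raise path unwinds to
        uint8_t      idxTbCurInstr; // instruction index within the current TB
    };

    // Stand-in for a raise helper like iemRaiseGeneralProtectionFault0Jmp:
    // it never returns normally, it unwinds via longjmp.
    [[noreturn]] static void raiseGp0Jmp(MiniVCpu *pVCpu)
    {
        std::longjmp(pVCpu->JmpBuf, 1);
    }

    // The changeset's pattern: record which TB instruction we were executing
    // *before* raising, so the handler can attribute the fault correctly.
    static void hlpExecRaiseGp0(MiniVCpu *pVCpu, uint8_t idxInstr)
    {
        pVCpu->idxTbCurInstr = idxInstr; // the #ifdef'd store in the real helper
        raiseGp0Jmp(pVCpu);
    }

    int main()
    {
        MiniVCpu VCpu = {};
        if (setjmp(VCpu.JmpBuf) == 0)
            hlpExecRaiseGp0(&VCpu, 42);  // simulate a #GP(0) in TB instruction 42
        else
            std::printf("faulted in TB instruction %u\n", VCpu.idxTbCurInstr);
        return 0;
    }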
+/**
+ * Used by TB code to store unsigned 16-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value))
+{
+    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+}
+
+
+/**
+ * Used by TB code to store unsigned 32-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value))
+{
+    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+}
+
+
+/**
+ * Used by TB code to store unsigned 64-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value))
+{
+    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+}
+
+
 /* Flat memory helpers: */
…
 /**
  * Used by TB code to load unsigned 8-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
+{
     return iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 16-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
+{
     return iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 32-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
+{
     return iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
…
 /**
  * Used by TB code to load unsigned 64-bit data w/ segmentation.
  */
-IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
-{
-    RT_NOREF(idxInstr); /** @todo idxInstr */
+IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
+{
     return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
 }
+
+
+/**
+ * Used by TB code to store unsigned 8-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value))
+{
+    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+}
+
+
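The flat helper variants exist because in 64-bit mode and flat 32-bit protected mode the guest address is already linear, so the iSegReg argument, and the base/limit work behind it, drops out of the helper ABI entirely. A trivial sketch of the two helper shapes, with an invented toy guest memory and segment-base table:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Invented toy guest memory and per-segment base table, only to contrast
    // the two helper shapes (segmented vs. flat) used in the changeset.
    static uint8_t  g_abGuestMem[64];
    static uint64_t g_auSegBase[6] = { 0, 0, 16, 0, 0, 0 }; // pretend seg 2 has base 16

    // Segmented fetch: resolve the segment base first (the real helpers also
    // check segment limits and attributes before touching guest memory).
    static uint32_t fetchDataU32(uint64_t GCPtrMem, uint8_t iSegReg)
    {
        uint32_t u32;
        std::memcpy(&u32, &g_abGuestMem[g_auSegBase[iSegReg] + GCPtrMem], sizeof(u32));
        return u32;
    }

    // Flat fetch: GCPtrMem is already linear, so the segment argument (and all
    // the work behind it) disappears from the helper ABI.
    static uint32_t flatFetchDataU32(uint64_t GCPtrMem)
    {
        uint32_t u32;
        std::memcpy(&u32, &g_abGuestMem[GCPtrMem], sizeof(u32));
        return u32;
    }

    int main()
    {
        g_abGuestMem[16] = 0x2a;
        std::printf("seg: %#x flat: %#x\n", fetchDataU32(0, 2), flatFetchDataU32(16));
        return 0;
    }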
+/**
+ * Used by TB code to store unsigned 16-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value))
+{
+    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+}
+
+
+/**
+ * Used by TB code to store unsigned 32-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value))
+{
+    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+}
+
+
+/**
+ * Used by TB code to store unsigned 64-bit data w/ segmentation.
+ */
+IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value))
+{
+    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+}
+
…
  *    ARM64: w0 = call status code.
  */
-    off = iemNativeEmitLoadGprImm64(pReNative, off, ARMV8_A64_REG_X2, idxInstr); /** @todo 32-bit imm load? Fixed counter register? */
+# ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
+    off = iemNativeEmitLoadGprImm64(pReNative, off, ARMV8_A64_REG_X2, idxInstr);
+# endif
     off = iemNativeEmitLoadGprFromVCpuU32(pReNative, off, ARMV8_A64_REG_X3, RT_UOFFSETOF(VMCPUCC, iem.s.rcPassUp));
…
     iemNativeRegFlushGuestShadows(pReNative, UINT64_MAX); /** @todo optimize this */
     off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 4);
+
+#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
+    /* The threaded function may throw / long jmp, so set current instruction
+       number if we're counting. */
+    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, pCallEntry->idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
+#endif
+
     uint8_t const cParams = g_acIemThreadedFunctionUsedArgs[pCallEntry->enmFunction];
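The threaded-call hunk above stores pCallEntry->idxInstr into iem.s.idxTbCurInstr before emitting the call, because the threaded function may longjmp and never return to the TB. The emitted instruction addresses the field by byte offset (RT_UOFFSETOF) rather than by name; a small sketch of such an offset-based store, using an invented stand-in struct:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Invented nested state, shaped loosely like VMCPUCC's embedded IEM state.
    struct MiniIemState { uint8_t idxTbCurInstr; uint32_t rcPassUp; };
    struct MiniVCpu     { uint64_t uOtherState; MiniIemState iem; };

    // What the recompiler emits is machine code for "store imm8 at
    // [pVCpu + offField]"; the offset is a compile-time constant, exactly the
    // role RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr) plays in the changeset.
    static void storeImmToVCpuU8(MiniVCpu *pVCpu, uint8_t bImm, size_t offField)
    {
        std::memcpy(reinterpret_cast<uint8_t *>(pVCpu) + offField, &bImm, sizeof(bImm));
    }

    int main()
    {
        // Portable nested-member offset: outer offset plus inner offset.
        const size_t offField = offsetof(MiniVCpu, iem) + offsetof(MiniIemState, idxTbCurInstr);
        MiniVCpu VCpu = {};
        storeImmToVCpuU8(&VCpu, 7, offField);
        return VCpu.iem.idxTbCurInstr == 7 ? 0 : 1;
    }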
…
 /*********************************************************************************************************************************
-*   Memory fetches (IEM_MEM_FETCH_XXX).                                                                                          *
+*   Memory fetches and stores common                                                                                             *
 *********************************************************************************************************************************/
 
-
-
-#define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u8Dst, a_iSeg, a_GCPtrMem, sizeof(uint8_t))
-
-#define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t))
-
-#define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t))
-
-#define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u64Dst, a_iSeg, a_GCPtrMem, sizeof(uint64_t))
-
-
-#define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u8Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t))
-
-#define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t))
-
-#define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t))
-
-#define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u64Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t))
-
-
-#define IEM_MC_FETCH_MEM_U16_DISP(a_u16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), a_offDisp)
-
-#define IEM_MC_FETCH_MEM_U32_DISP(a_u32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
-    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), a_offDisp)
-
-
-/** Emits code for IEM_MC_FETCH_MEM_U8/16/32/64 and
- * IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 (iSegReg = UINT8_MAX). */
+/** Emits code for IEM_MC_FETCH_MEM_U8/16/32/64 and IEM_MC_STORE_MEM_U8/16/32/64,
+ * and IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 and IEM_MC_STORE_MEM_FLAT_U8/16/32/64
+ * (with iSegReg = UINT8_MAX). */
 DECL_INLINE_THROW(uint32_t)
-iemNativeEmitMemFetchDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
-                                uint8_t idxVarDst, uint8_t iSegReg, uint8_t idxVarGCPtrMem, uint8_t cbMem, uint8_t offDisp = 0)
-{
-    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarDst);
+iemNativeEmitMemFetchStoreDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarValue, uint8_t iSegReg,
+                                     uint8_t idxVarGCPtrMem, uint8_t cbMem, bool fFetch, uintptr_t pfnFunction, uint8_t idxInstr,
+                                     uint8_t offDisp = 0)
+{
+    /*
+     * Assert sanity.
+     */
+    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarValue);
+    Assert(   fFetch
+           || pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Immediate
+           || pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Stack);
     IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarGCPtrMem);
     AssertStmt(   pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate
…
     Assert(iSegReg < 6 || iSegReg == UINT8_MAX);
     Assert(cbMem == 1 || cbMem == 2 || cbMem == 4 || cbMem == 8);
-    RT_NOREF(idxInstr);
-
+    AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4);
 #ifdef VBOX_STRICT
-    /*
-     * Check that the fExec flags we've got make sense.
-     */
-    off = iemNativeEmitExecFlagsCheck(pReNative, off, pReNative->fExec);
-#endif
-
-    /*
-     * To keep things simple we have to commit any pending writes first as we
-     * may end up making calls.
-     */
-    /** @todo we could postpone this till we make the call and reload the
-     *        registers after returning from the call. Not sure if that's sensible or
-     *        not, though. */
-    off = iemNativeRegFlushPendingWrites(pReNative, off);
-
-    /*
-     * Move/spill/flush stuff out of call-volatile registers.
-     * This is the easy way out. We could contain this to the tlb-miss branch
-     * by saving and restoring active stuff here.
-     */
-    /** @todo save+restore active registers and maybe guest shadows in tlb-miss. */
-    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
-
-    /*
-     * Define labels and allocate the result register (trying for the return
-     * register if we can).
-     */
-    uint16_t const uTlbSeqNo       = pReNative->uTlbSeqNo++;
-    uint32_t const idxLabelTlbMiss = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbMiss, UINT32_MAX, uTlbSeqNo);
-    uint32_t const idxLabelTlbDone = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbDone, UINT32_MAX, uTlbSeqNo);
-    uint8_t  const idxRegDst       = !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG))
-                                   ? iemNativeVarSetRegister(pReNative, idxVarDst, IEMNATIVE_CALL_RET_GREG, off)
-                                   : iemNativeVarAllocRegister(pReNative, idxVarDst, &off);
-
-    /*
-     * First we try to go via the TLB.
-     */
-//pReNative->pInstrBuf[off++] = 0xcc;
-    /** @todo later. */
-
-    /*
-     * Call helper to do the fetching.
-     * We flush all guest register shadow copies here.
-     */
-    iemNativeLabelDefine(pReNative, idxLabelTlbMiss, off);
-
-    uint8_t   idxRegArgGCPtrMem;
-    uint8_t   idxRegArgInstrIdx;
-    uintptr_t pfnFunction;
     if (   (   (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT
            || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_PROT_FLAT
…
        || (iSegReg == X86_SREG_CS && (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT) ))
     {
-        AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4);
+        Assert(iSegReg == UINT8_MAX);
         switch (cbMem)
         {
-            case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU8; break;
-            case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU16; break;
-            case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU32; break;
-            case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU64; break;
-            default:
-                AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
+            case 1: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8  : (uintptr_t)iemNativeHlpMemFlatStoreDataU8 )); break;
+            case 2: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16 : (uintptr_t)iemNativeHlpMemFlatStoreDataU16)); break;
+            case 4: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32 : (uintptr_t)iemNativeHlpMemFlatStoreDataU32)); break;
+            case 8: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU64 : (uintptr_t)iemNativeHlpMemFlatStoreDataU64)); break;
         }
-        idxRegArgInstrIdx = IEMNATIVE_CALL_ARG2_GREG;
-        idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG1_GREG;
     }
     else
     {
-        AssertStmt(iSegReg < 6, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_SEG_REG_NO));
-        AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 3);
+        Assert(iSegReg < 6);
         switch (cbMem)
         {
-            case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU8; break;
-            case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU16; break;
-            case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU32; break;
-            case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU64; break;
-            default:
-                AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
+            case 1: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU8  : (uintptr_t)iemNativeHlpMemStoreDataU8 )); break;
+            case 2: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU16 : (uintptr_t)iemNativeHlpMemStoreDataU16)); break;
+            case 4: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU32 : (uintptr_t)iemNativeHlpMemStoreDataU32)); break;
+            case 8: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU64 : (uintptr_t)iemNativeHlpMemStoreDataU64)); break;
         }
-        off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, iSegReg);
-        idxRegArgInstrIdx = IEMNATIVE_CALL_ARG3_GREG;
-        idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG2_GREG;
     }
-
-    off = iemNativeEmitLoadGpr8Imm(pReNative, off, idxRegArgInstrIdx, idxInstr);
-
+#endif
+
+#ifdef VBOX_STRICT
+    /*
+     * Check that the fExec flags we've got make sense.
+     */
+    off = iemNativeEmitExecFlagsCheck(pReNative, off, pReNative->fExec);
+#endif
+
+    /*
+     * To keep things simple we have to commit any pending writes first as we
+     * may end up making calls.
+     */
+    /** @todo we could postpone this till we make the call and reload the
+     *        registers after returning from the call. Not sure if that's sensible or
+     *        not, though. */
+    off = iemNativeRegFlushPendingWrites(pReNative, off);
+
+    /*
+     * Move/spill/flush stuff out of call-volatile registers.
+     * This is the easy way out. We could contain this to the tlb-miss branch
+     * by saving and restoring active stuff here.
+     */
+    /** @todo save+restore active registers and maybe guest shadows in tlb-miss. */
+    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
+
+    /*
+     * Define labels and allocate the result register (trying for the return
+     * register if we can).
+     */
+    uint16_t const uTlbSeqNo        = pReNative->uTlbSeqNo++;
+    uint32_t const idxLabelTlbMiss  = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbMiss, UINT32_MAX, uTlbSeqNo);
+    uint32_t const idxLabelTlbDone  = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbDone, UINT32_MAX, uTlbSeqNo);
+    uint8_t  const idxRegValueFetch = !fFetch ? UINT8_MAX /* special case value storing below */
+                                    : !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG))
+                                    ? iemNativeVarSetRegister(pReNative, idxVarValue, IEMNATIVE_CALL_RET_GREG, off)
+                                    : iemNativeVarAllocRegister(pReNative, idxVarValue, &off);
+
+    /*
+     * First we try to go via the TLB.
+     */
+//pReNative->pInstrBuf[off++] = 0xcc;
+    /** @todo later. */
+
+    /*
+     * Call helper to do the fetching.
+     * We flush all guest register shadow copies here.
+     */
+    iemNativeLabelDefine(pReNative, idxLabelTlbMiss, off);
+
+#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
+    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
+#else
+    RT_NOREF(idxInstr);
+#endif
+
+    uint8_t idxRegArgValue;
+    if (iSegReg == UINT8_MAX)
+        idxRegArgValue = IEMNATIVE_CALL_ARG2_GREG;
+    else
+    {
+        /* IEMNATIVE_CALL_ARG2_GREG = iSegReg */
+        AssertStmt(iSegReg < 6, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_SEG_REG_NO));
+        off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, iSegReg);
+
+        idxRegArgValue = IEMNATIVE_CALL_ARG3_GREG;
+    }
+
+    /* IEMNATIVE_CALL_ARG2/3_GREG = uValue (idxVarValue) - if store */
+    if (!fFetch)
+    {
+        if (pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Immediate)
+            off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArgValue, pReNative->Core.aVars[idxVarValue].u.uValue);
+        else
+        {
+            uint8_t const idxRegVarValue = pReNative->Core.aVars[idxVarValue].idxReg;
+            if (idxRegVarValue < RT_ELEMENTS(pReNative->Core.aHstRegs))
+            {
+                Assert(!(RT_BIT_32(idxRegVarValue) & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
+                off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegArgValue, idxRegVarValue);
+            }
+            else
+            {
+                uint8_t const idxStackSlot = pReNative->Core.aVars[idxVarValue].idxStackSlot;
+                AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED));
+                off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgValue, iemNativeStackCalcBpDisp(idxStackSlot));
+            }
+        }
+    }
+
+    /* IEMNATIVE_CALL_ARG1_GREG = GCPtrMem */
     if (pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate)
-        off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArgGCPtrMem,
+        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG1_GREG,
                                         pReNative->Core.aVars[idxVarGCPtrMem].u.uValue + offDisp);
     else
…
             Assert(!(RT_BIT_32(idxRegVarGCPtrMem) & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
             if (!offDisp)
-                off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegArgGCPtrMem, idxRegVarGCPtrMem);
+                off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegVarGCPtrMem);
             else
-                off = iemNativeEmitLoadGprFromGprWithAddend(pReNative, off, idxRegArgGCPtrMem, idxRegVarGCPtrMem, offDisp);
+                off = iemNativeEmitLoadGprFromGprWithAddend(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegVarGCPtrMem, offDisp);
         }
         else
…
             AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED));
             AssertFailed(); /** @todo This was probably caused by iemNativeRegMoveAndFreeAndFlushAtCall above. Improve... */
-            off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgGCPtrMem, iemNativeStackCalcBpDisp(idxStackSlot));
+            off = iemNativeEmitLoadGprByBp(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, iemNativeStackCalcBpDisp(idxStackSlot));
             if (offDisp)
-                off = iemNativeEmitAddGprImm(pReNative, off, idxRegArgGCPtrMem, offDisp);
+                off = iemNativeEmitAddGprImm(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, offDisp);
         }
     }
+
+    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
     off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
+
+    /* Done setting up parameters, make the call. */
     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
 
-    /* Put the result in the right register. */
-    Assert(idxRegDst == pReNative->Core.aVars[idxVarDst].idxReg);
-    if (idxRegDst != IEMNATIVE_CALL_RET_GREG)
-        off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegDst, IEMNATIVE_CALL_RET_GREG);
+    /*
+     * Put the result in the right register if this is a fetch.
+     */
+    if (fFetch)
+    {
+        Assert(idxRegValueFetch == pReNative->Core.aVars[idxVarValue].idxReg);
+        if (idxRegValueFetch != IEMNATIVE_CALL_RET_GREG)
+            off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxVarValue, IEMNATIVE_CALL_RET_GREG);
+    }
 
     iemNativeLabelDefine(pReNative, idxLabelTlbDone, off);
…
     return off;
 }
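In the store path above, the value variable has to be materialized into its argument register before the call, and it can live in one of three places: as an immediate, in a call-preserved host register, or spilled to a stack slot. A sketch of that three-way dispatch follows; the descriptor struct is invented and the printf calls stand in for actual code emission.

    #include <cstdint>
    #include <cstdio>

    // Invented variable descriptor, shaped like the enmKind/idxReg/idxStackSlot
    // fields the emitter inspects (kIemNativeVarKind_Immediate / _Stack).
    enum class VarKind { Immediate, Stack };
    struct MiniVar
    {
        VarKind  enmKind;
        uint64_t uValue;       // valid when enmKind == Immediate
        uint8_t  idxReg;       // host register, 0xff when not register-resident
        uint8_t  idxStackSlot; // spill slot, 0xff when none
    };

    // The three-way dispatch for materializing a store-value into an argument
    // register; printf stands in for emitting the actual instruction bytes.
    static void loadValueArg(const MiniVar &Var, unsigned idxArgReg)
    {
        if (Var.enmKind == VarKind::Immediate)
            std::printf("mov  r%u, #%llu\n", idxArgReg, (unsigned long long)Var.uValue);
        else if (Var.idxReg != 0xff)
            std::printf("mov  r%u, r%u\n", idxArgReg, (unsigned)Var.idxReg);  // value already live in a register
        else
            std::printf("ldr  r%u, [bp + slot %u]\n", idxArgReg, (unsigned)Var.idxStackSlot); // reload from spill slot
    }

    int main()
    {
        loadValueArg(MiniVar{ VarKind::Immediate, 0x42, 0xff, 0xff }, 2); // immediate
        loadValueArg(MiniVar{ VarKind::Stack,     0,    5,    0xff }, 2); // register-resident
        loadValueArg(MiniVar{ VarKind::Stack,     0,    0xff, 3    }, 2); // spilled
        return 0;
    }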
+
+
+
+/*********************************************************************************************************************************
+*   Memory fetches (IEM_MEM_FETCH_XXX).                                                                                          *
+*********************************************************************************************************************************/
+
+#define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, a_iSeg, a_GCPtrMem, sizeof(uint8_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_U16_DISP(a_u16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr, a_offDisp)
+
+#define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_U32_DISP(a_u32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp)
+
+#define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, sizeof(uint64_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr)
+
+
+#define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr)
+
+#define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t), true /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)
+
+
+
+/*********************************************************************************************************************************
+*   Memory stores (IEM_MEM_STORE_XXX).                                                                                           *
+*********************************************************************************************************************************/
+
+#define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, sizeof(uint8_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_U16(a_iSeg, a_GCPtrMem, a_u16Dst) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, sizeof(uint16_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_U32(a_iSeg, a_GCPtrMem, a_u32Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, sizeof(uint32_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_U64(a_iSeg, a_GCPtrMem, a_u64Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, sizeof(uint64_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)
+
+
+#define IEM_MC_STORE_MEM_FLAT_U8(a_GCPtrMem, a_u8Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, sizeof(uint8_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_FLAT_U16(a_GCPtrMem, a_u16Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, sizeof(uint16_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_FLAT_U32(a_GCPtrMem, a_u32Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, sizeof(uint32_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)
+
+#define IEM_MC_STORE_MEM_FLAT_U64(a_GCPtrMem, a_u64Value) \
+    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, sizeof(uint64_t), false /*fFetch*/, \
+                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)
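A note on the design of the new macro layer: each IEM_MC_* macro now selects the helper itself and passes its address into the common worker, and in strict builds the worker re-derives which helper the size/direction pair should have selected and asserts agreement, which catches copy-paste slips in the macro table. A minimal sketch of that cross-check pattern, with invented stand-in helpers:

    #include <cassert>
    #include <cstdint>

    // Invented stand-ins for the fetch/store helper pair of one operand size.
    static uint32_t fetchU32(void *pVCpu, uint64_t GCPtrMem) { (void)pVCpu; (void)GCPtrMem; return 0; }
    static void     storeU32(void *pVCpu, uint64_t GCPtrMem, uint32_t uValue) { (void)pVCpu; (void)GCPtrMem; (void)uValue; }

    // The macro layer picks the helper; the common worker (in strict builds)
    // re-derives the expected helper from cbMem + fFetch and asserts agreement.
    static void emitMemOpCommon(uint8_t cbMem, bool fFetch, uintptr_t pfnFunction)
    {
    #ifndef NDEBUG
        switch (cbMem)
        {
            case 4:
                assert(pfnFunction == (fFetch ? (uintptr_t)fetchU32 : (uintptr_t)storeU32));
                break;
            // ... the real code also handles cases 1, 2 and 8 ...
        }
    #endif
        (void)cbMem; (void)fFetch; (void)pfnFunction; // emission of the call would go here
    }

    int main()
    {
        emitMemOpCommon(4, true /*fFetch*/,  (uintptr_t)fetchU32); // matches: OK
        emitMemOpCommon(4, false /*fFetch*/, (uintptr_t)storeU32); // matches: OK
        return 0;
    }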