VirtualBox

Ignore:
Timestamp:
Nov 30, 2023 1:28:53 PM (14 months ago)
Author:
vboxsync
Message:

VMM/IEM: Refactored the native IEM_MC_FETCH_MEM_Uxx code so it can be shared with the store MCs and is a little bit more efficient. Found 2 instructions that weren't getting FLAT variants because they were using hardcoded addresses (Ov/Ob). Implemented a simple solution for the instruction counting over longjmp/throw. bugref:10371

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r102385 r102394  
    124124# pragma GCC diagnostic ignored "-Wunused-function"
    125125#endif
    126 
    127 
    128 
    129 /*********************************************************************************************************************************
    130 *   Defined Constants And Macros                                                                                                 *
    131 *********************************************************************************************************************************/
    132 /** Always count instructions for now. */
    133 #define IEMNATIVE_WITH_INSTRUCTION_COUNTING
    134126
    135127
     
    15861578IEM_DECL_NATIVE_HLP_DEF(int, iemNativeHlpExecRaiseGp0,(PVMCPUCC pVCpu, uint8_t idxInstr))
    15871579{
    1588     pVCpu->iem.s.cInstructions += idxInstr;
     1580#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
     1581    pVCpu->iem.s.idxTbCurInstr = idxInstr;
     1582#else
     1583    RT_NOREF(idxInstr);
     1584#endif
    15891585    iemRaiseGeneralProtectionFault0Jmp(pVCpu);
    15901586#ifndef _MSC_VER
     
    15991595 * Used by TB code to load unsigned 8-bit data w/ segmentation.
    16001596 */
    1601 IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1602 {
    1603     RT_NOREF(idxInstr); /** @todo idxInstr */
     1597IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
     1598{
    16041599    return iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16051600}
     
    16091604 * Used by TB code to load unsigned 16-bit data w/ segmentation.
    16101605 */
    1611 IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1612 {
    1613     RT_NOREF(idxInstr); /** @todo idxInstr */
     1606IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
     1607{
    16141608    return iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16151609}
     
    16191613 * Used by TB code to load unsigned 32-bit data w/ segmentation.
    16201614 */
    1621 IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1622 {
    1623     RT_NOREF(idxInstr); /** @todo idxInstr */
     1615IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
     1616{
    16241617    return iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16251618}
     
    16291622 * Used by TB code to load unsigned 64-bit data w/ segmentation.
    16301623 */
    1631 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1632 {
    1633     RT_NOREF(idxInstr); /** @todo idxInstr */
     1624IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
     1625{
    16341626    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16351627}
    16361628
    16371629
     1630/**
     1631 * Used by TB code to store unsigned 8-bit data w/ segmentation.
     1632 */
     1633IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value))
     1634{
     1635    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
     1636}
     1637
     1638
     1639/**
     1640 * Used by TB code to store unsigned 16-bit data w/ segmentation.
     1641 */
     1642IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value))
     1643{
     1644    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
     1645}
     1646
     1647
     1648/**
     1649 * Used by TB code to store unsigned 32-bit data w/ segmentation.
     1650 */
     1651IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value))
     1652{
     1653    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
     1654}
     1655
     1656
     1657/**
     1658 * Used by TB code to store unsigned 64-bit data w/ segmentation.
     1659 */
     1660IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value))
     1661{
     1662    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
     1663}
     1664
     1665
    16381666/* Flat memory helpers: */
    16391667
     
    16411669 * Used by TB code to load unsigned 8-bit data w/ segmentation.
    16421670 */
    1643 IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1644 {
    1645     RT_NOREF(idxInstr); /** @todo idxInstr */
     1671IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
     1672{
    16461673    return iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16471674}
     
    16511678 * Used by TB code to load unsigned 16-bit data w/ segmentation.
    16521679 */
    1653 IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1654 {
    1655     RT_NOREF(idxInstr); /** @todo idxInstr */
     1680IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
     1681{
    16561682    return iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16571683}
     
    16611687 * Used by TB code to load unsigned 32-bit data w/ segmentation.
    16621688 */
    1663 IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1664 {
    1665     RT_NOREF(idxInstr); /** @todo idxInstr */
     1689IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
     1690{
    16661691    return iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16671692}
     
    16711696 * Used by TB code to load unsigned 64-bit data w/ segmentation.
    16721697 */
    1673 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
    1674 {
    1675     RT_NOREF(idxInstr); /** @todo idxInstr */
     1698IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
     1699{
    16761700    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
    16771701}
     1702
     1703
     1704/**
     1705 * Used by TB code to store unsigned 8-bit data w/ segmentation.
     1706 */
     1707IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value))
     1708{
     1709    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
     1710}
     1711
     1712
     1713/**
     1714 * Used by TB code to store unsigned 16-bit data w/ segmentation.
     1715 */
     1716IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value))
     1717{
     1718    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
     1719}
     1720
     1721
     1722/**
     1723 * Used by TB code to store unsigned 32-bit data w/ segmentation.
     1724 */
     1725IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value))
     1726{
     1727    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
     1728}
     1729
     1730
     1731/**
     1732 * Used by TB code to store unsigned 64-bit data w/ segmentation.
     1733 */
     1734IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value))
     1735{
     1736    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
     1737}
     1738
    16781739
    16791740
     
    36583719     * ARM64: w0 = call status code.
    36593720     */
    3660     off = iemNativeEmitLoadGprImm64(pReNative, off, ARMV8_A64_REG_X2, idxInstr); /** @todo 32-bit imm load? Fixed counter register? */
     3721# ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
     3722    off = iemNativeEmitLoadGprImm64(pReNative, off, ARMV8_A64_REG_X2, idxInstr);
     3723# endif
    36613724    off = iemNativeEmitLoadGprFromVCpuU32(pReNative, off, ARMV8_A64_REG_X3, RT_UOFFSETOF(VMCPUCC, iem.s.rcPassUp));
    36623725
     
    39113974    iemNativeRegFlushGuestShadows(pReNative, UINT64_MAX); /** @todo optimize this */
    39123975    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 4);
     3976
     3977#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
     3978    /* The threaded function may throw / long jmp, so set current instruction
     3979       number if we're counting. */
     3980    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, pCallEntry->idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
     3981#endif
     3982
    39133983    uint8_t const cParams = g_acIemThreadedFunctionUsedArgs[pCallEntry->enmFunction];
    39143984
     
    76387708
    76397709/*********************************************************************************************************************************
    7640 *   Memory fetches (IEM_MEM_FETCH_XXX).                                                                                          *
     7710*   Memory fetches and stores common                                                                                             *
    76417711*********************************************************************************************************************************/
    76427712
    7643 
    7644 
    7645 
    7646 #define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \
    7647     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u8Dst, a_iSeg, a_GCPtrMem, sizeof(uint8_t))
    7648 
    7649 #define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \
    7650     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t))
    7651 
    7652 #define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \
    7653     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t))
    7654 
    7655 #define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
    7656     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u64Dst, a_iSeg, a_GCPtrMem, sizeof(uint64_t))
    7657 
    7658 
    7659 #define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \
    7660     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u8Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t))
    7661 
    7662 #define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \
    7663     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t))
    7664 
    7665 #define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \
    7666     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t))
    7667 
    7668 #define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
    7669     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u64Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t))
    7670 
    7671 
    7672 #define IEM_MC_FETCH_MEM_U16_DISP(a_u16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
    7673     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), a_offDisp)
    7674 
    7675 #define IEM_MC_FETCH_MEM_U32_DISP(a_u32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
    7676     off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), a_offDisp)
    7677 
    7678 
    7679 /** Emits code for IEM_MC_FETCH_MEM_U8/16/32/64 and
    7680  *  IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 (iSegReg = UINT8_MAX). */
     7713/** Emits code for IEM_MC_FETCH_MEM_U8/16/32/64 and IEM_MC_STORE_MEM_U8/16/32/64,
     7714 * and IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 and IEM_MC_STORE_MEM_FLAT_U8/16/32/64
     7715 * (with iSegReg = UINT8_MAX). */
    76817716DECL_INLINE_THROW(uint32_t)
    7682 iemNativeEmitMemFetchDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
    7683                                 uint8_t idxVarDst, uint8_t iSegReg, uint8_t idxVarGCPtrMem, uint8_t cbMem, uint8_t offDisp = 0)
    7684 {
    7685     IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarDst);
     7717iemNativeEmitMemFetchStoreDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off,  uint8_t idxVarValue, uint8_t iSegReg,
     7718                                     uint8_t idxVarGCPtrMem, uint8_t cbMem, bool fFetch, uintptr_t pfnFunction, uint8_t idxInstr,
     7719                                     uint8_t offDisp = 0)
     7720{
     7721    /*
     7722     * Assert sanity.
     7723     */
     7724    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarValue);
     7725    Assert(   fFetch
     7726           || pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Immediate
     7727           || pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Stack);
    76867728    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarGCPtrMem);
    76877729    AssertStmt(   pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate
     
    76907732    Assert(iSegReg < 6 || iSegReg == UINT8_MAX);
    76917733    Assert(cbMem == 1 || cbMem == 2 || cbMem == 4 || cbMem == 8);
    7692     RT_NOREF(idxInstr);
    7693 
     7734    AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4);
    76947735#ifdef VBOX_STRICT
    7695     /*
    7696      * Check that the fExec flags we've got make sense.
    7697      */
    7698     off = iemNativeEmitExecFlagsCheck(pReNative, off, pReNative->fExec);
    7699 #endif
    7700 
    7701     /*
    7702      * To keep things simple we have to commit any pending writes first as we
    7703      * may end up making calls.
    7704      */
    7705     /** @todo we could postpone this till we make the call and reload the
    7706      * registers after returning from the call. Not sure if that's sensible or
    7707      * not, though. */
    7708     off = iemNativeRegFlushPendingWrites(pReNative, off);
    7709 
    7710     /*
    7711      * Move/spill/flush stuff out of call-volatile registers.
    7712      * This is the easy way out. We could contain this to the tlb-miss branch
    7713      * by saving and restoring active stuff here.
    7714      */
    7715     /** @todo save+restore active registers and maybe guest shadows in tlb-miss.  */
    7716     off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
    7717 
    7718     /*
    7719      * Define labels and allocate the result register (trying for the return
    7720      * register if we can).
    7721      */
    7722     uint16_t const uTlbSeqNo       = pReNative->uTlbSeqNo++;
    7723     uint32_t const idxLabelTlbMiss = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbMiss, UINT32_MAX, uTlbSeqNo);
    7724     uint32_t const idxLabelTlbDone = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbDone, UINT32_MAX, uTlbSeqNo);
    7725     uint8_t  const idxRegDst       = !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG))
    7726                                    ? iemNativeVarSetRegister(pReNative, idxVarDst, IEMNATIVE_CALL_RET_GREG, off)
    7727                                    : iemNativeVarAllocRegister(pReNative, idxVarDst, &off);
    7728 
    7729     /*
    7730      * First we try to go via the TLB.
    7731      */
    7732 //pReNative->pInstrBuf[off++] = 0xcc;
    7733     /** @todo later. */
    7734 
    7735     /*
    7736      * Call helper to do the fetching.
    7737      * We flush all guest register shadow copies here.
    7738      */
    7739     iemNativeLabelDefine(pReNative, idxLabelTlbMiss, off);
    7740 
    7741     uint8_t   idxRegArgGCPtrMem;
    7742     uint8_t   idxRegArgInstrIdx;
    7743     uintptr_t pfnFunction;
    77447736    if (   (   (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT
    77457737            || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_PROT_FLAT
     
    77517743            || (iSegReg == X86_SREG_CS && (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT) ))
    77527744    {
    7753         AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4);
     7745        Assert(iSegReg == UINT8_MAX);
    77547746        switch (cbMem)
    77557747        {
    7756             case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU8;  break;
    7757             case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU16; break;
    7758             case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU32; break;
    7759             case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU64; break;
    7760             default:
    7761                 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
     7748            case 1: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU8  : (uintptr_t)iemNativeHlpMemFlatStoreDataU8 )); break;
     7749            case 2: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU16 : (uintptr_t)iemNativeHlpMemFlatStoreDataU16)); break;
     7750            case 4: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU32 : (uintptr_t)iemNativeHlpMemFlatStoreDataU32)); break;
     7751            case 8: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFlatFetchDataU64 : (uintptr_t)iemNativeHlpMemFlatStoreDataU64)); break;
    77627752        }
    7763         idxRegArgInstrIdx = IEMNATIVE_CALL_ARG2_GREG;
    7764         idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG1_GREG;
    77657753    }
    77667754    else
    77677755    {
    7768         AssertStmt(iSegReg < 6, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_SEG_REG_NO));
    7769         AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 3);
     7756        Assert(iSegReg < 6);
    77707757        switch (cbMem)
    77717758        {
    7772             case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU8;  break;
    7773             case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU16; break;
    7774             case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU32; break;
    7775             case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU64; break;
    7776             default:
    7777                 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
     7759            case 1: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU8  : (uintptr_t)iemNativeHlpMemStoreDataU8 ));  break;
     7760            case 2: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU16 : (uintptr_t)iemNativeHlpMemStoreDataU16)); break;
     7761            case 4: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU32 : (uintptr_t)iemNativeHlpMemStoreDataU32)); break;
     7762            case 8: Assert(pfnFunction == (fFetch ? (uintptr_t)iemNativeHlpMemFetchDataU64 : (uintptr_t)iemNativeHlpMemStoreDataU64)); break;
    77787763        }
    7779         off = iemNativeEmitLoadGpr8Imm(pReNative, off,  IEMNATIVE_CALL_ARG1_GREG, iSegReg);
    7780         idxRegArgInstrIdx = IEMNATIVE_CALL_ARG3_GREG;
    7781         idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG2_GREG;
    7782     }
    7783 
    7784     off = iemNativeEmitLoadGpr8Imm(pReNative, off, idxRegArgInstrIdx, idxInstr);
    7785 
     7764    }
     7765#endif
     7766
     7767
     7768#ifdef VBOX_STRICT
     7769    /*
     7770     * Check that the fExec flags we've got make sense.
     7771     */
     7772    off = iemNativeEmitExecFlagsCheck(pReNative, off, pReNative->fExec);
     7773#endif
     7774
     7775    /*
     7776     * To keep things simple we have to commit any pending writes first as we
     7777     * may end up making calls.
     7778     */
     7779    /** @todo we could postpone this till we make the call and reload the
     7780     * registers after returning from the call. Not sure if that's sensible or
     7781     * not, though. */
     7782    off = iemNativeRegFlushPendingWrites(pReNative, off);
     7783
     7784    /*
     7785     * Move/spill/flush stuff out of call-volatile registers.
     7786     * This is the easy way out. We could contain this to the tlb-miss branch
     7787     * by saving and restoring active stuff here.
     7788     */
     7789    /** @todo save+restore active registers and maybe guest shadows in tlb-miss.  */
     7790    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
     7791
     7792    /*
     7793     * Define labels and allocate the result register (trying for the return
     7794     * register if we can).
     7795     */
     7796    uint16_t const uTlbSeqNo        = pReNative->uTlbSeqNo++;
     7797    uint32_t const idxLabelTlbMiss  = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbMiss, UINT32_MAX, uTlbSeqNo);
     7798    uint32_t const idxLabelTlbDone  = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbDone, UINT32_MAX, uTlbSeqNo);
     7799    uint8_t  const idxRegValueFetch = !fFetch ? UINT8_MAX /* special case value storing below */
     7800                                    : !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG))
     7801                                    ? iemNativeVarSetRegister(pReNative, idxVarValue, IEMNATIVE_CALL_RET_GREG, off)
     7802                                    : iemNativeVarAllocRegister(pReNative, idxVarValue, &off);
     7803
     7804    /*
     7805     * First we try to go via the TLB.
     7806     */
     7807//pReNative->pInstrBuf[off++] = 0xcc;
     7808    /** @todo later. */
     7809
     7810    /*
     7811     * Call helper to do the fetching.
     7812     * We flush all guest register shadow copies here.
     7813     */
     7814    iemNativeLabelDefine(pReNative, idxLabelTlbMiss, off);
     7815
     7816#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
     7817    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
     7818#else
     7819    RT_NOREF(idxInstr);
     7820#endif
     7821
     7822    uint8_t idxRegArgValue;
     7823    if (iSegReg == UINT8_MAX)
     7824        idxRegArgValue = IEMNATIVE_CALL_ARG2_GREG;
     7825    else
     7826    {
     7827        /* IEMNATIVE_CALL_ARG2_GREG = iSegReg */
     7828        AssertStmt(iSegReg < 6, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_SEG_REG_NO));
     7829        off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, iSegReg);
     7830
     7831        idxRegArgValue = IEMNATIVE_CALL_ARG3_GREG;
     7832    }
     7833
     7834    /* IEMNATIVE_CALL_ARG2/3_GREG = uValue (idxVarValue) - if store */
     7835    if (!fFetch)
     7836    {
     7837        if (pReNative->Core.aVars[idxVarValue].enmKind == kIemNativeVarKind_Immediate)
     7838            off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArgValue, pReNative->Core.aVars[idxVarValue].u.uValue);
     7839        else
     7840        {
     7841            uint8_t const idxRegVarValue = pReNative->Core.aVars[idxVarValue].idxReg;
     7842            if (idxRegVarValue < RT_ELEMENTS(pReNative->Core.aHstRegs))
     7843            {
     7844                Assert(!(RT_BIT_32(idxRegVarValue) & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
     7845                off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegArgValue, idxRegVarValue);
     7846            }
     7847            else
     7848            {
     7849                uint8_t const idxStackSlot = pReNative->Core.aVars[idxVarValue].idxStackSlot;
     7850                AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED));
     7851                off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgValue, iemNativeStackCalcBpDisp(idxStackSlot));
     7852            }
     7853        }
     7854    }
     7855
     7856    /* IEMNATIVE_CALL_ARG1_GREG = GCPtrMem */
    77867857    if (pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate)
    7787         off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArgGCPtrMem,
     7858        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG1_GREG,
    77887859                                        pReNative->Core.aVars[idxVarGCPtrMem].u.uValue + offDisp);
    77897860    else
     
    77947865            Assert(!(RT_BIT_32(idxRegVarGCPtrMem) & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
    77957866            if (!offDisp)
    7796                 off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegArgGCPtrMem, idxRegVarGCPtrMem);
     7867                off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegVarGCPtrMem);
    77977868            else
    7798                 off = iemNativeEmitLoadGprFromGprWithAddend(pReNative, off, idxRegArgGCPtrMem, idxRegVarGCPtrMem, offDisp);
     7869                off = iemNativeEmitLoadGprFromGprWithAddend(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegVarGCPtrMem, offDisp);
    77997870        }
    78007871        else
     
    78037874            AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED));
    78047875            AssertFailed(); /** @todo This was probably caused by iemNativeRegMoveAndFreeAndFlushAtCall above. Improve... */
    7805             off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgGCPtrMem, iemNativeStackCalcBpDisp(idxStackSlot));
     7876            off = iemNativeEmitLoadGprByBp(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, iemNativeStackCalcBpDisp(idxStackSlot));
    78067877            if (offDisp)
    7807                 off = iemNativeEmitAddGprImm(pReNative, off, idxRegArgGCPtrMem, offDisp);
     7878                off = iemNativeEmitAddGprImm(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, offDisp);
    78087879        }
    78097880    }
     7881
     7882    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    78107883    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
     7884
     7885    /* Done setting up parameters, make the call. */
    78117886    off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
    78127887
    7813     /* Put the result in the right register. */
    7814     Assert(idxRegDst == pReNative->Core.aVars[idxVarDst].idxReg);
    7815     if (idxRegDst != IEMNATIVE_CALL_RET_GREG)
    7816         off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegDst, IEMNATIVE_CALL_RET_GREG);
     7888    /*
     7889     * Put the result in the right register if this is a fetch.
     7890     */
     7891    if (fFetch)
     7892    {
     7893        Assert(idxRegValueFetch == pReNative->Core.aVars[idxVarValue].idxReg);
     7894        if (idxRegValueFetch != IEMNATIVE_CALL_RET_GREG)
     7895            off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxVarValue, IEMNATIVE_CALL_RET_GREG);
     7896    }
    78177897
    78187898    iemNativeLabelDefine(pReNative, idxLabelTlbDone, off);
     
    78207900    return off;
    78217901}
     7902
     7903
     7904
     7905/*********************************************************************************************************************************
     7906*   Memory fetches (IEM_MEM_FETCH_XXX).                                                                                          *
     7907*********************************************************************************************************************************/
     7908
     7909#define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \
     7910    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, a_iSeg, a_GCPtrMem, sizeof(uint8_t), true /*fFetch*/, \
     7911                                               (uintptr_t)iemNativeHlpMemFetchDataU8, pCallEntry->idxInstr)
     7912
     7913#define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \
     7914    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
     7915                                               (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr)
     7916
     7917#define IEM_MC_FETCH_MEM_U16_DISP(a_u16Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
     7918    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
     7919                                               (uintptr_t)iemNativeHlpMemFetchDataU16, pCallEntry->idxInstr, a_offDisp)
     7920
     7921#define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \
     7922    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
     7923                                               (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr)
     7924
     7925#define IEM_MC_FETCH_MEM_U32_DISP(a_u32Dst, a_iSeg, a_GCPtrMem, a_offDisp) \
     7926    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
     7927                                               (uintptr_t)iemNativeHlpMemFetchDataU32, pCallEntry->idxInstr, a_offDisp)
     7928
     7929#define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
     7930    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, a_iSeg, a_GCPtrMem, sizeof(uint64_t), true /*fFetch*/, \
     7931                                               (uintptr_t)iemNativeHlpMemFetchDataU64, pCallEntry->idxInstr)
     7932
     7933
     7934#define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \
     7935    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint8_t), true /*fFetch*/, \
     7936                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU8, pCallEntry->idxInstr)
     7937
     7938#define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \
     7939    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint16_t), true /*fFetch*/, \
     7940                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU16, pCallEntry->idxInstr)
     7941
     7942#define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \
     7943    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint32_t), true /*fFetch*/, \
     7944                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU32, pCallEntry->idxInstr)
     7945
     7946#define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
     7947    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Dst, UINT8_MAX, a_GCPtrMem, sizeof(uint64_t), true /*fFetch*/, \
     7948                                               (uintptr_t)iemNativeHlpMemFlatFetchDataU64, pCallEntry->idxInstr)
     7949
     7950
     7951
/*********************************************************************************************************************************
*   Memory stores (IEM_MC_STORE_MEM_XXX).                                                                                        *
*********************************************************************************************************************************/
     7955
     7956#define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value) \
     7957    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, sizeof(uint8_t), false /*fFetch*/, \
     7958                                               (uintptr_t)iemNativeHlpMemStoreDataU8, pCallEntry->idxInstr)
     7959
     7960#define IEM_MC_STORE_MEM_U16(a_iSeg, a_GCPtrMem, a_u16Dst) \
     7961    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, sizeof(uint16_t), false /*fFetch*/, \
     7962                                               (uintptr_t)iemNativeHlpMemStoreDataU16, pCallEntry->idxInstr)
     7963
     7964#define IEM_MC_STORE_MEM_U32(a_iSeg, a_GCPtrMem, a_u32Value) \
     7965    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, sizeof(uint32_t), false /*fFetch*/, \
     7966                                               (uintptr_t)iemNativeHlpMemStoreDataU32, pCallEntry->idxInstr)
     7967
     7968#define IEM_MC_STORE_MEM_U64(a_iSeg, a_GCPtrMem, a_u64Value) \
     7969    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, sizeof(uint64_t), false /*fFetch*/, \
     7970                                               (uintptr_t)iemNativeHlpMemStoreDataU64, pCallEntry->idxInstr)
     7971
     7972
     7973#define IEM_MC_STORE_MEM_FLAT_U8(a_GCPtrMem, a_u8Value) \
     7974    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u8Value, a_iSeg, a_GCPtrMem, sizeof(uint8_t), false /*fFetch*/, \
     7975                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU8, pCallEntry->idxInstr)
     7976
     7977#define IEM_MC_STORE_MEM_FLAT_U16(a_GCPtrMem, a_u16Value) \
     7978    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u16Value, a_iSeg, a_GCPtrMem, sizeof(uint16_t), false /*fFetch*/, \
     7979                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU16, pCallEntry->idxInstr)
     7980
     7981#define IEM_MC_STORE_MEM_FLAT_U32(a_GCPtrMem, a_u32Value) \
     7982    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u32Value, a_iSeg, a_GCPtrMem, sizeof(uint32_t), false /*fFetch*/, \
     7983                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU32, pCallEntry->idxInstr)
     7984
     7985#define IEM_MC_STORE_MEM_FLAT_U64(a_GCPtrMem, a_u64Value) \
     7986    off = iemNativeEmitMemFetchStoreDataCommon(pReNative, off, a_u64Value, a_iSeg, a_GCPtrMem, sizeof(uint64_t), false /*fFetch*/, \
     7987                                               (uintptr_t)iemNativeHlpMemFlatStoreDataU64, pCallEntry->idxInstr)
    78227988
    78237989
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette