VirtualBox

Ignore:
Timestamp:
Nov 27, 2023 1:01:13 PM (14 months ago)
Author:
vboxsync
Message:

VMM/IEM: First recompiled memory access. This is without any TLB usage, so it will just make a call to fetch the memory. Fixed bug handling referenced variables. Optimized label lookup. bugref:10371

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r102259 r102313  
    15721572 * Used by TB code when encountering a non-zero status or rcPassUp after a call.
    15731573 */
    1574 IEM_DECL_IMPL_DEF(int, iemNativeHlpExecStatusCodeFiddling,(PVMCPUCC pVCpu, int rc, uint8_t idxInstr))
     1574IEM_DECL_NATIVE_HLP_DEF(int, iemNativeHlpExecStatusCodeFiddling,(PVMCPUCC pVCpu, int rc, uint8_t idxInstr))
    15751575{
    15761576    pVCpu->iem.s.cInstructions += idxInstr;
     
    15821582 * Used by TB code when it wants to raise a \#GP(0).
    15831583 */
    1584 IEM_DECL_IMPL_DEF(int, iemNativeHlpExecRaiseGp0,(PVMCPUCC pVCpu, uint8_t idxInstr))
     1584IEM_DECL_NATIVE_HLP_DEF(int, iemNativeHlpExecRaiseGp0,(PVMCPUCC pVCpu, uint8_t idxInstr))
    15851585{
    15861586    pVCpu->iem.s.cInstructions += idxInstr;
     
    15901590#endif
    15911591}
     1592
     1593
     1594/* Segmented memory helpers: */
     1595
     1596/**
     1597 * Used by TB code to load unsigned 8-bit data w/ segmentation.
     1598 */
     1599IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1600{
     1601    RT_NOREF(idxInstr); /** @todo idxInstr */
     1602    return iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1603}
     1604
     1605
     1606/**
     1607 * Used by TB code to load unsigned 16-bit data w/ segmentation.
     1608 */
     1609IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1610{
     1611    RT_NOREF(idxInstr); /** @todo idxInstr */
     1612    return iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1613}
     1614
     1615
     1616/**
     1617 * Used by TB code to load unsigned 32-bit data w/ segmentation.
     1618 */
     1619IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1620{
     1621    RT_NOREF(idxInstr); /** @todo idxInstr */
     1622    return iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1623}
     1624
     1625
     1626/**
     1627 * Used by TB code to load unsigned 64-bit data w/ segmentation.
     1628 */
     1629IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1630{
     1631    RT_NOREF(idxInstr); /** @todo idxInstr */
     1632    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1633}
     1634
     1635
     1636/* Flat memory helpers: */
     1637
     1638/**
     1639 * Used by TB code to load unsigned 8-bit data w/ segmentation.
     1640 */
     1641IEM_DECL_NATIVE_HLP_DEF(uint8_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1642{
     1643    RT_NOREF(idxInstr); /** @todo idxInstr */
     1644    return iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1645}
     1646
     1647
     1648/**
     1649 * Used by TB code to load unsigned 16-bit data w/ segmentation.
     1650 */
     1651IEM_DECL_NATIVE_HLP_DEF(uint16_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1652{
     1653    RT_NOREF(idxInstr); /** @todo idxInstr */
     1654    return iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1655}
     1656
     1657
     1658/**
     1659 * Used by TB code to load unsigned 32-bit data w/ segmentation.
     1660 */
     1661IEM_DECL_NATIVE_HLP_DEF(uint32_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1662{
     1663    RT_NOREF(idxInstr); /** @todo idxInstr */
     1664    return iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1665}
     1666
     1667
     1668/**
     1669 * Used by TB code to load unsigned 64-bit data w/ segmentation.
     1670 */
     1671IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t idxInstr))
     1672{
     1673    RT_NOREF(idxInstr); /** @todo idxInstr */
     1674    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
     1675}
     1676
    15921677
    15931678
     
    16091694    pReNative->uCondSeqNo                  = 0;
    16101695    pReNative->uCheckIrqSeqNo              = 0;
     1696    pReNative->uTlbSeqNo                   = 0;
    16111697
    16121698    pReNative->Core.bmHstRegs              = IEMNATIVE_REG_FIXED_MASK
     
    16211707    AssertCompile(sizeof(pReNative->Core.bmStack) * 8 == IEMNATIVE_FRAME_VAR_SLOTS); /* Must set reserved slots to 1 otherwise. */
    16221708    pReNative->Core.u64ArgVars             = UINT64_MAX;
     1709
     1710    AssertCompile(RT_ELEMENTS(pReNative->aidxUniqueLabels) == 6);
     1711    pReNative->aidxUniqueLabels[0]         = UINT32_MAX;
     1712    pReNative->aidxUniqueLabels[1]         = UINT32_MAX;
     1713    pReNative->aidxUniqueLabels[2]         = UINT32_MAX;
     1714    pReNative->aidxUniqueLabels[3]         = UINT32_MAX;
     1715    pReNative->aidxUniqueLabels[4]         = UINT32_MAX;
     1716    pReNative->aidxUniqueLabels[5]         = UINT32_MAX;
    16231717
    16241718    /* Full host register reinit: */
     
    17421836                     uint32_t offWhere /*= UINT32_MAX*/, uint16_t uData /*= 0*/)
    17431837{
     1838    Assert(uData == 0 || enmType >= kIemNativeLabelType_FirstWithMultipleInstances);
     1839
    17441840    /*
    17451841     * Locate existing label definition.
     
    17521848    if (   pReNative->bmLabelTypes & RT_BIT_64(enmType)
    17531849#ifndef VBOX_STRICT
     1850        && enmType  >= kIemNativeLabelType_FirstWithMultipleInstances
    17541851        && offWhere == UINT32_MAX
    17551852        && uData    == 0
     
    17571854        )
    17581855    {
    1759         /** @todo Since this is only used for labels with uData = 0, just use a
    1760          *        lookup array? */
     1856#ifndef VBOX_STRICT
     1857        AssertStmt(enmType > kIemNativeLabelType_Invalid && enmType < kIemNativeLabelType_FirstWithMultipleInstances,
     1858                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
     1859        uint32_t const idxLabel = pReNative->aidxUniqueLabels[enmType];
     1860        if (idxLabel < pReNative->cLabels)
     1861            return idxLabel;
     1862#else
    17611863        for (uint32_t i = 0; i < cLabels; i++)
    17621864            if (   paLabels[i].enmType == enmType
    17631865                && paLabels[i].uData   == uData)
    17641866            {
    1765 #ifdef VBOX_STRICT
    17661867                AssertStmt(uData == 0, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
    17671868                AssertStmt(offWhere == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
    1768 #endif
    17691869                AssertStmt(paLabels[i].off == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_2));
     1870                AssertStmt(enmType < kIemNativeLabelType_FirstWithMultipleInstances && pReNative->aidxUniqueLabels[enmType] == i,
     1871                           IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
    17701872                return i;
    17711873            }
     1874        AssertStmt(   enmType >= kIemNativeLabelType_FirstWithMultipleInstances
     1875                   || pReNative->aidxUniqueLabels[enmType] == UINT32_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_1));
     1876#endif
    17721877    }
    17731878
     
    18011906    pReNative->bmLabelTypes |= RT_BIT_64(enmType);
    18021907
     1908    if (enmType < kIemNativeLabelType_FirstWithMultipleInstances)
     1909    {
     1910        Assert(uData == 0);
     1911        pReNative->aidxUniqueLabels[enmType] = cLabels;
     1912    }
     1913
    18031914    if (offWhere != UINT32_MAX)
    18041915    {
     
    18431954    if (RT_BIT_64(enmType) & pReNative->bmLabelTypes)
    18441955    {
     1956        if (enmType < kIemNativeLabelType_FirstWithMultipleInstances)
     1957            return pReNative->aidxUniqueLabels[enmType];
     1958
    18451959        PIEMNATIVELABEL paLabels = pReNative->paLabels;
    18461960        uint32_t const  cLabels  = pReNative->cLabels;
     
    23872501        Log12(("iemNativeRegMoveOrSpillStackVar: moving idxVar=%d from %s to %s (fGstRegShadows=%RX64)\n",
    23882502               idxVar,  g_apszIemNativeHstRegNames[idxRegOld], g_apszIemNativeHstRegNames[idxRegNew], fGstRegShadows));
     2503        off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegNew, idxRegOld);
     2504
    23892505        pReNative->Core.aHstRegs[idxRegNew].fGstRegShadows = fGstRegShadows;
    23902506        pReNative->Core.aHstRegs[idxRegNew].enmWhat        = kIemNativeWhat_Var;
     
    24172533        off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxRegOld);
    24182534
     2535        pReNative->Core.aVars[idxVar].idxReg    = UINT8_MAX;
    24192536        pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxRegOld);
    24202537        pReNative->Core.bmGstRegShadows        &= ~pReNative->Core.aHstRegs[idxRegOld].fGstRegShadows;
     
    34423559
    34433560
     3561#ifdef VBOX_STRICT
     3562/**
     3563 * Emitting code that checks that IEMCPU::fExec matches @a fExec for all
     3564 * important bits.
     3565 *
     3566 * @note May of course trash IEMNATIVE_REG_FIXED_TMP0.
     3567 *       Trashes EFLAGS on AMD64.
     3568 */
     3569static uint32_t
     3570iemNativeEmitExecFlagsCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fExec)
     3571{
     3572    uint8_t const idxRegTmp = iemNativeRegAllocTmp(pReNative, &off);
     3573    off = iemNativeEmitLoadGprFromVCpuU32(pReNative, off, idxRegTmp, RT_UOFFSETOF(VMCPUCC, iem.s.fExec));
     3574    off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxRegTmp, IEMTB_F_IEM_F_MASK & IEMTB_F_KEY_MASK);
     3575    off = iemNativeEmitCmpGpr32WithImm(pReNative, off, idxRegTmp, fExec & IEMTB_F_KEY_MASK);
     3576
     3577#ifdef RT_ARCH_AMD64
     3578    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
     3579
     3580    /* je/jz +1 */
     3581    pbCodeBuf[off++] = 0x74;
     3582    pbCodeBuf[off++] = 0x01;
     3583
     3584    /* int3 */
     3585    pbCodeBuf[off++] = 0xcc;
     3586
     3587# elif defined(RT_ARCH_ARM64)
     3588    /* mov TMP0, [gstreg] */
     3589    off = iemNativeEmitLoadGprWithGstShadowReg(pReNative, off, IEMNATIVE_REG_FIXED_TMP0, enmGstReg);
     3590
     3591    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
     3592    /* b.eq +1 */
     3593    pu32CodeBuf[off++] = Armv8A64MkInstrBCond(kArmv8InstrCond_Eq, 2);
     3594    /* brk #0x1000+enmGstReg */
     3595    pu32CodeBuf[off++] = Armv8A64MkInstrBrk(UINT32_C(0x2000));
     3596
     3597# else
     3598#  error "Port me!"
     3599# endif
     3600    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     3601
     3602    iemNativeRegFreeTmp(pReNative, idxRegTmp);
     3603    return off;
     3604}
     3605#endif /* VBOX_STRICT */
     3606
     3607
    34443608/**
    34453609 * Emits a code for checking the return code of a call and rcPassUp, returning
     
    50915255    uint8_t const a_Name = iemNativeArgAllocConst(pReNative, (a_iArg), sizeof(a_Type), (a_Value))
    50925256
    5093 #define IEM_MC_ARG_LOCAL_REF(a_Type, a_Name, a_iArg) \
     5257#define IEM_MC_ARG_LOCAL_REF(a_Type, a_Name, a_Local, a_iArg) \
    50945258    uint8_t const a_Name = iemNativeArgAllocLocalRef(pReNative, (a_iArg), (a_Local))
    50955259
     
    52345398    if (pReNative->Core.aVars[idxVar].enmKind != kIemNativeVarKind_Immediate)
    52355399    {
    5236         /* Only simple trasnsitions for now. */
     5400        /* Only simple transitions for now. */
    52375401        AssertStmt(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Invalid,
    52385402                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
     
    52645428    if (pReNative->Core.aVars[idxVar].enmKind != kIemNativeVarKind_VarRef)
    52655429    {
    5266         /* Only simple trasnsitions for now. */
     5430        /* Only simple transitions for now. */
    52675431        AssertStmt(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Invalid,
    52685432                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
    5269         pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_Immediate;
     5433        pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_VarRef;
    52705434    }
    52715435    AssertStmt(pReNative->Core.aVars[idxVar].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
     
    53005464    if (pReNative->Core.aVars[idxVar].enmKind != kIemNativeVarKind_GstRegRef)
    53015465    {
    5302         /* Only simple trasnsitions for now. */
     5466        /* Only simple transitions for now. */
    53035467        AssertStmt(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Invalid,
    53045468                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
     
    53425506{
    53435507    uint8_t const idxVar = iemNativeVarAllocInt(pReNative, cbType);
    5344     iemNativeVarSetKindToStack(pReNative, idxVar);
     5508    /* Don't set to stack now, leave that to the first use as for instance
     5509       IEM_MC_CALC_RM_EFF_ADDR may produce a const/immediate result (esp. in DOS). */
    53455510    return idxVar;
    53465511}
     
    55575722 * implied.
    55585723 *
     5724 * @returns idxReg
    55595725 * @param   pReNative   The recompiler state.
    55605726 * @param   idxVar      The variable.
     
    55645730 * @throws  VERR_IEM_VAR_IPE_10, VERR_IEM_VAR_IPE_11
    55655731 */
    5566 DECL_INLINE_THROW(void) iemNativeVarSetRegister(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, uint8_t idxReg, uint32_t off)
     5732DECL_INLINE_THROW(uint8_t) iemNativeVarSetRegister(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, uint8_t idxReg, uint32_t off)
    55675733{
    55685734    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVar);
     
    55765742    iemNativeVarSetKindToStack(pReNative, idxVar);
    55775743    pReNative->Core.aVars[idxVar].idxReg = idxReg;
     5744
     5745    return idxReg;
    55785746}
    55795747
     
    58205988    }
    58215989#endif
     5990
     5991    /*
     5992     * Before we do anything else, go over variables that are referenced and
     5993     * make sure they are not in a register.
     5994     */
     5995    uint32_t bmVars = pReNative->Core.bmVars;
     5996    if (bmVars)
     5997        do
     5998        {
     5999            uint8_t const idxVar = ASMBitFirstSetU32(bmVars) - 1;
     6000            bmVars &= ~RT_BIT_32(idxVar);
     6001
     6002            if (pReNative->Core.aVars[idxVar].idxReferrerVar != UINT8_MAX)
     6003            {
     6004                uint8_t const idxRegOld = pReNative->Core.aVars[idxVar].idxReg;
     6005                if (idxRegOld < RT_ELEMENTS(pReNative->Core.aHstRegs))
     6006                {
     6007                    uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot;
     6008                    Log12(("iemNativeEmitCallCommon: spilling idxVar=%d/idxReg=%d (referred to by %d) onto the stack (slot %#x bp+%d, off=%#x)\n",
     6009                           idxVar, idxRegOld, pReNative->Core.aVars[idxVar].idxReferrerVar,
     6010                           idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off));
     6011                    AssertStmt(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_7));
     6012                    off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxRegOld);
     6013
     6014                    pReNative->Core.aVars[idxVar].idxReg    = UINT8_MAX;
     6015                    pReNative->Core.bmHstRegs              &= ~RT_BIT_32(idxRegOld);
     6016                    pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxRegOld);
     6017                    pReNative->Core.bmGstRegShadows        &= ~pReNative->Core.aHstRegs[idxRegOld].fGstRegShadows;
     6018                    pReNative->Core.aHstRegs[idxRegOld].fGstRegShadows = 0;
     6019                }
     6020            }
     6021        } while (bmVars != 0);
    58226022
    58236023    uint8_t const cRegArgs = RT_MIN(cArgs, RT_ELEMENTS(g_aidxIemNativeCallRegs));
     
    59056105                        uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar;
    59066106                        Assert(idxOtherVar < RT_ELEMENTS(pReNative->Core.aVars));
    5907                         AssertStmt(pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX,
     6107                        AssertStmt(   pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX
     6108                                   && pReNative->Core.aVars[idxOtherVar].idxReg       == UINT8_MAX,
    59086109                                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4));
    59096110                        off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG,
     
    59766177                            uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar;
    59776178                            Assert(idxOtherVar < RT_ELEMENTS(pReNative->Core.aVars));
    5978                             AssertStmt(pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX,
     6179                            AssertStmt(   pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX
     6180                                       && pReNative->Core.aVars[idxOtherVar].idxReg       == UINT8_MAX,
    59796181                                       IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4));
    59806182                            off = iemNativeEmitLeaGprByBp(pReNative, off, idxArgReg,
     
    71967398    else
    71977399    {
    7198         /* lea ret32, [index64 << cShiftIndex (+ base64) (+ disp32)] */
    71997400        Assert(idxRegIndex != X86_GREG_xSP /*no-index*/);
    72007401        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    7201         if (idxRegRet >= 8 || (idxRegBase >= 8 && idxRegBase != UINT8_MAX) || idxRegIndex >= 8)
    7202             pbCodeBuf[off++] = (idxRegRet   >= 8                            ? X86_OP_REX_R : 0)
    7203                              | (idxRegBase  >= 8 && idxRegBase != UINT8_MAX ? X86_OP_REX_B : 0)
    7204                              | (idxRegIndex >= 8                            ? X86_OP_REX_X : 0);
    7205         pbCodeBuf[off++] = 0x8d;
    7206         uint8_t const bMod = u32EffAddr == 0 && (idxRegBase & 7) != X86_GREG_xBP && idxRegBase != UINT8_MAX ? X86_MOD_MEM0
    7207                            : (int8_t)u32EffAddr == (int32_t)u32EffAddr ? X86_MOD_MEM1 : X86_MOD_MEM4;
    7208         pbCodeBuf[off++] = X86_MODRM_MAKE(bMod, idxRegRet & 7, 4 /*SIB*/);
    7209         pbCodeBuf[off++] = X86_SIB_MAKE(idxRegBase != UINT8_MAX ? idxRegBase & 7 : 5 /*nobase/bp*/, idxRegIndex & 7, cShiftIndex);
    7210         if (bMod != X86_MOD_MEM0)
     7402        if (idxRegBase == UINT8_MAX)
    72117403        {
     7404            /* lea ret32, [(index64 << cShiftIndex) + disp32] */
     7405            if (idxRegRet >= 8 || idxRegIndex >= 8)
     7406                pbCodeBuf[off++] = (idxRegRet   >= 8 ? X86_OP_REX_R : 0)
     7407                                 | (idxRegIndex >= 8 ? X86_OP_REX_X : 0);
     7408            pbCodeBuf[off++] = 0x8d;
     7409            pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_MEM0, idxRegRet & 7, 4 /*SIB*/);
     7410            pbCodeBuf[off++] = X86_SIB_MAKE(5 /*nobase/bp*/, idxRegIndex & 7, cShiftIndex);
    72127411            pbCodeBuf[off++] = RT_BYTE1(u32EffAddr);
    7213             if (bMod == X86_MOD_MEM4)
     7412            pbCodeBuf[off++] = RT_BYTE2(u32EffAddr);
     7413            pbCodeBuf[off++] = RT_BYTE3(u32EffAddr);
     7414            pbCodeBuf[off++] = RT_BYTE4(u32EffAddr);
     7415        }
     7416        else
     7417        {
     7418            /* lea ret32, [(index64 << cShiftIndex) + base64 (+ disp32)] */
     7419            if (idxRegRet >= 8 || idxRegBase >= 8 || idxRegIndex >= 8)
     7420                pbCodeBuf[off++] = (idxRegRet   >= 8 ? X86_OP_REX_R : 0)
     7421                                 | (idxRegBase  >= 8 ? X86_OP_REX_B : 0)
     7422                                 | (idxRegIndex >= 8 ? X86_OP_REX_X : 0);
     7423            pbCodeBuf[off++] = 0x8d;
     7424            uint8_t const bMod = u32EffAddr == 0 && (idxRegBase & 7) != X86_GREG_xBP ? X86_MOD_MEM0
     7425                               : (int8_t)u32EffAddr == (int32_t)u32EffAddr           ? X86_MOD_MEM1 : X86_MOD_MEM4;
     7426            pbCodeBuf[off++] = X86_MODRM_MAKE(bMod, idxRegRet & 7, 4 /*SIB*/);
     7427            pbCodeBuf[off++] = X86_SIB_MAKE(idxRegBase & 7, idxRegIndex & 7, cShiftIndex);
     7428            if (bMod != X86_MOD_MEM0)
    72147429            {
    7215                 pbCodeBuf[off++] = RT_BYTE2(u32EffAddr);
    7216                 pbCodeBuf[off++] = RT_BYTE3(u32EffAddr);
    7217                 pbCodeBuf[off++] = RT_BYTE4(u32EffAddr);
     7430                pbCodeBuf[off++] = RT_BYTE1(u32EffAddr);
     7431                if (bMod == X86_MOD_MEM4)
     7432                {
     7433                    pbCodeBuf[off++] = RT_BYTE2(u32EffAddr);
     7434                    pbCodeBuf[off++] = RT_BYTE3(u32EffAddr);
     7435                    pbCodeBuf[off++] = RT_BYTE4(u32EffAddr);
     7436                }
    72187437            }
    72197438        }
     
    72927511#define IEM_MC_CALC_RM_EFF_ADDR_THREADED_64_ADDR32(a_GCPtrEff, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm) \
    72937512    off = iemNativeEmitCalcRmEffAddrThreadedAddr64(pReNative, off, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm, a_GCPtrEff, 32)
     7513
     7514
     7515
     7516/*********************************************************************************************************************************
     7517*   Memory fetches (IEM_MEM_FETCH_XXX).                                                                                          *
     7518*********************************************************************************************************************************/
     7519
     7520
     7521
     7522
     7523#define IEM_MC_FETCH_MEM_U8(a_u8Dst, a_iSeg, a_GCPtrMem) \
     7524    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u8Dst, a_iSeg, a_GCPtrMem, sizeof(uint8_t))
     7525
     7526#define IEM_MC_FETCH_MEM_U16(a_u16Dst, a_iSeg, a_GCPtrMem) \
     7527    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u16Dst, a_iSeg, a_GCPtrMem, sizeof(uint16_t))
     7528
     7529#define IEM_MC_FETCH_MEM_U32(a_u32Dst, a_iSeg, a_GCPtrMem) \
     7530    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u32Dst, a_iSeg, a_GCPtrMem, sizeof(uint32_t))
     7531
     7532#define IEM_MC_FETCH_MEM_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
     7533    off = iemNativeEmitMemFetchDataCommon(pReNative, off, pCallEntry->idxInstr, a_u64Dst, a_iSeg, a_GCPtrMem, sizeof(uint64_t))
     7534
     7535/** Emits code for IEM_MC_FETCH_MEM_U8/16/32/64. */
     7536DECL_INLINE_THROW(uint32_t)
     7537iemNativeEmitMemFetchDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxInstr,
     7538                                uint8_t idxVarDst, uint8_t iSegReg, uint8_t idxVarGCPtrMem, uint8_t cbMem)
     7539{
     7540    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarDst);
     7541    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarGCPtrMem);
     7542    AssertStmt(   pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate
     7543               || pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Stack,
     7544               IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_UNEXPECTED_KIND));
     7545    Assert(iSegReg < 6);
     7546    Assert(cbMem == 1 || cbMem == 2 || cbMem == 4 || cbMem == 8);
     7547    RT_NOREF(idxInstr);
     7548
     7549#ifdef VBOX_STRICT
     7550    /*
     7551     * Check that the fExec flags we've got make sense.
     7552     */
     7553    off = iemNativeEmitExecFlagsCheck(pReNative, off, pReNative->fExec);
     7554#endif
     7555
     7556    /*
     7557     * To keep things simple we have to commit any pending writes first as we
     7558     * may end up making calls.
     7559     */
     7560    /** @todo we could postpone this till we make the call and reload the
     7561     * registers after returning from the call. Not sure if that's sensible or
     7562     * not, though. */
     7563    off = iemNativeRegFlushPendingWrites(pReNative, off);
     7564
     7565    /*
     7566     * Move/spill/flush stuff out of call-volatile registers.
     7567     * This is the easy way out. We could contain this to the tlb-miss branch
     7568     * by saving and restoring active stuff here.
     7569     */
     7570    /** @todo save+restore active registers and maybe guest shadows in tlb-miss.  */
     7571    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
     7572
     7573    /*
     7574     * Define labels and allocate the result register (trying for the return
     7575     * register if we can).
     7576     */
     7577    uint16_t const uTlbSeqNo       = pReNative->uTlbSeqNo++;
     7578    uint32_t const idxLabelTlbMiss = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbMiss, UINT32_MAX, uTlbSeqNo);
     7579    uint32_t const idxLabelTlbDone = iemNativeLabelCreate(pReNative, kIemNativeLabelType_TlbDone, UINT32_MAX, uTlbSeqNo);
     7580    uint8_t  const idxRegDst       = !(pReNative->Core.bmHstRegs & RT_BIT_32(IEMNATIVE_CALL_RET_GREG))
     7581                                   ? iemNativeVarSetRegister(pReNative, idxVarDst, IEMNATIVE_CALL_RET_GREG, off)
     7582                                   : iemNativeVarAllocRegister(pReNative, idxVarDst, &off);
     7583
     7584    /*
     7585     * First we try to go via the TLB.
     7586     */
     7587//pReNative->pInstrBuf[off++] = 0xcc;
     7588    /** @todo later. */
     7589
     7590    /*
     7591     * Call helper to do the fetching.
     7592     * We flush all guest register shadow copies here.
     7593     */
     7594    iemNativeLabelDefine(pReNative, idxLabelTlbMiss, off);
     7595
     7596    uint8_t   idxRegArgGCPtrMem;
     7597    uint8_t   idxRegArgInstrIdx;
     7598    uintptr_t pfnFunction;
     7599    if (   (   (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT
     7600            || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_PROT_FLAT
     7601            || (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_FLAT)
     7602        && (   iSegReg == X86_SREG_DS
     7603            || iSegReg == X86_SREG_ES
     7604            || iSegReg == X86_SREG_SS
     7605            || (iSegReg == X86_SREG_CS && (pReNative->fExec & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT) ))
     7606    {
     7607        AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 4);
     7608        switch (cbMem)
     7609        {
     7610            case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU8;  break;
     7611            case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU16; break;
     7612            case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU32; break;
     7613            case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFlatFetchDataU64; break;
     7614            default:
     7615                AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
     7616        }
     7617        idxRegArgInstrIdx = IEMNATIVE_CALL_ARG2_GREG;
     7618        idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG1_GREG;
     7619    }
     7620    else
     7621    {
     7622        AssertCompile(IEMNATIVE_CALL_ARG_GREG_COUNT >= 3);
     7623        switch (cbMem)
     7624        {
     7625            case 1: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU8;  break;
     7626            case 2: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU16; break;
     7627            case 4: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU32; break;
     7628            case 8: pfnFunction = (uintptr_t)iemNativeHlpMemFetchDataU64; break;
     7629            default:
     7630                AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_EMIT_BAD_MEM_SIZE));
     7631        }
     7632        off = iemNativeEmitLoadGpr8Imm(pReNative, off,  IEMNATIVE_CALL_ARG1_GREG, iSegReg);
     7633        idxRegArgInstrIdx = IEMNATIVE_CALL_ARG3_GREG;
     7634        idxRegArgGCPtrMem = IEMNATIVE_CALL_ARG2_GREG;
     7635    }
     7636
     7637    off = iemNativeEmitLoadGpr8Imm(pReNative, off, idxRegArgInstrIdx, idxInstr);
     7638
     7639    if (pReNative->Core.aVars[idxVarGCPtrMem].enmKind == kIemNativeVarKind_Immediate)
     7640        off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArgGCPtrMem, pReNative->Core.aVars[idxVarGCPtrMem].u.uValue);
     7641    else
     7642    {
     7643        uint8_t const idxRegVarGCPtrMem = pReNative->Core.aVars[idxVarGCPtrMem].idxReg;
     7644        if (idxRegVarGCPtrMem < RT_ELEMENTS(pReNative->Core.aHstRegs))
     7645        {
     7646            Assert(!(RT_BIT_32(idxRegVarGCPtrMem) & IEMNATIVE_CALL_VOLATILE_GREG_MASK));
     7647            off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegArgGCPtrMem, idxRegVarGCPtrMem);
     7648        }
     7649        else
     7650        {
     7651            AssertFailed(); /** @todo This was probably caused by iemNativeRegMoveAndFreeAndFlushAtCall above. Improve... */
     7652            off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgGCPtrMem, iemNativeVarCalcBpDisp(pReNative, idxVarGCPtrMem));
     7653        }
     7654    }
     7655    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
     7656    off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     7657
     7658    /* Put the result in the right register. */
     7659    Assert(idxRegDst == pReNative->Core.aVars[idxVarDst].idxReg);
     7660    if (idxRegDst != IEMNATIVE_CALL_RET_GREG)
     7661        off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegDst, IEMNATIVE_CALL_RET_GREG);
     7662
     7663    iemNativeLabelDefine(pReNative, idxLabelTlbDone, off);
     7664
     7665    return off;
     7666}
    72947667
    72957668
     
    77578130                                case kIemNativeLabelType_CheckIrq:
    77588131                                    pszName = "CheckIrq_CheckVM";
     8132                                    fNumbered = true;
     8133                                    break;
     8134                                case kIemNativeLabelType_TlbMiss:
     8135                                    pszName = "CheckIrq_TlbMiss";
     8136                                    fNumbered = true;
     8137                                    break;
     8138                                case kIemNativeLabelType_TlbDone:
     8139                                    pszName = "CheckIrq_TlbDone";
    77598140                                    fNumbered = true;
    77608141                                    break;
     
    80558436        uint32_t             cRecompiledCalls = 0;
    80568437#endif
    8057         uint32_t             fExec            = pTb->fFlags;
    80588438        PCIEMTHRDEDCALLENTRY pCallEntry       = pTb->Thrd.paCalls;
     8439        pReNative->fExec                      = pTb->fFlags & IEMTB_F_IEM_F_MASK;
    80598440        while (cCallsLeft-- > 0)
    80608441        {
     
    80658446             */
    80668447            if (pCallEntry->enmFunction == kIemThreadedFunc_BltIn_CheckMode)
    8067                 fExec = pCallEntry->auParams[0];
     8448                pReNative->fExec = pCallEntry->auParams[0] & IEMTB_F_IEM_F_MASK;
    80688449#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
    80698450            iemNativeDbgInfoAddNativeOffset(pReNative, off);
     
    80718452            {
    80728453                if (iGstInstr < (int32_t)pTb->cInstructions)
    8073                     iemNativeDbgInfoAddGuestInstruction(pReNative, fExec);
     8454                    iemNativeDbgInfoAddGuestInstruction(pReNative, pReNative->fExec);
    80748455                else
    80758456                    Assert(iGstInstr == pTb->cInstructions);
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette