VirtualBox

Changeset 102733 in vbox


Ignore:
Timestamp:
Dec 29, 2023 7:40:03 PM (11 months ago)
Author:
vboxsync
Message:

VMM/IEM: Implemented making the TLB-missed call w/o trashing/flushing stuff in volatile registers. bugref:10371

Location:
trunk
Files:
4 edited

Legend:

Unmodified
Added
Removed
  • trunk/include/VBox/err.h

    r102699 r102733  
    25762576/** Recompiler: Register allocator internal processing error \#11. */
    25772577#define VERR_IEM_REG_IPE_11                         (-5353)
     2578/** Recompiler: Register allocator internal processing error \#12. */
     2579#define VERR_IEM_REG_IPE_12                         (-5354)
     2580/** Recompiler: Register allocator internal processing error \#13. */
     2581#define VERR_IEM_REG_IPE_13                         (-5355)
    25782582
    25792583/** Recompiler: Out of variables. */
     
    26092613/** Recompiler: Variable management internal processing error \#11. */
    26102614#define VERR_IEM_VAR_IPE_11                         (-5375)
     2615/** Recompiler: Variable management internal processing error \#12. */
     2616#define VERR_IEM_VAR_IPE_12                         (-5376)
     2617/** Recompiler: Variable management internal processing error \#13. */
     2618#define VERR_IEM_VAR_IPE_13                         (-5377)
    26112619
    26122620/** Recompiler: Unimplemented case. */
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r102724 r102733  
    42084208
    42094209/**
     4210 * Flushes guest register shadow copies held by a set of host registers.
     4211 *
     4212 * This is used with the TLB lookup code for ensuring that we don't carry on
     4213 * with any guest shadows in volatile registers, as these will get corrupted by
     4214 * a TLB miss.
     4215 *
     4216 * @param   pReNative       The native recompile state.
     4217 * @param   fHstRegs        Set of host registers to flush guest shadows for.
          * @note    The affected host registers are expected to be unallocated
          *          (not set in bmHstRegs) - this is asserted below.
     4218 */
     4219DECLHIDDEN(void) iemNativeRegFlushGuestShadowsByHostMask(PIEMRECOMPILERSTATE pReNative, uint32_t fHstRegs) RT_NOEXCEPT
     4220{
     4221    /*
     4222     * Reduce the mask by what's currently shadowed.
     4223     */
     4224    uint32_t const bmHstRegsWithGstShadowOld = pReNative->Core.bmHstRegsWithGstShadow;
     4225    fHstRegs &= bmHstRegsWithGstShadowOld;
     4226    if (fHstRegs)
     4227    {
     4228        uint32_t const bmHstRegsWithGstShadowNew = bmHstRegsWithGstShadowOld & ~fHstRegs;
     4229        Log12(("iemNativeRegFlushGuestShadowsByHostMask: flushing %#RX32 (%#RX32 -> %#RX32)\n",
     4230               fHstRegs, bmHstRegsWithGstShadowOld, bmHstRegsWithGstShadowNew));
     4231        pReNative->Core.bmHstRegsWithGstShadow = bmHstRegsWithGstShadowNew;
     4232        if (bmHstRegsWithGstShadowNew)
     4233        {
     4234            /*
     4235             * Partial (likely): some shadowed registers survive, so accumulate
                      * the flushed guest register masks and clear them from
                      * bmGstRegShadows in a single update at the end.
     4236             */
     4237            uint64_t fGstShadows = 0;
     4238            do
     4239            {
     4240                unsigned const idxHstReg = ASMBitFirstSetU32(fHstRegs) - 1;
     4241                Assert(!(pReNative->Core.bmHstRegs & RT_BIT_32(idxHstReg)));
     4242                Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows)
     4243                       == pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows);
     4244
     4245                fGstShadows |= pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows;
     4246                pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows = 0;
     4247                fHstRegs &= ~RT_BIT_32(idxHstReg);
     4248            } while (fHstRegs != 0);
     4249            pReNative->Core.bmGstRegShadows &= ~fGstShadows;
     4250        }
     4251        else
     4252        {
     4253            /*
     4254             * Clear all: every shadowed host register is being flushed, so
                      * the whole guest shadow bitmap can simply be zeroed.
     4255             */
     4256            do
     4257            {
     4258                unsigned const idxHstReg = ASMBitFirstSetU32(fHstRegs) - 1;
     4259                Assert(!(pReNative->Core.bmHstRegs & RT_BIT_32(idxHstReg)));
     4260                Assert(   (pReNative->Core.bmGstRegShadows & pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows)
     4261                       == pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows);
     4262
     4263                pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows = 0;
     4264                fHstRegs &= ~RT_BIT_32(idxHstReg);
     4265            } while (fHstRegs != 0);
     4266            pReNative->Core.bmGstRegShadows = 0;
     4267        }
     4268    }
     4269}
     4270
     4271
     4272/**
     4273 * Restores guest shadow copies in volatile registers.
     4274 *
     4275 * This is used after calling a helper function (think TLB miss) to restore the
     4276 * register state of volatile registers.
     4277 *
          * @returns New code buffer offset (@a off).
     4278 * @param   pReNative       The native recompile state.
          * @param   off             The code buffer offset.
          * @param   fHstRegsActiveShadows   Set of host registers that may still be
          *                          actively shadowing guest registers across the
          *                          helper call (only used for the assertion below).
          * @throws  VERR_IEM_REG_IPE_12 (via longjmp) on inconsistent shadow state.
     4280 * @see     iemNativeVarSaveVolatileRegsPreHlpCall(),
     4281 *          iemNativeVarRestoreVolatileRegsPostHlpCall()
     4282 */
     4283DECL_HIDDEN_THROW(uint32_t)
     4284iemNativeRegRestoreGuestShadowsInVolatileRegs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fHstRegsActiveShadows)
     4285{
     4286    uint32_t fHstRegs = pReNative->Core.bmHstRegsWithGstShadow & IEMNATIVE_CALL_VOLATILE_GREG_MASK;
     4287    if (fHstRegs)
     4288    {
     4289        Log12(("iemNativeRegRestoreGuestShadowsInVolatileRegs: %#RX32\n", fHstRegs));
     4290        do
     4291        {
     4292            unsigned const idxHstReg = ASMBitFirstSetU32(fHstRegs) - 1;
     4293
     4294            /* It's not fatal if a register is active holding a variable that
     4295               shadows a guest register, ASSUMING all pending guest register
     4296               writes were flushed prior to the helper call. However, we'll be
     4297               emitting duplicate restores, so it wastes code space. */
     4298            Assert(!(pReNative->Core.bmHstRegs & ~fHstRegsActiveShadows & RT_BIT_32(idxHstReg)));
     4299            RT_NOREF(fHstRegsActiveShadows);
     4300
     4301            uint64_t const fGstRegShadows = pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows;
     4302            Assert((pReNative->Core.bmGstRegShadows & fGstRegShadows) == fGstRegShadows);
     4303            AssertStmt(fGstRegShadows != 0 && fGstRegShadows < RT_BIT_64(kIemNativeGstReg_End),
     4304                       IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_12));
     4305
     4306            unsigned const idxGstReg = ASMBitFirstSetU64(fGstRegShadows) - 1;
     4307            off = iemNativeEmitLoadGprWithGstShadowReg(pReNative, off, idxHstReg, (IEMNATIVEGSTREG)idxGstReg);
     4308
     4309            fHstRegs &= ~RT_BIT_32(idxHstReg);
     4310        } while (fHstRegs != 0);
     4311    }
     4312    return off;
     4313}
     4314
     4315
     4316/**
    42104317 * Flushes delayed write of a specific guest register.
    42114318 *
     
    69777084    pReNative->Core.aVars[idxVar].fRegAcquired = true;
    69787085    return idxReg;
     7086}
     7087
     7088
     7089/**
     7090 * Emit code to save volatile registers prior to a call to a helper (TLB miss).
     7091 *
     7092 * This is used together with iemNativeVarRestoreVolatileRegsPostHlpCall() and
     7093 * optionally iemNativeRegRestoreGuestShadowsInVolatileRegs() to bypass the
     7094 * requirement of flushing anything in volatile host registers when making a
     7095 * call.
     7096 *
     7097 * @returns New @a off value.
     7098 * @param   pReNative           The recompiler state.
     7099 * @param   off                 The code buffer position.
     7100 * @param   fHstRegsNotToSave   Set of registers not to save & restore.
          * @note    Only registers holding variables (kIemNativeWhat_Var) of the
          *          stack kind are spilled; other register uses are skipped.
          * @throws  VERR_IEM_VAR_IPE_12, VERR_IEM_VAR_IPE_13 (via longjmp) on
          *          inconsistent variable / register state.
     7101 */
     7102DECL_INLINE_THROW(uint32_t)
     7103iemNativeVarSaveVolatileRegsPreHlpCall(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fHstRegsNotToSave)
     7104{
     7105    uint32_t fHstRegs = pReNative->Core.bmHstRegs & IEMNATIVE_CALL_VOLATILE_GREG_MASK & ~fHstRegsNotToSave;
     7106    if (fHstRegs)
     7107    {
     7108        do
     7109        {
     7110            unsigned int const idxHstReg = ASMBitFirstSetU32(fHstRegs) - 1;
     7111            fHstRegs &= ~RT_BIT_32(idxHstReg);
     7112
     7113            if (pReNative->Core.aHstRegs[idxHstReg].enmWhat == kIemNativeWhat_Var)
     7114            {
     7115                uint8_t const idxVar = pReNative->Core.aHstRegs[idxHstReg].idxVar;
     7116                AssertStmt(   idxVar < RT_ELEMENTS(pReNative->Core.aVars)
     7117                           && (pReNative->Core.bmVars & RT_BIT_32(idxVar))
     7118                           && pReNative->Core.aVars[idxVar].idxReg == idxHstReg,
     7119                           IEMNATIVE_DO_LONGJMP(pReNative,  VERR_IEM_VAR_IPE_12));
     7120                switch (pReNative->Core.aVars[idxVar].enmKind)
     7121                {
     7122                    case kIemNativeVarKind_Stack:
     7123                    {
     7124                        /* Temporarily spill the variable register. */
     7125                        uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar);
     7126                        Log12(("iemNativeVarSaveVolatileRegsPreHlpCall: spilling idxVar=%d/idxReg=%d onto the stack (slot %#x bp+%d, off=%#x)\n",
     7127                               idxVar, idxHstReg, idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off));
     7128                        off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxHstReg);
     7129                        continue;
     7130                    }
     7131
     7132                    case kIemNativeVarKind_Immediate:
     7133                    case kIemNativeVarKind_VarRef:
     7134                    case kIemNativeVarKind_GstRegRef:
     7135                        /* It is weird to have any of these loaded at this point. */
     7136                        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative,  VERR_IEM_VAR_IPE_13));
     7137                        continue;
     7138
     7139                    case kIemNativeVarKind_End:
     7140                    case kIemNativeVarKind_Invalid:
     7141                        break;
     7142                }
     7143                AssertFailed();
     7144            }
     7145        } while (fHstRegs);
     7146    }
     7147    return off;
     7148}
     7149
     7150
     7151/**
     7152 * Emit code to restore volatile registers after a call to a helper.
     7153 *
     7154 * @returns New @a off value.
     7155 * @param   pReNative           The recompiler state.
     7156 * @param   off                 The code buffer position.
     7157 * @param   fHstRegsNotToSave   Set of registers not to save & restore.
          * @note    Mirrors iemNativeVarSaveVolatileRegsPreHlpCall(): only stack-kind
          *          variable registers are unspilled; other uses are skipped.
          * @throws  VERR_IEM_VAR_IPE_12, VERR_IEM_VAR_IPE_13 (via longjmp) on
          *          inconsistent variable / register state.
     7158 * @see     iemNativeVarSaveVolatileRegsPreHlpCall(),
     7159 *          iemNativeRegRestoreGuestShadowsInVolatileRegs()
     7160 */
     7161DECL_INLINE_THROW(uint32_t)
     7162iemNativeVarRestoreVolatileRegsPostHlpCall(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fHstRegsNotToSave)
     7163{
     7164    uint32_t fHstRegs = pReNative->Core.bmHstRegs & IEMNATIVE_CALL_VOLATILE_GREG_MASK & ~fHstRegsNotToSave;
     7165    if (fHstRegs)
     7166    {
     7167        do
     7168        {
     7169            unsigned int const idxHstReg = ASMBitFirstSetU32(fHstRegs) - 1;
     7170            fHstRegs &= ~RT_BIT_32(idxHstReg);
     7171
     7172            if (pReNative->Core.aHstRegs[idxHstReg].enmWhat == kIemNativeWhat_Var)
     7173            {
     7174                uint8_t const idxVar = pReNative->Core.aHstRegs[idxHstReg].idxVar;
     7175                AssertStmt(   idxVar < RT_ELEMENTS(pReNative->Core.aVars)
     7176                           && (pReNative->Core.bmVars & RT_BIT_32(idxVar))
     7177                           && pReNative->Core.aVars[idxVar].idxReg == idxHstReg,
     7178                           IEMNATIVE_DO_LONGJMP(pReNative,  VERR_IEM_VAR_IPE_12));
     7179                switch (pReNative->Core.aVars[idxVar].enmKind)
     7180                {
     7181                    case kIemNativeVarKind_Stack:
     7182                    {
     7183                        /* Unspill the variable register. */
     7184                        uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar);
     7185                        Log12(("iemNativeVarRestoreVolatileRegsPostHlpCall: unspilling idxVar=%d/idxReg=%d (slot %#x bp+%d, off=%#x)\n",
     7186                               idxVar, idxHstReg, idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off));
     7187                        off = iemNativeEmitLoadGprByBp(pReNative, off, idxHstReg, iemNativeStackCalcBpDisp(idxStackSlot));
     7188                        continue;
     7189                    }
     7190
     7191                    case kIemNativeVarKind_Immediate:
     7192                    case kIemNativeVarKind_VarRef:
     7193                    case kIemNativeVarKind_GstRegRef:
     7194                        /* It is weird to have any of these loaded at this point. */
     7195                        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative,  VERR_IEM_VAR_IPE_13));
     7196                        continue;
     7197
     7198                    case kIemNativeVarKind_End:
     7199                    case kIemNativeVarKind_Invalid:
     7200                        break;
     7201                }
     7202                AssertFailed();
     7203            }
     7204        } while (fHstRegs);
     7205    }
     7206    return off;
    69797207}
    69807208
     
    981010038        iemNativeRegFreeTmp(a_pReNative, idxReg2);
    981110039        iemNativeRegFreeTmp(a_pReNative, idxReg1);
     10040    }
     10041
     10042    uint32_t getRegsNotToSave() const
     10043    {
     10044        if (!fSkip)
     10045            return RT_BIT_32(idxReg1) | RT_BIT_32(idxReg2);
     10046        return 0;
     10047    }
     10048
     10049    /** This is only for avoiding assertions. */
     10050    uint32_t getActiveRegsWithShadows() const
     10051    {
     10052#ifdef VBOX_STRICT
     10053        if (!fSkip)
     10054            return RT_BIT_32(idxRegSegBase) | RT_BIT_32(idxRegSegLimit) | RT_BIT_32(idxRegSegAttrib);
     10055#endif
     10056        return 0;
    981210057    }
    981310058} IEMNATIVEEMITTLBSTATE;
     
    1087311118    /* IEMNATIVE_CALL_ARG1_GREG = idxVarValue (first) */
    1087411119    off = iemNativeEmitLoadArgGregFromImmOrStackVar(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxVarValue,
    10875                                                     0 /*offAddend*/, true /*fVarAllowInVolatileReg*/);
     11120                                                    0 /*offAddend*/, IEMNATIVE_CALL_VOLATILE_GREG_MASK);
    1087611121
    1087711122    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
     
    1132811573     * may end up making calls.
    1132911574     */
    11330     /** @todo we could postpone this till we make the call and reload the
    11331      * registers after returning from the call. Not sure if that's sensible or
    11332      * not, though. */
    1133311575    off = iemNativeRegFlushPendingWrites(pReNative, off);
    1133411576
     11577#ifdef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
    1133511578    /*
    1133611579     * Move/spill/flush stuff out of call-volatile registers.
     
    1134011583    /** @todo save+restore active registers and maybe guest shadows in tlb-miss.  */
    1134111584    off = iemNativeRegMoveAndFreeAndFlushAtCall(pReNative, off, 0 /* vacate all non-volatile regs */);
     11585#endif
    1134211586
    1134311587    /* The bUnmapInfo variable will get a register in the tlb-hit code path,
     
    1138411628#endif
    1138511629
     11630#ifndef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
     11631    /* Save variables in volatile registers. */
     11632    uint32_t const fHstRegsNotToSave = TlbState.getRegsNotToSave() | RT_BIT_32(idxRegMemResult) | RT_BIT_32(idxRegUnmapInfo);
     11633    off = iemNativeVarSaveVolatileRegsPreHlpCall(pReNative, off, fHstRegsNotToSave);
     11634#endif
     11635
     11636    /* IEMNATIVE_CALL_ARG2_GREG = GCPtrMem - load first as it is from a variable. */
     11637    off = iemNativeEmitLoadArgGregFromImmOrStackVar(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, idxVarGCPtrMem, 0 /*cbAppend*/,
     11638#ifndef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
     11639                                                    IEMNATIVE_CALL_VOLATILE_GREG_MASK, true /*fSpilledVarsInvolatileRegs*/);
     11640#else
     11641                                                    IEMNATIVE_CALL_VOLATILE_GREG_MASK);
     11642#endif
     11643
    1138611644    /* IEMNATIVE_CALL_ARG3_GREG = iSegReg */
    1138711645    if (iSegReg != UINT8_MAX)
     
    1139111649    }
    1139211650
    11393     /* IEMNATIVE_CALL_ARG2_GREG = GCPtrMem */
    11394     off = iemNativeEmitLoadArgGregFromImmOrStackVar(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, idxVarGCPtrMem);
    11395 
    1139611651    /* IEMNATIVE_CALL_ARG1_GREG = &idxVarUnmapInfo; stackslot address, load any register with result after the call. */
    11397 #if 0
    11398     off = iemNativeEmitLoadArgGregWithVarAddr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxVarUnmapInfo, true /*fFlushShadows*/);
    11399 #else
    1140011652    int32_t const offBpDispVarUnmapInfo = iemNativeStackCalcBpDisp(iemNativeVarGetStackSlot(pReNative, idxVarUnmapInfo));
    1140111653    off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, offBpDispVarUnmapInfo);
    11402 #endif
    1140311654
    1140411655    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
     
    1141411665    if (idxRegMemResult != IEMNATIVE_CALL_RET_GREG)
    1141511666        off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxRegMemResult, IEMNATIVE_CALL_RET_GREG);
     11667
     11668#ifndef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
     11669    /* Restore variables and guest shadow registers to volatile registers. */
     11670    off = iemNativeVarRestoreVolatileRegsPostHlpCall(pReNative, off, fHstRegsNotToSave);
     11671    off = iemNativeRegRestoreGuestShadowsInVolatileRegs(pReNative, off, TlbState.getActiveRegsWithShadows());
     11672#endif
    1141611673
    1141711674    Assert(pReNative->Core.aVars[idxVarUnmapInfo].idxReg == idxRegUnmapInfo);
     
    1144211699         */
    1144311700        iemNativeLabelDefine(pReNative, idxLabelTlbDone, off);
     11701
     11702# ifndef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
     11703        /* Temp Hack: Flush all guest shadows in volatile registers in case of TLB miss. */
     11704        iemNativeRegFlushGuestShadowsByHostMask(pReNative, IEMNATIVE_CALL_VOLATILE_GREG_MASK);
     11705# endif
    1144411706    }
    1144511707#else
     
    1155311815    /* IEMNATIVE_CALL_ARG1_GREG = idxVarUnmapInfo (first!) */
    1155411816    off = iemNativeEmitLoadArgGregFromStackVar(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxVarUnmapInfo,
    11555                                                0 /*offAddend*/, true /*fVarAllowInVolatileReg*/);
     11817                                               0 /*offAddend*/, IEMNATIVE_CALL_VOLATILE_GREG_MASK);
    1155611818
    1155711819    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r102724 r102733  
    836836                                                                  uint32_t fKeepVars = 0);
    837837DECLHIDDEN(void)            iemNativeRegFlushGuestShadows(PIEMRECOMPILERSTATE pReNative, uint64_t fGstRegs) RT_NOEXCEPT;
     838DECLHIDDEN(void)            iemNativeRegFlushGuestShadowsByHostMask(PIEMRECOMPILERSTATE pReNative, uint32_t fHstRegs) RT_NOEXCEPT;
    838839
    839840DECL_HIDDEN_THROW(uint8_t)  iemNativeVarGetStackSlot(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar);
  • trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h

    r102724 r102733  
    43014301DECL_FORCE_INLINE_THROW(uint32_t)
    43024302iemNativeEmitLoadArgGregFromStackVar(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxRegArg, uint8_t idxVar,
    4303                                      int32_t offAddend = 0, bool fVarAllowInVolatileReg = false)
     4303                                     int32_t offAddend = 0, uint32_t fHstVolatileRegsAllowed = UINT32_MAX,
     4304                                     bool fSpilledVarsInVolatileRegs = false)
    43044305{
    43054306    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVar);
     
    43084309
    43094310    uint8_t const idxRegVar = pReNative->Core.aVars[idxVar].idxReg;
    4310     if (idxRegVar < RT_ELEMENTS(pReNative->Core.aHstRegs))
    4311     {
    4312         Assert(!(RT_BIT_32(idxRegVar) & IEMNATIVE_CALL_VOLATILE_GREG_MASK) || fVarAllowInVolatileReg);
    4313         RT_NOREF(fVarAllowInVolatileReg);
     4311    if (   idxRegVar < RT_ELEMENTS(pReNative->Core.aHstRegs)
     4312        && (   (RT_BIT_32(idxRegVar) & (~IEMNATIVE_CALL_VOLATILE_GREG_MASK | fHstVolatileRegsAllowed))
     4313            || !fSpilledVarsInVolatileRegs ))
     4314    {
     4315        AssertStmt(   !(RT_BIT_32(idxRegVar) & IEMNATIVE_CALL_VOLATILE_GREG_MASK)
     4316                   || (RT_BIT_32(idxRegVar) & fHstVolatileRegsAllowed),
     4317                   IEMNATIVE_DO_LONGJMP(pReNative,  VERR_IEM_REG_IPE_13));
    43144318        if (!offAddend)
    43154319        {
     
    43394343DECL_FORCE_INLINE_THROW(uint32_t)
    43404344iemNativeEmitLoadArgGregFromImmOrStackVar(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxRegArg, uint8_t idxVar,
    4341                                           int32_t offAddend = 0, bool fVarAllowInVolatileReg = false)
     4345                                          int32_t offAddend = 0, uint32_t fHstVolatileRegsAllowed = 0,
     4346                                          bool fSpilledVarsInVolatileRegs = false)
    43424347{
    43434348    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVar);
     
    43454350        off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegArg, pReNative->Core.aVars[idxVar].u.uValue + offAddend);
    43464351    else
    4347         off = iemNativeEmitLoadArgGregFromStackVar(pReNative, off, idxRegArg, idxVar, offAddend, fVarAllowInVolatileReg);
     4352        off = iemNativeEmitLoadArgGregFromStackVar(pReNative, off, idxRegArg, idxVar, offAddend,
     4353                                                   fHstVolatileRegsAllowed, fSpilledVarsInVolatileRegs);
    43484354    return off;
    43494355}
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette