VirtualBox

Changeset 106187 in vbox for trunk


Timestamp:
Oct 1, 2024 9:05:44 AM
Author:
vboxsync
svn:sync-xref-src-repo-rev:
164971
Message:

VMM/IEM: Injecting postponed eflags calculations into the TLB miss code paths. Fixed if constexpr for gcc. bugref:10720
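
The second half of the message refers to the RT_CONSTEXPR to RT_CONSTEXPR_IF conversions visible throughout the diff below. A minimal sketch of the idea, assuming (this is not the actual iprt definition) that the new macro wraps the whole `constexpr (expr)` sequence so the expansion after `if` is well-formed for gcc and degrades to a plain runtime test on pre-C++17 compilers; MY_CONSTEXPR_IF and pickStoreWidth are stand-in names:

    #if defined(__cplusplus) && __cplusplus >= 201703L
    # define MY_CONSTEXPR_IF(a_Expr)  constexpr (a_Expr)
    #else
    # define MY_CONSTEXPR_IF(a_Expr)  (a_Expr)
    #endif

    template<unsigned a_fEflOutput>
    unsigned pickStoreWidth(void)
    {
        /* expands to: if constexpr ((...)) - evaluated at compile time in C++17 */
        if MY_CONSTEXPR_IF((a_fEflOutput & 0x8d5U) == 0x8d5U) /* 0x8d5 = X86_EFL_STATUS_BITS */
            return 32;
        return 16;
    }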

Location:
trunk
Files:
7 edited

  • trunk/include/iprt/x86.h

    r106061 r106187  
    190190/** Bit 1 - Reserved, reads as 1. */
    191191#define X86_EFL_1           RT_BIT_32(1)
     192#define X86_EFL_1_BIT       1
    192193/** Bit 2 - PF - Parity flag - Status flag. */
    193194#define X86_EFL_PF          RT_BIT_32(2)
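
The only header change adds a bit-index companion to the existing mask constant; the strict postponed-EFLAGS check added to IEMAllN8veEmit-x86.h further down tests the always-one flag bit by index rather than by mask. The two forms side by side (the second line is taken from that new check):

    /* Mask form, as used in ordinary C tests: */
    Assert(fEfl & X86_EFL_1);
    /* Index form, consumed by the bit-test emitter in the new strict check: */
    off = iemNativeEmitTestBitInGprAndJmpToFixedIfSetEx(pCodeBuf, off, idxRegEfl,
                                                        X86_EFL_1_BIT, off, &offFixup1);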
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp

    r106180 r106187  
    323323        /* jz nothing_pending */
    324324        uint32_t const offFixup1 = off;
    325         off = iemNativeEmitJccToFixedEx(pCodeBuf, off, off + 64, kIemNativeInstrCond_e);
     325        off = iemNativeEmitJccToFixedEx(pCodeBuf, off, IEMNATIVE_HAS_POSTPONED_EFLAGS_CALCS(pReNative) ? off + 512 : off + 64,
     326                                        kIemNativeInstrCond_e);
    326327
    327328# elif defined(RT_ARCH_ARM64)
     
    334335        uint32_t const offFixup1 = off;
    335336        off = iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToFixedEx(pCodeBuf, off, idxTmpReg1, true /*f64Bit*/,
    336                                                                    false /*fJmpIfNotZero*/, off + 16);
     337                                                                   false /*fJmpIfNotZero*/, off);
    337338# else
    338339#  error "port me"
     
    373374         */
    374375        iemNativeFixupFixedJump(pReNative, offFixup1, off);
     376        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    375377    }
    376378
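
The only change in this hunk is the provisional jump target: with postponed EFLAGS calculations pending, the code emitted between the jcc and its (later fixed-up) target can exceed the rel8 range, so the provisional distance is bumped from +64 to +512 bytes to force a near-form encoding. A rough standalone model of why the provisional distance matters; emitJzToFixed is a made-up name and the real iemNativeEmitJccToFixedEx handles more conditions and the fixup bookkeeping:

    #include <stdint.h>
    #include <string.h>

    /* Pick jz rel8 vs jz rel32 from the provisional target; a later fixup can
       only patch the displacement, not grow the instruction. (x86, little endian) */
    static uint32_t emitJzToFixed(uint8_t *pb, uint32_t off, uint32_t offTarget)
    {
        int32_t cbDisp = (int32_t)offTarget - (int32_t)(off + 2);
        if (cbDisp == (int8_t)cbDisp)
        {
            pb[off++] = 0x74;                       /* jz rel8 */
            pb[off++] = (uint8_t)cbDisp;
        }
        else
        {
            pb[off++] = 0x0f;                       /* jz rel32 */
            pb[off++] = 0x84;
            cbDisp = (int32_t)offTarget - (int32_t)(off + 4);
            memcpy(&pb[off], &cbDisp, sizeof(cbDisp));
            off += 4;
        }
        return off;
    }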
     
    14391441    off = iemNativeVarSaveVolatileRegsPreHlpCall(pReNative, off, fHstRegsNotToSave);
    14401442
     1443#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1444    /* Do delayed EFLAGS calculations. There are no restrictions on volatile registers here. */
     1445    off = iemNativeDoPostponedEFlagsAtTlbMiss<0>(pReNative, off, &TlbState, fHstRegsNotToSave);
     1446#endif
     1447
    14411448    /* IEMNATIVE_CALL_ARG1_GREG = offInstr */
    14421449    off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, offInstr);
     
    14461453
    14471454    /* Done setting up parameters, make the call. */
    1448     off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)iemNativeHlpMemCodeNewPageTlbMissWithOff);
     1455    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, (uintptr_t)iemNativeHlpMemCodeNewPageTlbMissWithOff);
    14491456
    14501457    /* Move the result to the right register. */
     
    17361743    off = iemNativeVarSaveVolatileRegsPreHlpCall(pReNative, off, fHstRegsNotToSave);
    17371744
     1745#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1746    /* Do delayed EFLAGS calculations. There are no restrictions on volatile registers here. */
     1747    off = iemNativeDoPostponedEFlagsAtTlbMiss<0>(pReNative, off, &TlbState, fHstRegsNotToSave);
     1748#endif
     1749
    17381750    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    17391751    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    17401752
    17411753    /* Done setting up parameters, make the call. */
    1742     off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)iemNativeHlpMemCodeNewPageTlbMiss);
     1754    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, (uintptr_t)iemNativeHlpMemCodeNewPageTlbMiss);
    17431755
    17441756    /* Restore variables and guest shadow registers to volatile registers. */
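
Both TLB-miss hunks above follow the recipe that repeats through the rest of the changeset: after saving volatile registers and before loading the helper arguments, flush any postponed EFLAGS calculation, telling the flusher via the template mask which call-argument registers are already live; the helper call itself then skips the EFLAGS assertions (a_fSkipEflChecks). A self-contained model of the scratch-register choice the flusher has to make, with made-up MY_* masks standing in for the IEMNATIVE_* ones:

    #include <stdint.h>

    #define MY_VOLATILE_MASK  UINT32_C(0x0fc7)  /* assumed: rax,rcx,rdx,rsi,rdi,r8-r11 */
    #define MY_FIXED_MASK     UINT32_C(0x0008)  /* assumed: one register pinned to pVCpu */

    /* Pick a scratch register for the flush: volatile (or an extra TLB-state
       temp), but neither a loaded call argument nor a fixed register. */
    static uint8_t pickFlushTmpReg(uint32_t bmArgRegsInUse, uint32_t bmExtraTlbRegs)
    {
        uint32_t bmAvail = (MY_VOLATILE_MASK | bmExtraTlbRegs)
                         & ~(bmArgRegsInUse | MY_FIXED_MASK);
        /* Scanning from bit 0 prefers rax, so flag collection can use LAHF
           directly; the caller guarantees bmAvail is non-zero. */
        uint8_t iReg = 0;
        while (!(bmAvail & 1))
        {
            bmAvail >>= 1;
            iReg++;
        }
        return iReg;
    }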
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h

    r106180 r106187  
    17531753    }
    17541754
     1755#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1756    /* Do delayed EFLAGS calculations. */
     1757    off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)
     1758                                              | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState, fHstRegsNotToSave);
     1759#endif
     1760
    17551761    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    17561762    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    17571763
    17581764    /* Done setting up parameters, make the call. */
    1759     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     1765    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    17601766
    17611767    /* Restore variables and guest shadow registers to volatile registers. */
     
    24512457        off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegEffSp);
    24522458
     2459#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     2460    /* Do delayed EFLAGS calculations. */
     2461    off = iemNativeDoPostponedEFlagsAtTlbMiss<RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)>(pReNative, off, &TlbState, fHstRegsNotToSave);
     2462#endif
     2463
    24532464    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    24542465    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    24552466
    24562467    /* Done setting up parameters, make the call. */
    2457     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     2468    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    24582469
    24592470    /* Move the return register content to idxRegMemResult. */
     
    62636274    /* Updating the skipping according to the outputs is a little early, but
    62646275       we don't have any other hooks for references atm. */
    6265     if RT_CONSTEXPR((a_fEflOutput & X86_EFL_STATUS_BITS) == X86_EFL_STATUS_BITS)
     6276    if RT_CONSTEXPR_IF((a_fEflOutput & X86_EFL_STATUS_BITS) == X86_EFL_STATUS_BITS)
    62666277        off = iemNativeEmitStoreImmToVCpuU32(pReNative, off, 0, RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
    6267     else if RT_CONSTEXPR((a_fEflOutput & X86_EFL_STATUS_BITS) != 0)
     6278    else if RT_CONSTEXPR_IF((a_fEflOutput & X86_EFL_STATUS_BITS) != 0)
    62686279        off = iemNativeEmitAndImmIntoVCpuU32(pReNative, off, ~(a_fEflOutput & X86_EFL_STATUS_BITS),
    62696280                                             RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
     
    71557166 * and IEM_MC_FETCH_MEM_FLAT_U8/16/32/64 and IEM_MC_STORE_MEM_FLAT_U8/16/32/64
    71567167 * (with iSegReg = UINT8_MAX). */
     7168/** @todo Pass enmOp, cbMem, fAlignMaskAndClt and a iSegReg == UINT8_MAX
     7169 *        indicator as template parameters. */
    71577170DECL_INLINE_THROW(uint32_t)
    71587171iemNativeEmitMemFetchStoreDataCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off,  uint8_t idxVarValue, uint8_t iSegReg,
     
    74797492         * For SIMD based variables we pass the reference on the stack for both fetches and stores.
    74807493         *
    7481          * @note There was a register variable assigned to the variable for the TlbLookup case above
     7494         * Note! There was a register variable assigned to the variable for the TlbLookup case above
    74827495         *       which must not be freed or the value loaded into the register will not be synced into the register
    74837496         *       further down the road because the variable doesn't know it had a variable assigned.
    74847497         *
    7485          * @note For loads it is not required to sync what is in the assigned register with the stack slot
     7498         * Note! For loads it is not required to sync what is in the assigned register with the stack slot
    74867499         *       as it will be overwritten anyway.
    74877500         */
     
    75207533    }
    75217534
     7535#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     7536    /* Do delayed EFLAGS calculations. */
     7537    if (enmOp == kIemNativeEmitMemOp_Store || cbMem == sizeof(RTUINT128U) || cbMem == sizeof(RTUINT256U))
     7538    {
     7539        if (iSegReg == UINT8_MAX)
     7540            off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)
     7541                                                      | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState,
     7542                                                                                             fHstRegsNotToSave);
     7543        else
     7544            off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)
     7545                                                      | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)
     7546                                                      | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG)>(pReNative, off, &TlbState,
     7547                                                                                             fHstRegsNotToSave);
     7548    }
     7549    else if (iSegReg == UINT8_MAX)
     7550        off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)>(pReNative, off, &TlbState,
     7551                                                                                         fHstRegsNotToSave);
     7552    else
     7553        off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)
     7554                                                  | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState,
     7555                                                                                         fHstRegsNotToSave);
     7556#endif
     7557
    75227558    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    75237559    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    75247560
    75257561    /* Done setting up parameters, make the call. */
    7526     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     7562    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    75277563
    75287564    /*
     
    85018537    }
    85028538
     8539#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8540    /* Do delayed EFLAGS calculations. */
     8541    off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)
     8542                                              | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState, fHstRegsNotToSave);
     8543#endif
     8544
    85038545    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    85048546    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    85058547
    85068548    /* Done setting up parameters, make the call. */
    8507     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     8549    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    85088550
    85098551    /* Restore variables and guest shadow registers to volatile registers. */
     
    88448886        off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, idxRegEffSp);
    88458887
     8888#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8889    /* Do delayed EFLAGS calculations. */
     8890    off = iemNativeDoPostponedEFlagsAtTlbMiss<RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG)>(pReNative, off, &TlbState, fHstRegsNotToSave);
     8891#endif
     8892
    88468893    /* IEMNATIVE_CALL_ARG0_GREG = pVCpu */
    88478894    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    88488895
    88498896    /* Done setting up parameters, make the call. */
    8850     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     8897    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    88518898
    88528899    /* Move the return register content to idxRegMemResult. */
     
    94679514    }
    94689515
     9516#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     9517    /* Do delayed EFLAGS calculations. */
     9518    if (iSegReg == UINT8_MAX)
     9519        off = iemNativeDoPostponedEFlagsAtTlbMiss<RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)>(pReNative, off, &TlbState,
     9520                                                                                       fHstRegsNotToSave);
     9521    else
     9522        off = iemNativeDoPostponedEFlagsAtTlbMiss<  RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG)
     9523                                                  | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG)>(pReNative, off, &TlbState,
     9524                                                                                         fHstRegsNotToSave);
     9525#endif
     9526
    94699527    /* IEMNATIVE_CALL_ARG1_GREG = &idxVarUnmapInfo; stackslot address, load any register with result after the call. */
    94709528    int32_t const offBpDispVarUnmapInfo = iemNativeStackCalcBpDisp(iemNativeVarGetStackSlot(pReNative, idxVarUnmapInfo));
     
    94759533
    94769534    /* Done setting up parameters, make the call. */
    9477     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     9535    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    94789536
    94799537    /*
     
    96539711    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    96549712
    9655     /* Done setting up parameters, make the call. */
    9656     off = iemNativeEmitCallImm(pReNative, off, pfnFunction);
     9713    /* Done setting up parameters, make the call.
     9714       Note! Since we can only end up here if we took a TLB miss, any postponed EFLAGS
     9715             calculations have been done there already. Thus, a_fSkipEflChecks = true. */
     9716    off = iemNativeEmitCallImm<true /*a_fSkipEflChecks*/>(pReNative, off, pfnFunction);
    96579717
    96589718    /* The bUnmapInfo variable is implicitly freed by these MCs. */
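
All the iemNativeDoPostponedEFlagsAtTlbMiss call sites in this file differ only in which argument registers they exclude: ARG1 always carries the guest address, stores and 128/256-bit fetches also occupy ARG2 with the value (or a pointer to it), and segmented accesses pass iSegReg in the next free argument register. The real code in iemNativeEmitMemFetchStoreDataCommon spells each case out because the mask is a template parameter, but the selection reduces to this hypothetical helper:

    static uint32_t bmArgRegsForMemOp(bool fStoreOrWideFetch, bool fFlat)
    {
        uint32_t bmRegs = RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG);          /* GCPtrMem */
        if (fStoreOrWideFetch)
            bmRegs |= RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG);              /* value / pointer */
        if (!fFlat)                                                     /* iSegReg too */
            bmRegs |= RT_BIT_32(fStoreOrWideFetch ? IEMNATIVE_CALL_ARG3_GREG
                                                  : IEMNATIVE_CALL_ARG2_GREG);
        return bmRegs;
    }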
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r106180 r106187  
    59595959
    59605960
     5961/**
     5962 * Loads the guest shadow register @a enmGstReg into host reg @a idxHstReg, zero
     5963 * extending to 64-bit width, extended version.
     5964 *
     5965 * @returns New code buffer offset on success, UINT32_MAX on failure.
     5966 * @param   pCodeBuf    The code buffer.
     5967 * @param   off         The current code buffer position.
     5968 * @param   idxHstReg   The host register to load the guest register value into.
     5969 * @param   enmGstReg   The guest register to load.
     5970 *
     5971 * @note This does not mark @a idxHstReg as having a shadow copy of @a enmGstReg,
     5972 *       that is something the caller needs to do if applicable.
     5973 */
     5974DECL_HIDDEN_THROW(uint32_t)
     5975iemNativeEmitLoadGprWithGstShadowRegEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg)
     5976{
     5977    Assert((unsigned)enmGstReg < (unsigned)kIemNativeGstReg_End);
     5978    Assert(g_aGstShadowInfo[enmGstReg].cb != 0);
     5979
     5980    switch (g_aGstShadowInfo[enmGstReg].cb)
     5981    {
     5982        case sizeof(uint64_t):
     5983            return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
     5984        case sizeof(uint32_t):
     5985            return iemNativeEmitLoadGprFromVCpuU32Ex(pCodeBuf, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
     5986        case sizeof(uint16_t):
     5987            return iemNativeEmitLoadGprFromVCpuU16Ex(pCodeBuf, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
     5988#if 0 /* not present in the table. */
     5989        case sizeof(uint8_t):
     5990            return iemNativeEmitLoadGprFromVCpuU8Ex(pCodeBuf, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
     5991#endif
     5992        default:
     5993#ifdef IEM_WITH_THROW_CATCH
     5994            AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IPE_NOT_REACHED_DEFAULT_CASE));
     5995#else
     5996            AssertReleaseFailedReturn(off);
     5997#endif
     5998    }
     5999}
     6000
     6001
    59616002#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
    59626003/**
     
    60036044 *       Trashes EFLAGS on AMD64.
    60046045 */
    6005 DECL_HIDDEN_THROW(uint32_t)
    6006 iemNativeEmitTop32BitsClearCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg)
     6046DECL_FORCE_INLINE(uint32_t)
     6047iemNativeEmitTop32BitsClearCheckEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxReg)
    60076048{
    60086049# ifdef RT_ARCH_AMD64
    6009     uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 20);
    6010 
    60116050    /* rol reg64, 32 */
    6012     pbCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
    6013     pbCodeBuf[off++] = 0xc1;
    6014     pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
    6015     pbCodeBuf[off++] = 32;
     6051    pCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
     6052    pCodeBuf[off++] = 0xc1;
     6053    pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
     6054    pCodeBuf[off++] = 32;
    60166055
    60176056    /* test reg32, ffffffffh */
    60186057    if (idxReg >= 8)
    6019         pbCodeBuf[off++] = X86_OP_REX_B;
    6020     pbCodeBuf[off++] = 0xf7;
    6021     pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
    6022     pbCodeBuf[off++] = 0xff;
    6023     pbCodeBuf[off++] = 0xff;
    6024     pbCodeBuf[off++] = 0xff;
    6025     pbCodeBuf[off++] = 0xff;
     6058        pCodeBuf[off++] = X86_OP_REX_B;
     6059    pCodeBuf[off++] = 0xf7;
     6060    pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
     6061    pCodeBuf[off++] = 0xff;
     6062    pCodeBuf[off++] = 0xff;
     6063    pCodeBuf[off++] = 0xff;
     6064    pCodeBuf[off++] = 0xff;
    60266065
    60276066    /* je/jz +1 */
    6028     pbCodeBuf[off++] = 0x74;
    6029     pbCodeBuf[off++] = 0x01;
     6067    pCodeBuf[off++] = 0x74;
     6068    pCodeBuf[off++] = 0x01;
    60306069
    60316070    /* int3 */
    6032     pbCodeBuf[off++] = 0xcc;
     6071    pCodeBuf[off++] = 0xcc;
    60336072
    60346073    /* rol reg64, 32 */
    6035     pbCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
    6036     pbCodeBuf[off++] = 0xc1;
    6037     pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
    6038     pbCodeBuf[off++] = 32;
     6074    pCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
     6075    pCodeBuf[off++] = 0xc1;
     6076    pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
     6077    pCodeBuf[off++] = 32;
    60396078
    60406079# elif defined(RT_ARCH_ARM64)
    6041     uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
    60426080    /* lsr tmp0, reg64, #32 */
    6043     pu32CodeBuf[off++] = Armv8A64MkInstrLsrImm(IEMNATIVE_REG_FIXED_TMP0, idxReg, 32);
     6081    pCodeBuf[off++] = Armv8A64MkInstrLsrImm(IEMNATIVE_REG_FIXED_TMP0, idxReg, 32);
    60446082    /* cbz tmp0, +1 */
    6045     pu32CodeBuf[off++] = Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, 2, IEMNATIVE_REG_FIXED_TMP0);
     6083    pCodeBuf[off++] = Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, 2, IEMNATIVE_REG_FIXED_TMP0);
    60466084    /* brk #0x1100 */
    6047     pu32CodeBuf[off++] = Armv8A64MkInstrBrk(UINT32_C(0x1100));
     6085    pCodeBuf[off++] = Armv8A64MkInstrBrk(UINT32_C(0x1100));
    60486086
    60496087# else
    60506088#  error "Port me!"
    60516089# endif
     6090    return off;
     6091}
     6092
     6093
     6094/**
     6095 * Emitting code that checks that the value of @a idxReg is UINT32_MAX or less.
     6096 *
     6097 * @note May of course trash IEMNATIVE_REG_FIXED_TMP0.
     6098 *       Trashes EFLAGS on AMD64.
     6099 */
     6100DECL_HIDDEN_THROW(uint32_t)
     6101iemNativeEmitTop32BitsClearCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg)
     6102{
     6103# ifdef RT_ARCH_AMD64
     6104    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 20);
     6105# elif defined(RT_ARCH_ARM64)
     6106    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
     6107# else
     6108#  error "Port me!"
     6109# endif
     6110    off = iemNativeEmitTop32BitsClearCheckEx(pCodeBuf, off, idxReg);
    60526111    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    60536112    return off;
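
This hunk converts iemNativeEmitTop32BitsClearCheck into an Ex pair, a pattern the changeset applies repeatedly: the Ex worker writes into a caller-ensured buffer so it can be embedded into larger emits (such as the postponed-EFLAGS strict checks), while a thin non-Ex wrapper keeps the old interface. The generic shape, using a hypothetical one-byte emitter (AMD64; on ARM64 the buffer holds 32-bit words):

    /* Ex worker: writes only, caller guarantees buffer space. */
    DECL_FORCE_INLINE(uint32_t) myEmitNopEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off)
    {
        pCodeBuf[off++] = 0x90; /* nop */
        return off;
    }

    /* Wrapper: owns buffer sizing and the post-emit assertion. */
    DECL_INLINE_THROW(uint32_t) myEmitNop(PIEMRECOMPILERSTATE pReNative, uint32_t off)
    {
        off = myEmitNopEx(iemNativeInstrBufEnsure(pReNative, off, 1), off);
        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
        return off;
    }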
     
    60636122 *       Trashes EFLAGS on AMD64.
    60646123 */
    6065 DECL_HIDDEN_THROW(uint32_t)
    6066 iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg)
     6124DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestRegValueCheckEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf,
     6125                                                              uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg)
    60676126{
    60686127#if defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
     
    60736132
    60746133# ifdef RT_ARCH_AMD64
    6075     uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
    6076 
    60776134    /* cmp reg, [mem] */
    60786135    if (g_aGstShadowInfo[enmGstReg].cb == sizeof(uint8_t))
    60796136    {
    60806137        if (idxReg >= 8)
    6081             pbCodeBuf[off++] = X86_OP_REX_R;
    6082         pbCodeBuf[off++] = 0x38;
     6138            pCodeBuf[off++] = X86_OP_REX_R;
     6139        pCodeBuf[off++] = 0x38;
    60836140    }
    60846141    else
    60856142    {
    60866143        if (g_aGstShadowInfo[enmGstReg].cb == sizeof(uint64_t))
    6087             pbCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_R);
     6144            pCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_R);
    60886145        else
    60896146        {
    60906147            if (g_aGstShadowInfo[enmGstReg].cb == sizeof(uint16_t))
    6091                 pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
     6148                pCodeBuf[off++] = X86_OP_PRF_SIZE_OP;
    60926149            else
    60936150                AssertStmt(g_aGstShadowInfo[enmGstReg].cb == sizeof(uint32_t),
    60946151                           IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_LABEL_IPE_6));
    60956152            if (idxReg >= 8)
    6096                 pbCodeBuf[off++] = X86_OP_REX_R;
     6153                pCodeBuf[off++] = X86_OP_REX_R;
    60976154        }
    6098         pbCodeBuf[off++] = 0x39;
    6099     }
    6100     off = iemNativeEmitGprByVCpuDisp(pbCodeBuf, off, idxReg, g_aGstShadowInfo[enmGstReg].off);
     6155        pCodeBuf[off++] = 0x39;
     6156    }
     6157    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, idxReg, g_aGstShadowInfo[enmGstReg].off);
    61016158
    61026159    /* je/jz +1 */
    6103     pbCodeBuf[off++] = 0x74;
    6104     pbCodeBuf[off++] = 0x01;
     6160    pCodeBuf[off++] = 0x74;
     6161    pCodeBuf[off++] = 0x01;
    61056162
    61066163    /* int3 */
    6107     pbCodeBuf[off++] = 0xcc;
     6164    pCodeBuf[off++] = 0xcc;
    61086165
    61096166    /* For values smaller than the register size, we must check that the rest
     
    61126169    {
    61136170        /* test reg64, imm32 */
    6114         pbCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
    6115         pbCodeBuf[off++] = 0xf7;
    6116         pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
    6117         pbCodeBuf[off++] = 0;
    6118         pbCodeBuf[off++] = g_aGstShadowInfo[enmGstReg].cb > sizeof(uint8_t) ? 0 : 0xff;
    6119         pbCodeBuf[off++] = 0xff;
    6120         pbCodeBuf[off++] = 0xff;
     6171        pCodeBuf[off++] = X86_OP_REX_W | (idxReg < 8 ? 0 : X86_OP_REX_B);
     6172        pCodeBuf[off++] = 0xf7;
     6173        pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxReg & 7);
     6174        pCodeBuf[off++] = 0;
     6175        pCodeBuf[off++] = g_aGstShadowInfo[enmGstReg].cb > sizeof(uint8_t) ? 0 : 0xff;
     6176        pCodeBuf[off++] = 0xff;
     6177        pCodeBuf[off++] = 0xff;
    61216178
    61226179        /* je/jz +1 */
    6123         pbCodeBuf[off++] = 0x74;
    6124         pbCodeBuf[off++] = 0x01;
     6180        pCodeBuf[off++] = 0x74;
     6181        pCodeBuf[off++] = 0x01;
    61256182
    61266183        /* int3 */
    6127         pbCodeBuf[off++] = 0xcc;
    6128         IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    6129     }
    6130     else
    6131     {
    6132         IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    6133         if (g_aGstShadowInfo[enmGstReg].cb == sizeof(uint32_t))
    6134             iemNativeEmitTop32BitsClearCheck(pReNative, off, idxReg);
    6135     }
     6184        pCodeBuf[off++] = 0xcc;
     6185    }
     6186    else if (g_aGstShadowInfo[enmGstReg].cb == sizeof(uint32_t))
     6187        iemNativeEmitTop32BitsClearCheckEx(pCodeBuf, off, idxReg);
     6188    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    61366189
    61376190# elif defined(RT_ARCH_ARM64)
    61386191    /* mov TMP0, [gstreg] */
    6139     off = iemNativeEmitLoadGprWithGstShadowReg(pReNative, off, IEMNATIVE_REG_FIXED_TMP0, enmGstReg);
    6140 
    6141     uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3);
     6192    off = iemNativeEmitLoadGprWithGstShadowRegEx(pCodeBuf, off, IEMNATIVE_REG_FIXED_TMP0, enmGstReg);
     6193
    61426194    /* sub tmp0, tmp0, idxReg */
    6143     pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(true /*fSub*/, IEMNATIVE_REG_FIXED_TMP0, IEMNATIVE_REG_FIXED_TMP0, idxReg);
    6144     /* cbz tmp0, +1 */
    6145     pu32CodeBuf[off++] = Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, 2, IEMNATIVE_REG_FIXED_TMP0);
     6195    pCodeBuf[off++] = Armv8A64MkInstrAddSubReg(true /*fSub*/, IEMNATIVE_REG_FIXED_TMP0, IEMNATIVE_REG_FIXED_TMP0, idxReg);
     6196    /* cbz tmp0, +2 */
     6197    pCodeBuf[off++] = Armv8A64MkInstrCbz(2, IEMNATIVE_REG_FIXED_TMP0);
    61466198    /* brk #0x1000+enmGstReg */
    6147     pu32CodeBuf[off++] = Armv8A64MkInstrBrk((uint32_t)enmGstReg | UINT32_C(0x1000));
     6199    pCodeBuf[off++] = Armv8A64MkInstrBrk((uint32_t)enmGstReg | UINT32_C(0x1000));
    61486200    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    61496201
     
    61546206}
    61556207
     6208
     6209/**
     6210 * Emitting code that checks that the content of register @a idxReg is the same
     6211 * as what's in the guest register @a enmGstReg, resulting in a breakpoint
     6212 * instruction if that's not the case.
     6213 *
     6214 * @note May of course trash IEMNATIVE_REG_FIXED_TMP0.
     6215 *       Trashes EFLAGS on AMD64.
     6216 */
     6217DECL_HIDDEN_THROW(uint32_t)
     6218iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg)
     6219{
     6220#ifdef RT_ARCH_AMD64
     6221    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
     6222#elif defined(RT_ARCH_ARM64)
     6223    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
     6224# else
     6225#  error "Port me!"
     6226# endif
     6227    return iemNativeEmitGuestRegValueCheckEx(pReNative, pCodeBuf, off, idxReg, enmGstReg);
     6228}
    61566229
    61576230# ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h

    r106180 r106187  
    230230    if (fEFlags)
    231231    {
    232         if RT_CONSTEXPR(a_fEflClobbered != X86_EFL_STATUS_BITS)
     232        if RT_CONSTEXPR_IF(a_fEflClobbered != X86_EFL_STATUS_BITS)
    233233        {
    234234            fEFlags &= ~a_fEflClobbered;
     
    270270    if (idxRegTmp == X86_GREG_xAX)
    271271    {
    272         /* sahf ; AH = EFLAGS */
    273         pCodeBuf[off++] = 0x9e;
     272        /* lahf ; AH = EFLAGS */
     273        pCodeBuf[off++] = 0x9f;
    274274        if (idxRegEfl <= X86_GREG_xBX)
    275275        {
     
    308308        pCodeBuf[off++] = 0x86;
    309309        pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 4 /*AH*/, 0 /*AL*/);
    310         /* sahf ; AH = EFLAGS */
    311         pCodeBuf[off++] = 0x9e;
     310        /* lahf ; AH = EFLAGS */
     311        pCodeBuf[off++] = 0x9f;
    312312        /* xchg al, ah  */
    313313        pCodeBuf[off++] = 0x86;
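
The two hunks above are an opcode fix, not a comment polish: the intent, per the "AH = EFLAGS" comments, is to read the flags, which is LAHF (opcode 9Fh); the old code emitted SAHF (9Eh), which goes the other way and would have clobbered the flags with whatever was in AH. For reference:

    /* One-byte opcodes, easy to confuse:
       9E  SAHF    SF:ZF:0:AF:0:PF:1:CF = AH    (store AH into FLAGS)
       9F  LAHF    AH = SF:ZF:0:AF:0:PF:1:CF    (load FLAGS into AH)
       Note the constant 1 in bit 1 of the loaded byte, matching X86_EFL_1. */
    pCodeBuf[off++] = 0x9f;   /* lahf ; AH = EFLAGS */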
     
    358358
    359359
    360 template<uint32_t const a_bmInputRegs>
    361 static uint32_t iemNativeDoPostponedEFlagsAtTbExitInternal(PIEMRECOMPILERSTATE pReNative, uint32_t off, PIEMNATIVEINSTR pCodeBuf)
     360template<uint32_t const a_bmInputRegs, bool const a_fTlbMiss = false>
     361static uint32_t iemNativeDoPostponedEFlagsInternal(PIEMRECOMPILERSTATE pReNative, uint32_t off, PIEMNATIVEINSTR pCodeBuf,
     362                                                   uint32_t bmExtraTlbMissRegs = 0)
    362363{
    363364    /*
    364      * We can't do regular register allocations here, but since we're in an exit
    365      * path where all pending writes has been flushed and we have a known set of
    366      * registers with input for the exit label, we do our own simple stuff here.
     365     * In the TB exit code path we cannot do regular register allocation.  Nor
     366     * can we when we're in the TLB miss code, unless we're skipping the TLB
     367     * lookup.  Since the latter isn't an important usecase and should get along
     368     * fine on just volatile registers, we do not need to do anything special
     369     * for it.
     370     *
     371     * So, we do our own register allocating here.  Any register goes in the TB
     372     * exit path, excluding a_bmInputRegs, fixed and postponed related registers.
     373     * In the TLB miss we can use any volatile register and temporary registers
     374     * allocated in the TLB state.
    367375     *
    368376     * Note! On x86 we prefer using RAX as the first TMP register, so we can
     
    371379     *       shadow, since RAX is represented by bit 0 in the mask.
    372380     */
    373     uint32_t bmAvailableRegs = ~(a_bmInputRegs | IEMNATIVE_REG_FIXED_MASK) & IEMNATIVE_HST_GREG_MASK;
    374     if (pReNative->PostponedEfl.idxReg2 != UINT8_MAX)
    375         bmAvailableRegs &= ~(RT_BIT_32(pReNative->PostponedEfl.idxReg1) | RT_BIT_32(pReNative->PostponedEfl.idxReg2));
     381    uint32_t bmAvailableRegs;
     382    if RT_CONSTEXPR_IF(!a_fTlbMiss)
     383    {
     384        bmAvailableRegs = ~(a_bmInputRegs | IEMNATIVE_REG_FIXED_MASK) & IEMNATIVE_HST_GREG_MASK;
     385        if (pReNative->PostponedEfl.idxReg2 != UINT8_MAX)
     386            bmAvailableRegs &= ~(RT_BIT_32(pReNative->PostponedEfl.idxReg1) | RT_BIT_32(pReNative->PostponedEfl.idxReg2));
     387        else
     388            bmAvailableRegs &= ~RT_BIT_32(pReNative->PostponedEfl.idxReg1);
     389    }
    376390    else
    377         bmAvailableRegs &= ~RT_BIT_32(pReNative->PostponedEfl.idxReg1);
    378 
    379     /* Use existing EFLAGS shadow if available. */
    380     uint8_t idxRegEfl, idxRegTmp;
    381     if (pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(kIemNativeGstReg_EFlags))
    382     {
    383         idxRegEfl = pReNative->Core.aidxGstRegShadows[kIemNativeGstReg_EFlags];
    384         Assert(idxRegEfl < IEMNATIVE_HST_GREG_COUNT && (bmAvailableRegs & RT_BIT_32(idxRegEfl)));
     391    {
     392        /* Note! a_bmInputRegs takes precedence over bmExtraTlbMissRegs. */
     393        bmAvailableRegs  = (IEMNATIVE_CALL_VOLATILE_GREG_MASK | bmExtraTlbMissRegs)
     394                         & ~(a_bmInputRegs | IEMNATIVE_REG_FIXED_MASK)
     395                         & IEMNATIVE_HST_GREG_MASK;
     396    }
     397
     398    /* Use existing EFLAGS shadow if available. For the TLB-miss code path we
     399       need to weed out volatile registers here, as they will no longer be valid. */
     400    uint8_t idxRegTmp;
     401    uint8_t idxRegEfl = pReNative->Core.aidxGstRegShadows[kIemNativeGstReg_EFlags];
     402    if (   (pReNative->Core.bmGstRegShadows & RT_BIT_64(kIemNativeGstReg_EFlags))
     403        && (!a_fTlbMiss || !(RT_BIT_32(idxRegEfl) & IEMNATIVE_CALL_VOLATILE_GREG_MASK)))
     404    {
     405        Assert(idxRegEfl < IEMNATIVE_HST_GREG_COUNT);
     406        Assert(!(a_bmInputRegs & RT_BIT_32(idxRegEfl)));
     407        if RT_CONSTEXPR_IF(!a_fTlbMiss) Assert(bmAvailableRegs & RT_BIT_32(idxRegEfl));
    385408        bmAvailableRegs &= ~RT_BIT_32(idxRegEfl);
    386409#ifdef VBOX_STRICT
    387         /** @todo check shadow register content. */
     410        off = iemNativeEmitGuestRegValueCheckEx(pReNative, pCodeBuf, off, idxRegEfl, kIemNativeGstReg_EFlags);
    388411#endif
    389412
     
    420443     * Store EFLAGS.
    421444     */
     445#ifdef VBOX_STRICT
     446    /* check that X86_EFL_1 is set. */
     447    uint32_t offFixup1;
     448    off = iemNativeEmitTestBitInGprAndJmpToFixedIfSetEx(pCodeBuf, off, idxRegEfl, X86_EFL_1_BIT, off, &offFixup1);
     449    off = iemNativeEmitBrkEx(pCodeBuf, off, 0x3330);
     450    iemNativeFixupFixedJump(pReNative, offFixup1, off);
     451    /* Check that X86_EFL_RAZ_LO_MASK is zero. */
     452    off = iemNativeEmitTestAnyBitsInGpr32Ex(pCodeBuf, off, idxRegEfl, X86_EFL_RAZ_LO_MASK);
     453    uint32_t const offFixup2 = off;
     454    off = iemNativeEmitJccToFixedEx(pCodeBuf, off, off, kIemNativeInstrCond_e);
     455    off = iemNativeEmitBrkEx(pCodeBuf, off, 0x3331);
     456    iemNativeFixupFixedJump(pReNative, offFixup2, off);
     457#endif
    422458    off = iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxRegEfl, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));
    423459    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
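
The new VBOX_STRICT block emits code that validates the reassembled EFLAGS value before storing it: the always-one bit 1 must be set (breakpoint 0x3330 otherwise) and the low read-as-zero bits must be clear (breakpoint 0x3331). The same invariant in plain host C, with assertEflSane a hypothetical name:

    #include <stdint.h>
    #include <assert.h>
    #include <iprt/x86.h>   /* X86_EFL_1, X86_EFL_RAZ_LO_MASK */

    static void assertEflSane(uint32_t fEfl)
    {
        assert(fEfl & X86_EFL_1);               /* always-one bit set, else brk 0x3330 */
        assert(!(fEfl & X86_EFL_RAZ_LO_MASK));  /* reserved-low bits clear, else brk 0x3331 */
    }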
     
    435471    {
    436472        PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
    437         return iemNativeDoPostponedEFlagsAtTbExitInternal<a_bmInputRegs>(pReNative, off, pCodeBuf);
     473        return iemNativeDoPostponedEFlagsInternal<a_bmInputRegs>(pReNative, off, pCodeBuf);
    438474    }
    439475    return off;
     
    446482{
    447483    if (pReNative->PostponedEfl.fEFlags)
    448         return iemNativeDoPostponedEFlagsAtTbExitInternal<a_bmInputRegs>(pReNative, off, pCodeBuf);
     484        return iemNativeDoPostponedEFlagsInternal<a_bmInputRegs>(pReNative, off, pCodeBuf);
     485    return off;
     486}
     487
     488
     489template<uint32_t const a_bmInputRegs>
     490DECL_FORCE_INLINE_THROW(uint32_t)
     491iemNativeDoPostponedEFlagsAtTlbMiss(PIEMRECOMPILERSTATE pReNative, uint32_t off, const IEMNATIVEEMITTLBSTATE *pTlbState,
     492                                    uint32_t bmTmpRegs)
     493{
     494    if (pReNative->PostponedEfl.fEFlags)
     495    {
     496        PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     497        return iemNativeDoPostponedEFlagsInternal<a_bmInputRegs, true>(pReNative, off, pCodeBuf,
     498                                                                       pTlbState->getRegsNotToSave() | bmTmpRegs);
     499    }
    449500    return off;
    450501}
     
    490541# endif
    491542        Log5(("iemNativeEmitEFlagsForLogical: Skipping %#x\n", X86_EFL_STATUS_BITS));
     543        return off;
    492544    }
    493545# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    494     else if (      (  (fEflPostponing = IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET(pLivenessEntry) & IEMLIVENESSBIT_STATUS_EFL_MASK)
    495                     | fEflClobbered)
    496                 == IEMLIVENESSBIT_STATUS_EFL_MASK
    497              && idxRegResult != UINT8_MAX)
     546    if (      (  (fEflPostponing = IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET(pLivenessEntry) & IEMLIVENESSBIT_STATUS_EFL_MASK)
     547               | fEflClobbered)
     548           == IEMLIVENESSBIT_STATUS_EFL_MASK
     549        && idxRegResult != UINT8_MAX)
    498550    {
    499551        STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeEflPostponedLogical);
    500         pReNative->fSkippingEFlags       = 0;
    501552        pReNative->PostponedEfl.fEFlags  = X86_EFL_STATUS_BITS;
    502553        pReNative->PostponedEfl.enmOp    = kIemNativePostponedEflOp_Logical;
     
    518569         * Collect flags and merge them with eflags.
    519570         */
    520         /** @todo we could alternatively use SAHF here when host rax is free since,
     571        /** @todo we could alternatively use LAHF here when host rax is free since,
    521572         *        OF is cleared. */
    522573        PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
     
    579630#endif
    580631        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     632    }
    581633
    582634#ifdef IEMNATIVE_WITH_EFLAGS_SKIPPING
    583         pReNative->fSkippingEFlags &= ~X86_EFL_STATUS_BITS;
     635    pReNative->fSkippingEFlags = 0;
    584636# ifdef IEMNATIVE_STRICT_EFLAGS_SKIPPING
    585         off = iemNativeEmitStoreImmToVCpuU32(pReNative, off, 0, RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
     637    off = iemNativeEmitStoreImmToVCpuU32(pReNative, off, 0, RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
    586638# endif
    587639#endif
    588     }
    589640    return off;
    590641}
     
    18981949        PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
    18991950        /** @todo kIemNativeEmitEFlagsForShiftType_SignedRight: we could alternatively
    1900          *        use SAHF here when host rax is free since, OF is cleared. */
     1951         *        use LAHF here when host rax is free since, OF is cleared. */
    19011952        /* pushf */
    19021953        pCodeBuf[off++] = 0x9c;
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r106180 r106187  
    11921192#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    11931193# ifdef RT_ARCH_AMD64
    1194 #  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1194#  ifdef VBOX_STRICT
     1195#   define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   64
     1196#  else
     1197#   define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1198#  endif
    11951199# elif defined(RT_ARCH_ARM64) || defined(DOXYGEN_RUNNING)
    1196 #  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1200#  ifdef VBOX_STRICT
     1201#   define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   48
     1202#  else
     1203#   define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1204#  endif
    11971205# else
    11981206#  error "port me"
     
    12101218#else
    12111219# define IEMNATIVE_CLEAR_POSTPONED_EFLAGS(a_pReNative, a_fEflClobbered) ((void)0)
     1220#endif
     1221
     1222/** @def IEMNATIVE_HAS_POSTPONED_EFLAGS_CALCS
     1223 * Macro for testing whether there are currently any postponed EFLAGS calcs w/o
     1224 * needing to \#ifdef the check.
     1225 */
     1226#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1227# define IEMNATIVE_HAS_POSTPONED_EFLAGS_CALCS(a_pReNative) ((a_pReNative)->PostponedEfl.fEFlags != 0)
     1228#else
     1229# define IEMNATIVE_HAS_POSTPONED_EFLAGS_CALCS(a_pReNative) false
    12121230#endif
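
The stub `false` variant is what lets call sites test for postponed work without #ifdef wrapping; the first hunk in IEMAllN8veRecompBltIn.cpp above is the usage this was added for:

    /* Widen the provisional jump window only when postponed calculations
       will actually be emitted into the gap. */
    off = iemNativeEmitJccToFixedEx(pCodeBuf, off,
                                    IEMNATIVE_HAS_POSTPONED_EFLAGS_CALCS(pReNative) ? off + 512 : off + 64,
                                    kIemNativeInstrCond_e);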
    12131231
     
    21292147DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    21302148                                                                 uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg);
     2149DECL_HIDDEN_THROW(uint32_t) iemNativeEmitLoadGprWithGstShadowRegEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off,
     2150                                                                   uint8_t idxHstReg, IEMNATIVEGSTREG enmGstReg);
    21312151#ifdef VBOX_STRICT
    21322152DECL_HIDDEN_THROW(uint32_t) iemNativeEmitTop32BitsClearCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg);
    21332153DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg,
    21342154                                                            IEMNATIVEGSTREG enmGstReg);
     2155DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestRegValueCheckEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf,
     2156                                                              uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg);
    21352157# ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
    21362158DECL_HIDDEN_THROW(uint32_t) iemNativeEmitGuestSimdRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxSimdReg,
  • trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h

    r106180 r106187  
    278278
    279279#elif defined(RT_ARCH_ARM64)
    280     if RT_CONSTEXPR((a_uImm32 >> 16) == 0)
     280    if RT_CONSTEXPR_IF((a_uImm32 >> 16) == 0)
    281281        /* movz gpr, imm16 */
    282282        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32,                    0, false /*f64Bit*/);
    283     else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == 0)
     283    else if RT_CONSTEXPR_IF((a_uImm32 & UINT32_C(0xffff)) == 0)
    284284        /* movz gpr, imm16, lsl #16 */
    285285        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32 >> 16,              1, false /*f64Bit*/);
    286     else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == UINT32_C(0xffff))
     286    else if RT_CONSTEXPR_IF((a_uImm32 & UINT32_C(0xffff)) == UINT32_C(0xffff))
    287287        /* movn gpr, imm16, lsl #16 */
    288288        pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32 >> 16,             1, false /*f64Bit*/);
    289     else if RT_CONSTEXPR((a_uImm32 >> 16) == UINT32_C(0xffff))
     289    else if RT_CONSTEXPR_IF((a_uImm32 >> 16) == UINT32_C(0xffff))
    290290        /* movn gpr, imm16 */
    291291        pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32,                   0, false /*f64Bit*/);
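
These RT_CONSTEXPR_IF conversions sit in the AArch64 32-bit immediate loader, which picks MOVZ or MOVN per halfword pattern. A worked example for the two MOVN branches, hand-checkable since MOVN materialises ~(imm16 << (16*hw)):

    /* a_uImm32 = 0xffff1234: (imm >> 16) == 0xffff, so the plain movn branch:
           movn w0, #0xedcb            ; w0 = ~0x0000edcb = 0xffff1234
       a_uImm32 = 0x1234ffff: (imm & 0xffff) == 0xffff, so the lsl #16 branch:
           movn w0, #0xedcb, lsl #16   ; w0 = ~0xedcb0000 = 0x1234ffff
       In both cases imm16 = ~a_uImm32 shifted down, exactly as coded above. */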
     
    769769 * @note Bits 32 thru 63 in the GPR will be zero after the operation.
    770770 */
    771 DECL_INLINE_THROW(uint32_t)
     771DECL_FORCE_INLINE_THROW(uint32_t)
    772772iemNativeEmitLoadGprFromVCpuU32Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    773773{
     
    814814 * @note Bits 16 thru 63 in the GPR will be zero after the operation.
    815815 */
     816DECL_FORCE_INLINE_THROW(uint32_t)
     817iemNativeEmitLoadGprFromVCpuU16Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
     818{
     819#ifdef RT_ARCH_AMD64
     820    /* movzx reg32, mem16 */
     821    if (iGpr >= 8)
     822        pCodeBuf[off++] = X86_OP_REX_R;
     823    pCodeBuf[off++] = 0x0f;
     824    pCodeBuf[off++] = 0xb7;
     825    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, iGpr, offVCpu);
     826
     827#elif defined(RT_ARCH_ARM64)
     828    off = iemNativeEmitGprByVCpuLdStEx(pCodeBuf, off, iGpr, offVCpu, kArmv8A64InstrLdStType_Ld_Half, sizeof(uint16_t));
     829
     830#else
     831# error "port me"
     832#endif
     833    return off;
     834}
     835
     836
     837/**
     838 * Emits a 16-bit GPR load of a VCpu value.
     839 * @note Bits 16 thru 63 in the GPR will be zero after the operation.
     840 */
    816841DECL_INLINE_THROW(uint32_t)
    817842iemNativeEmitLoadGprFromVCpuU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
    818843{
    819844#ifdef RT_ARCH_AMD64
    820     /* movzx reg32, mem16 */
    821     uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 8);
    822     if (iGpr >= 8)
    823         pbCodeBuf[off++] = X86_OP_REX_R;
    824     pbCodeBuf[off++] = 0x0f;
    825     pbCodeBuf[off++] = 0xb7;
    826     off = iemNativeEmitGprByVCpuDisp(pbCodeBuf, off, iGpr, offVCpu);
     845    off = iemNativeEmitLoadGprFromVCpuU16Ex(iemNativeInstrBufEnsure(pReNative, off, 8), off, iGpr, offVCpu);
    827846    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    828847
     
    75667585 */
    75677586DECL_FORCE_INLINE_THROW(uint32_t)
    7568 iemNativeEmitTestAnyBitsInGpr32Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprSrc, uint32_t fBits)
     7587iemNativeEmitTestAnyBitsInGpr32Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprSrc, uint32_t fBits,
     7588                                  uint8_t iTmpReg = UINT8_MAX)
    75697589{
    75707590    Assert(fBits != 0);
     
    75927612        pCodeBuf[off++] = RT_BYTE4(fBits);
    75937613    }
     7614    RT_NOREF(iTmpReg);
    75947615
    75957616#elif defined(RT_ARCH_ARM64)
     
    75997620    if (Armv8A64ConvertMask32ToImmRImmS(fBits, &uImmNandS, &uImmR))
    76007621        pCodeBuf[off++] = Armv8A64MkInstrAndsImm(ARMV8_A64_REG_XZR, iGprSrc, uImmNandS, uImmR, false /*f64Bit*/);
     7622    else if (iTmpReg != UINT8_MAX)
     7623    {
     7624        off = iemNativeEmitLoadGpr32ImmEx(pCodeBuf, off, iTmpReg, fBits);
     7625        pCodeBuf[off++] = Armv8A64MkInstrAnds(ARMV8_A64_REG_XZR, iGprSrc, iTmpReg, false /*f64Bit*/);
     7626    }
    76017627    else
    76027628# ifdef IEM_WITH_THROW_CATCH
     
    82198245DECL_INLINE_THROW(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
    82208246{
    8221     if RT_CONSTEXPR(!a_fSkipEflChecks)
     8247    if RT_CONSTEXPR_IF(!a_fSkipEflChecks)
    82228248    {
    82238249        IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS);
     
    83888414    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
    83898415
    8390     if RT_CONSTEXPR(a_fActuallyExitingTb)
     8416    if RT_CONSTEXPR_IF(a_fActuallyExitingTb)
    83918417        iemNativeMarkCurCondBranchAsExiting(pReNative);
    83928418
    83938419#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8394     if RT_CONSTEXPR(a_fPostponedEfl)
     8420    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    83958421        off = iemNativeDoPostponedEFlagsAtTbExitEx<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off,
    83968422                                                                                                           pCodeBuf);
     
    84298455    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
    84308456
    8431     if RT_CONSTEXPR(a_fActuallyExitingTb)
     8457    if RT_CONSTEXPR_IF(a_fActuallyExitingTb)
    84328458        iemNativeMarkCurCondBranchAsExiting(pReNative);
    84338459
    84348460#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8435     if RT_CONSTEXPR(a_fPostponedEfl)
     8461    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    84368462        off = iemNativeDoPostponedEFlagsAtTbExit<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off);
    84378463#endif
     
    84758501
    84768502#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8477     if RT_CONSTEXPR(a_fPostponedEfl)
     8503    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    84788504        if (pReNative->PostponedEfl.fEFlags)
    84798505        {
     
    87388764     *        it's the same number of instructions as the TST + B.CC stuff? */
    87398765# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8740     if RT_CONSTEXPR(a_fPostponedEfl)
     8766    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    87418767        if (pReNative->PostponedEfl.fEFlags)
    87428768        {
     
    87928818    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    87938819# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8794     if RT_CONSTEXPR(a_fPostponedEfl)
     8820    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    87958821        if (pReNative->PostponedEfl.fEFlags)
    87968822        {
     
    88678893    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    88688894# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    8869     if RT_CONSTEXPR(a_fPostponedEfl)
     8895    if RT_CONSTEXPR_IF(a_fPostponedEfl)
    88708896        if (pReNative->PostponedEfl.fEFlags)
    88718897        {