VirtualBox

Changeset 106113 in vbox for trunk/src/VBox/VMM


Ignore:
Timestamp:
Sep 21, 2024 12:01:43 AM (4 months ago)
Author:
vboxsync
Message:

VMM/IEM: Added special register allocator functions for EFLAGS so we can properly assert liveness info validity. bugref:10720 bugref:10372

Location:
trunk/src/VBox/VMM
Files:
4 edited

Legend:

Unmodified — context lines shown in both revisions
Added — lines introduced in r106113
Removed — lines deleted from r106090
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp

    r106090 r106113  
    209209{
    210210    uint8_t const         idxEflReg  = !a_fCheckIrqs ? UINT8_MAX
    211                                      : iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    212                                                                        kIemNativeGstRegUse_ReadOnly);
     211                                     : iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly,
     212                                                                          RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
    213213    uint8_t const         idxTmpReg1 = iemNativeRegAllocTmp(pReNative, &off);
    214214    uint8_t const         idxTmpReg2 = a_fCheckIrqs ? iemNativeRegAllocTmp(pReNative, &off) : UINT8_MAX;
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h

    r106097 r106113  
    439439    off = iemNativeRegFlushPendingWrites(pReNative, off);
    440440
    441     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    442                                                               kIemNativeGstRegUse_ForUpdate, false /*fNoVolatileRegs*/,
    443                                                               true /*fSkipLivenessAssert*/);
     441    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
     442                                                                 RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER),
     443                                                                 RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER));
    444444    off = iemNativeEmitTestAnyBitsInGprAndTbExitIfAnySet(pReNative, off, idxEflReg,
    445445                                                         X86_EFL_TF | CPUMCTX_DBG_HIT_DRX_MASK | CPUMCTX_DBG_DBGF_MASK,
     
    17951795                       PUSH FS in real mode, so we have to try emulate that here.
    17961796                       We borrow the now unused idxReg1 from the TLB lookup code here. */
    1797                     uint8_t idxRegEfl = iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(pReNative, &off,
    1798                                                                                         kIemNativeGstReg_EFlags);
     1797                    uint8_t const idxRegEfl = iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(pReNative, &off,
     1798                                                                                              kIemNativeGstReg_EFlags);
    17991799                    if (idxRegEfl != UINT8_MAX)
    18001800                    {
     
    33483348
    33493349
     3350/**
     3351 * Helper function to convert X86_EFL_xxx masks to liveness masks.
     3352 *
     3353 * The compiler should be able to figure this out at compile time, so sprinkling
     3354 * constexpr where ever possible here to nudge it along.
     3355 */
     3356template<uint32_t const a_fEfl>
     3357RT_CONSTEXPR uint64_t iemNativeEflagsToLivenessMask(void)
     3358{
     3359    return (a_fEfl & ~X86_EFL_STATUS_BITS ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OTHER) : 0)
     3360         | (a_fEfl & X86_EFL_CF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_CF)    : 0)
     3361         | (a_fEfl & X86_EFL_PF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_PF)    : 0)
     3362         | (a_fEfl & X86_EFL_AF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_AF)    : 0)
     3363         | (a_fEfl & X86_EFL_ZF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_ZF)    : 0)
     3364         | (a_fEfl & X86_EFL_SF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_SF)    : 0)
     3365         | (a_fEfl & X86_EFL_OF           ? RT_BIT_64(IEMLIVENESSBIT_IDX_EFL_OF)    : 0);
     3366}
     3367
     3368
     3369/**
     3370 * Helper function to convert a single X86_EFL_xxxx value to bit number.
     3371 *
     3372 * The compiler should be able to figure this out at compile time, so sprinkling
     3373 * constexpr where ever possible here to nudge it along.
     3374 */
     3375template<uint32_t const a_fEfl>
     3376RT_CONSTEXPR unsigned iemNativeEflagsToSingleBitNo(void)
     3377{
     3378    AssertCompile(   a_fEfl == X86_EFL_CF
     3379                  || a_fEfl == X86_EFL_PF
     3380                  || a_fEfl == X86_EFL_AF
     3381                  || a_fEfl == X86_EFL_ZF
     3382                  || a_fEfl == X86_EFL_SF
     3383                  || a_fEfl == X86_EFL_OF
     3384                  || a_fEfl == X86_EFL_DF);
     3385    return a_fEfl == X86_EFL_CF ? X86_EFL_CF_BIT
     3386         : a_fEfl == X86_EFL_PF ? X86_EFL_PF_BIT
     3387         : a_fEfl == X86_EFL_AF ? X86_EFL_AF_BIT
     3388         : a_fEfl == X86_EFL_ZF ? X86_EFL_ZF_BIT
     3389         : a_fEfl == X86_EFL_SF ? X86_EFL_SF_BIT
     3390         : a_fEfl == X86_EFL_OF ? X86_EFL_OF_BIT
     3391         :                        X86_EFL_DF_BIT;
     3392}
     3393
     3394
    33503395#define IEM_MC_IF_EFL_ANY_BITS_SET(a_fBits) \
    3351         off = iemNativeEmitIfEflagAnysBitsSet(pReNative, off, (a_fBits)); \
     3396        off = iemNativeEmitIfEflagAnysBitsSet(pReNative, off, (a_fBits), iemNativeEflagsToLivenessMask<a_fBits>()); \
    33523397        do {
    33533398
    33543399/** Emits code for IEM_MC_IF_EFL_ANY_BITS_SET. */
    3355 DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagAnysBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
     3400DECL_INLINE_THROW(uint32_t)
     3401iemNativeEmitIfEflagAnysBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl, uint64_t fLivenessEflBits)
    33563402{
    33573403    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitsInEfl);
     
    33593405
    33603406    /* Get the eflags. */
    3361     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3362                                                               kIemNativeGstRegUse_ReadOnly);
     3407    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
    33633408
    33643409    /* Test and jump. */
     
    33763421
    33773422#define IEM_MC_IF_EFL_NO_BITS_SET(a_fBits) \
    3378         off = iemNativeEmitIfEflagNoBitsSet(pReNative, off, (a_fBits)); \
     3423        off = iemNativeEmitIfEflagNoBitsSet(pReNative, off, (a_fBits), iemNativeEflagsToLivenessMask<a_fBits>()); \
    33793424        do {
    33803425
    33813426/** Emits code for IEM_MC_IF_EFL_NO_BITS_SET. */
    3382 DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagNoBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl)
     3427DECL_INLINE_THROW(uint32_t)
     3428iemNativeEmitIfEflagNoBitsSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitsInEfl, uint64_t fLivenessEflBits)
    33833429{
    33843430    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitsInEfl);
     
    33863432
    33873433    /* Get the eflags. */
    3388     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3389                                                               kIemNativeGstRegUse_ReadOnly);
     3434    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
    33903435
    33913436    /* Test and jump. */
     
    34033448
    34043449#define IEM_MC_IF_EFL_BIT_SET(a_fBit) \
    3405         off = iemNativeEmitIfEflagsBitSet(pReNative, off, (a_fBit)); \
     3450        off = iemNativeEmitIfEflagsBitSet(pReNative, off, iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3451                                          iemNativeEflagsToLivenessMask<a_fBit>()); \
    34063452        do {
    34073453
    34083454/** Emits code for IEM_MC_IF_EFL_BIT_SET. */
    3409 DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagsBitSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
    3410 {
    3411     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitInEfl);
     3455DECL_INLINE_THROW(uint32_t)
     3456iemNativeEmitIfEflagsBitSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, unsigned iBitNo, uint64_t fLivenessEflBit)
     3457{
     3458    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo));
    34123459    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    34133460
    34143461    /* Get the eflags. */
    3415     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3416                                                               kIemNativeGstRegUse_ReadOnly);
    3417 
    3418     unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
    3419     Assert(RT_BIT_32(iBitNo) == fBitInEfl);
     3462    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
    34203463
    34213464    /* Test and jump. */
     
    34333476
    34343477#define IEM_MC_IF_EFL_BIT_NOT_SET(a_fBit) \
    3435         off = iemNativeEmitIfEflagsBitNotSet(pReNative, off, (a_fBit)); \
     3478        off = iemNativeEmitIfEflagsBitNotSet(pReNative, off, iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3479                                             iemNativeEflagsToLivenessMask<a_fBit>()); \
    34363480        do {
    34373481
    34383482/** Emits code for IEM_MC_IF_EFL_BIT_NOT_SET. */
    3439 DECL_INLINE_THROW(uint32_t) iemNativeEmitIfEflagsBitNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl)
    3440 {
    3441     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitInEfl);
     3483DECL_INLINE_THROW(uint32_t)
     3484iemNativeEmitIfEflagsBitNotSet(PIEMRECOMPILERSTATE pReNative, uint32_t off, unsigned iBitNo, uint64_t fLivenessEflBit)
     3485{
     3486    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo));
    34423487    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    34433488
    34443489    /* Get the eflags. */
    3445     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3446                                                               kIemNativeGstRegUse_ReadOnly);
    3447 
    3448     unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
    3449     Assert(RT_BIT_32(iBitNo) == fBitInEfl);
     3490    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
    34503491
    34513492    /* Test and jump. */
     
    34623503
    34633504
    3464 #define IEM_MC_IF_EFL_BITS_EQ(a_fBit1, a_fBit2)         \
    3465     off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, a_fBit1, a_fBit2, false /*fInverted*/); \
     3505#define IEM_MC_IF_EFL_BITS_EQ(a_fBit1, a_fBit2) \
     3506    off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, false /*fInverted*/, \
     3507                                            iemNativeEflagsToSingleBitNo<a_fBit1>(), \
     3508                                            iemNativeEflagsToSingleBitNo<a_fBit2>(), \
     3509                                            iemNativeEflagsToLivenessMask<a_fBit1 | a_fBit2>()); \
    34663510    do {
    34673511
    3468 #define IEM_MC_IF_EFL_BITS_NE(a_fBit1, a_fBit2)         \
    3469     off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, a_fBit1, a_fBit2, true /*fInverted*/); \
     3512#define IEM_MC_IF_EFL_BITS_NE(a_fBit1, a_fBit2) \
     3513    off = iemNativeEmitIfEflagsTwoBitsEqual(pReNative, off, true /*fInverted*/, \
     3514                                            iemNativeEflagsToSingleBitNo<a_fBit1>(), \
     3515                                            iemNativeEflagsToSingleBitNo<a_fBit2>(), \
     3516                                            iemNativeEflagsToLivenessMask<a_fBit1 | a_fBit2>()); \
    34703517    do {
    34713518
     
    34733520DECL_INLINE_THROW(uint32_t)
    34743521iemNativeEmitIfEflagsTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3475                                   uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
    3476 {
    3477     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBit1InEfl | fBit2InEfl);
     3522                                  bool fInverted, unsigned iBitNo1, unsigned iBitNo2, uint64_t fLivenessEflBits)
     3523{
     3524    Assert(iBitNo1 != iBitNo2);
     3525    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo1) | RT_BIT_32(iBitNo2));
    34783526    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    34793527
    34803528    /* Get the eflags. */
    3481     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3482                                                               kIemNativeGstRegUse_ReadOnly);
    3483 
    3484     unsigned const iBitNo1 = ASMBitFirstSetU32(fBit1InEfl) - 1;
    3485     Assert(RT_BIT_32(iBitNo1) == fBit1InEfl);
    3486 
    3487     unsigned const iBitNo2 = ASMBitFirstSetU32(fBit2InEfl) - 1;
    3488     Assert(RT_BIT_32(iBitNo2) == fBit2InEfl);
    3489     Assert(iBitNo1 != iBitNo2);
     3529    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
    34903530
    34913531#ifdef RT_ARCH_AMD64
    3492     uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBit1InEfl);
     3532    uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, RT_BIT_64(iBitNo1));
    34933533
    34943534    off = iemNativeEmitAndGpr32ByGpr32(pReNative, off, idxTmpReg, idxEflReg);
     
    35363576
    35373577#define IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(a_fBit, a_fBit1, a_fBit2) \
    3538     off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, a_fBit, a_fBit1, a_fBit2, false /*fInverted*/); \
     3578    off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, false /*fInverted*/, \
     3579                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3580                                                        iemNativeEflagsToSingleBitNo<a_fBit1>(), \
     3581                                                        iemNativeEflagsToSingleBitNo<a_fBit2>(), \
     3582                                                        iemNativeEflagsToLivenessMask<a_fBit | a_fBit1 | a_fBit2>()); \
    35393583    do {
    35403584
    35413585#define IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(a_fBit, a_fBit1, a_fBit2) \
    3542     off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, a_fBit, a_fBit1, a_fBit2, true /*fInverted*/); \
     3586    off = iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(pReNative, off, true /*fInverted*/, \
     3587                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3588                                                        iemNativeEflagsToSingleBitNo<a_fBit1>(), \
     3589                                                        iemNativeEflagsToSingleBitNo<a_fBit2>(), \
     3590                                                        iemNativeEflagsToLivenessMask<a_fBit | a_fBit1 | a_fBit2>()); \
    35433591    do {
    35443592
     
    35463594 *  IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE. */
    35473595DECL_INLINE_THROW(uint32_t)
    3548 iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl,
    3549                                               uint32_t fBit1InEfl, uint32_t fBit2InEfl, bool fInverted)
    3550 {
    3551     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitInEfl | fBit1InEfl | fBit2InEfl);
     3596iemNativeEmitIfEflagsBitNotSetAndTwoBitsEqual(PIEMRECOMPILERSTATE pReNative, uint32_t off, bool fInverted,
     3597                                              unsigned iBitNo, unsigned iBitNo1, unsigned iBitNo2, uint64_t fLivenessEflBits)
     3598{
     3599    Assert(iBitNo1 != iBitNo);
     3600    Assert(iBitNo2 != iBitNo);
     3601    Assert(iBitNo2 != iBitNo1);
     3602    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo) | RT_BIT_32(iBitNo1) | RT_BIT_32(iBitNo2));
    35523603    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    35533604
     
    35573608
    35583609    /* Get the eflags. */
    3559     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3560                                                               kIemNativeGstRegUse_ReadOnly);
    3561 
    3562     /* Translate the flag masks to bit numbers. */
    3563     unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
    3564     Assert(RT_BIT_32(iBitNo) == fBitInEfl);
    3565 
    3566     unsigned const iBitNo1 = ASMBitFirstSetU32(fBit1InEfl) - 1;
    3567     Assert(RT_BIT_32(iBitNo1) == fBit1InEfl);
    3568     Assert(iBitNo1 != iBitNo);
    3569 
    3570     unsigned const iBitNo2 = ASMBitFirstSetU32(fBit2InEfl) - 1;
    3571     Assert(RT_BIT_32(iBitNo2) == fBit2InEfl);
    3572     Assert(iBitNo2 != iBitNo);
    3573     Assert(iBitNo2 != iBitNo1);
     3610    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBits);
    35743611
    35753612#ifdef RT_ARCH_AMD64
    3576     uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, fBit1InEfl); /* This must come before we jump anywhere! */
     3613    uint8_t const idxTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, RT_BIT_64(iBitNo1)); /* This must come before we jump anywhere! */
    35773614#elif defined(RT_ARCH_ARM64)
    35783615    uint8_t const idxTmpReg = iemNativeRegAllocTmp(pReNative, &off);
     
    37233760
    37243761#define IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(a_fBit) \
    3725     off = iemNativeEmitIfCxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/); \
     3762    off = iemNativeEmitIfCxIsNotOneAndTestEflagsBit(pReNative, off, true /*fCheckIfSet*/, \
     3763                                                    iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3764                                                    iemNativeEflagsToLivenessMask<a_fBit>()); \
    37263765    do {
    37273766
    37283767#define IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(a_fBit) \
    3729     off = iemNativeEmitIfCxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/); \
     3768    off = iemNativeEmitIfCxIsNotOneAndTestEflagsBit(pReNative, off, false /*fCheckIfSet*/, \
     3769                                                    iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3770                                                    iemNativeEflagsToLivenessMask<a_fBit>()); \
    37303771    do {
    37313772
     
    37333774 *  IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET. */
    37343775DECL_INLINE_THROW(uint32_t)
    3735 iemNativeEmitIfCxIsNotOneAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fBitInEfl, bool fCheckIfSet)
    3736 {
    3737     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitInEfl);
     3776iemNativeEmitIfCxIsNotOneAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
     3777                                          bool fCheckIfSet, unsigned iBitNo, uint64_t fLivenessEflBit)
     3778{
     3779    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo));
    37383780    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    37393781
     
    37423784       register allocator state.
    37433785       Doing EFLAGS first as it's more likely to be loaded, right? */
    3744     uint8_t const idxEflReg    = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3745                                                                  kIemNativeGstRegUse_ReadOnly);
     3786    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEflBit);
    37463787    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, IEMNATIVEGSTREG_GPR(X86_GREG_xCX),
    37473788                                                                 kIemNativeGstRegUse_ReadOnly);
     
    37603801
    37613802    /* Check the EFlags bit. */
    3762     unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
    3763     Assert(RT_BIT_32(iBitNo) == fBitInEfl);
    37643803    off = iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, idxEflReg, iBitNo, pEntry->idxLabelElse,
    37653804                                                     !fCheckIfSet /*fJmpIfSet*/);
     
    37743813
    37753814#define IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(a_fBit) \
    3776     off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/, false /*f64Bit*/); \
     3815    off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, true /*fCheckIfSet*/, false /*f64Bit*/, \
     3816                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3817                                                        iemNativeEflagsToLivenessMask<a_fBit>()); \
    37773818    do {
    37783819
    37793820#define IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(a_fBit) \
    3780     off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/, false /*f64Bit*/); \
     3821    off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, false /*fCheckIfSet*/, false /*f64Bit*/, \
     3822                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3823                                                        iemNativeEflagsToLivenessMask<a_fBit>()); \
    37813824    do {
    37823825
    37833826#define IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(a_fBit) \
    3784     off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, true /*fCheckIfSet*/, true /*f64Bit*/); \
     3827    off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, true /*fCheckIfSet*/, true /*f64Bit*/, \
     3828                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3829                                                        iemNativeEflagsToLivenessMask<a_fBit>()); \
    37853830    do {
    37863831
    37873832#define IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(a_fBit) \
    3788     off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, a_fBit, false /*fCheckIfSet*/, true /*f64Bit*/); \
     3833    off = iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(pReNative, off, false /*fCheckIfSet*/, true /*f64Bit*/, \
     3834                                                        iemNativeEflagsToSingleBitNo<a_fBit>(), \
     3835                                                        iemNativeEflagsToLivenessMask<a_fBit>()); \
    37893836    do {
    37903837
     
    37943841 *  IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET. */
    37953842DECL_INLINE_THROW(uint32_t)
    3796 iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    3797                                                uint32_t fBitInEfl, bool fCheckIfSet, bool f64Bit)
    3798 {
    3799     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fBitInEfl);
     3843iemNativeEmitIfRcxEcxIsNotOneAndTestEflagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off, bool fCheckIfSet, bool f64Bit,
     3844                                              unsigned iBitNo, uint64_t fLivenessEFlBit)
     3845
     3846{
     3847    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, RT_BIT_32(iBitNo));
    38003848    PIEMNATIVECOND const pEntry = iemNativeCondPushIf(pReNative);
    38013849
     
    38043852       register allocator state.
    38053853       Doing EFLAGS first as it's more likely to be loaded, right? */
    3806     uint8_t const idxEflReg    = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    3807                                                                  kIemNativeGstRegUse_ReadOnly);
     3854    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ReadOnly, fLivenessEFlBit);
    38083855    uint8_t const idxGstRcxReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, IEMNATIVEGSTREG_GPR(X86_GREG_xCX),
    38093856                                                                 kIemNativeGstRegUse_ReadOnly);
     
    38193866
    38203867    /* Check the EFlags bit. */
    3821     unsigned const iBitNo = ASMBitFirstSetU32(fBitInEfl) - 1;
    3822     Assert(RT_BIT_32(iBitNo) == fBitInEfl);
    38233868    off = iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, idxEflReg, iBitNo, pEntry->idxLabelElse,
    38243869                                                     !fCheckIfSet /*fJmpIfSet*/);
     
    58455890#undef  IEM_MC_FETCH_EFLAGS /* should not be used */
    58465891#define IEM_MC_FETCH_EFLAGS_EX(a_EFlags, a_fEflInput, a_fEflOutput) \
    5847     off = iemNativeEmitFetchEFlags(pReNative, off, a_EFlags, a_fEflInput, a_fEflOutput)
     5892    off = iemNativeEmitFetchEFlags<a_fEflInput,  iemNativeEflagsToLivenessMask<a_fEflInput>(),\
     5893                                   a_fEflOutput, iemNativeEflagsToLivenessMask<a_fEflOutput>()>(pReNative, off, a_EFlags)
    58485894
    58495895/** Handles IEM_MC_FETCH_EFLAGS_EX. */
    5850 DECL_INLINE_THROW(uint32_t)
    5851 iemNativeEmitFetchEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarEFlags,
    5852                          uint32_t fEflInput, uint32_t fEflOutput)
     5896template<uint32_t const a_fEflInput,  uint64_t const a_fLivenessEflInput,
     5897         uint32_t const a_fEflOutput, uint64_t const a_fLivenessEflOutput>
     5898DECL_INLINE_THROW(uint32_t)
     5899iemNativeEmitFetchEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarEFlags)
    58535900{
    58545901    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarEFlags);
    58555902    IEMNATIVE_ASSERT_VAR_SIZE(pReNative, idxVarEFlags, sizeof(uint32_t));
    5856     RT_NOREF(fEflInput, fEflOutput);
     5903    /** @todo fix NOT AssertCompile(a_fEflInput != 0 || a_fEflOutput != 0); */
    58575904
    58585905#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
    58595906# ifdef VBOX_STRICT
    58605907    if (   pReNative->idxCurCall != 0
    5861         && (fEflInput != 0 || fEflOutput != 0) /* for NOT these are both zero for now. */)
    5862     {
    5863         PCIEMLIVENESSENTRY const pLivenessEntry = &pReNative->paLivenessEntries[pReNative->idxCurCall - 1];
    5864         uint32_t const           fBoth          = fEflInput | fEflOutput;
     5908        && (a_fEflInput != 0 || a_fEflOutput != 0) /* for NOT these are both zero for now. */)
     5909    {
     5910        PCIEMLIVENESSENTRY const    pLivenessEntry = &pReNative->paLivenessEntries[pReNative->idxCurCall - 1];
     5911        RT_CONSTEXPR uint32_t const fBoth          = a_fEflInput | a_fEflOutput;
    58655912# define ASSERT_ONE_EFL(a_fElfConst, a_idxField) \
    58665913            AssertMsg(   !(fBoth & (a_fElfConst)) \
    5867                       || (!(fEflInput & (a_fElfConst)) \
     5914                      || (!(a_fEflInput & (a_fElfConst)) \
    58685915                          ? IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)) \
    5869                           : !(fEflOutput & (a_fElfConst)) \
     5916                          : !(a_fEflOutput & (a_fElfConst)) \
    58705917                          ? IEMLIVENESS_STATE_IS_INPUT_EXPECTED(  iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)) \
    58715918                          : IEMLIVENESS_STATE_IS_MODIFY_EXPECTED( iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)) ), \
     
    58835930#endif
    58845931
    5885     IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, fEflInput);
     5932    IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(pReNative, off, a_fEflInput);
    58865933
    58875934    /** @todo This could be prettier...*/
     
    58965943         *        zero, but since iemNativeVarRegisterSet clears the shadowing,
    58975944         *        that's counter productive... */
    5898         uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    5899                                                                   kIemNativeGstRegUse_ForUpdate, false /*fNoVolatileRegs*/,
    5900                                                                   true /** @todo EFlags shadowing+liveness weirdness (@bugref{10720}). */);
     5945        uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
     5946                                                                     a_fLivenessEflInput, a_fLivenessEflOutput);
    59015947        iemNativeVarRegisterSet(pReNative, idxVarEFlags, idxGstReg, off, true /*fAllocated*/);
    59025948    }
     
    59055951        /* Register argument variable: Avoid assertions in generic call code and load it the traditional way. */
    59065952        uint8_t const idxVarReg = iemNativeVarRegisterAcquire(pReNative, idxVarEFlags, &off, false /*fInitialized*/);
    5907         uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(pReNative, &off, kIemNativeGstReg_EFlags);
     5953        uint8_t const idxGstReg = iemNativeRegAllocTmpForGuestEFlagsIfAlreadyPresent(pReNative, &off,
     5954                                                                                     a_fLivenessEflInput, a_fLivenessEflOutput);
    59085955        if (idxGstReg != UINT8_MAX)
    59095956        {
     
    59265973#define IEM_MC_COMMIT_EFLAGS_EX(a_EFlags, a_fEflInput, a_fEflOutput) \
    59275974    IEMNATIVE_EFLAGS_OPTIMIZATION_STATS(a_fEflInput, a_fEflOutput); \
    5928     off = iemNativeEmitCommitEFlags(pReNative, off, a_EFlags, a_fEflOutput, true /*fUpdateSkipping*/)
     5975    off = iemNativeEmitCommitEFlags<true /*fUpdateSkipping*/, a_fEflOutput, \
     5976                                    iemNativeEflagsToLivenessMask<a_fEflInput>(), \
     5977                                    iemNativeEflagsToLivenessMask<a_fEflOutput>()>(pReNative, off, a_EFlags)
    59295978
    59305979#undef IEM_MC_COMMIT_EFLAGS_OPT /* should not be used */
    59315980#define IEM_MC_COMMIT_EFLAGS_OPT_EX(a_EFlags, a_fEflInput, a_fEflOutput) \
    59325981    IEMNATIVE_EFLAGS_OPTIMIZATION_STATS(a_fEflInput, a_fEflOutput); \
    5933     off = iemNativeEmitCommitEFlags(pReNative, off, a_EFlags, a_fEflOutput, false /*fUpdateSkipping*/)
     5982    off = iemNativeEmitCommitEFlags<false /*fUpdateSkipping*/, a_fEflOutput, \
     5983                                    iemNativeEflagsToLivenessMask<a_fEflInput>(), \
     5984                                    iemNativeEflagsToLivenessMask<a_fEflOutput>()>(pReNative, off, a_EFlags)
    59345985
    59355986/** Handles IEM_MC_COMMIT_EFLAGS_EX. */
    5936 DECL_INLINE_THROW(uint32_t)
    5937 iemNativeEmitCommitEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarEFlags, uint32_t fEflOutput,
    5938                           bool fUpdateSkipping)
    5939 {
    5940     RT_NOREF(fEflOutput);
     5987template<bool const a_fUpdateSkipping, uint32_t const a_fEflOutput,
     5988         uint64_t const a_fLivenessEflInputBits, uint64_t const a_fLivenessEflOutputBits>
     5989DECL_INLINE_THROW(uint32_t) iemNativeEmitCommitEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarEFlags)
     5990{
    59415991    uint8_t const idxReg = iemNativeVarRegisterAcquire(pReNative, idxVarEFlags, &off, true /*fInitialized*/);
    59425992    IEMNATIVE_ASSERT_VAR_SIZE(pReNative, idxVarEFlags, sizeof(uint32_t));
     5993
     5994#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
     5995# ifdef VBOX_STRICT
     5996    if (   pReNative->idxCurCall != 0
     5997        && (a_fLivenessEflInputBits != 0 || a_fLivenessEflOutputBits != 0) /* for NOT these are both zero for now. */)
     5998    {
     5999        PCIEMLIVENESSENTRY const pLivenessEntry = &pReNative->paLivenessEntries[pReNative->idxCurCall - 1];
     6000# define ASSERT_ONE_EFL(a_idxField) \
     6001            if RT_CONSTEXPR_IF(((a_fLivenessEflInputBits | a_fLivenessEflOutputBits) & RT_BIT_64(a_idxField)) != 0) \
     6002                AssertMsg(!(a_fLivenessEflInputBits & RT_BIT_64(a_idxField)) \
     6003                          ? IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)) \
     6004                          : !(a_fLivenessEflOutputBits & RT_BIT_64(a_idxField)) \
     6005                          ? IEMLIVENESS_STATE_IS_INPUT_EXPECTED(  iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)) \
     6006                          : IEMLIVENESS_STATE_IS_MODIFY_EXPECTED( iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)), \
     6007                          ("%s - %u\n", #a_idxField, iemNativeLivenessGetStateByGstRegEx(pLivenessEntry, a_idxField)))
     6008        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OTHER);
     6009        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_CF);
     6010        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_PF);
     6011        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_AF);
     6012        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_ZF);
     6013        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_SF);
     6014        ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OF);
     6015# undef ASSERT_ONE_EFL
     6016    }
     6017# endif
     6018#endif
    59436019
    59446020#ifdef VBOX_STRICT
     
    59556031    iemNativeFixupFixedJump(pReNative, offFixup, off);
    59566032
    5957     /** @todo validate that only bits in the fElfOutput mask changed. */
     6033    /** @todo validate that only bits in the a_fEflOutput mask changed. */
    59586034#endif
    59596035
    59606036#ifdef IEMNATIVE_STRICT_EFLAGS_SKIPPING
    5961     if (fUpdateSkipping)
    5962     {
    5963         if ((fEflOutput & X86_EFL_STATUS_BITS) == X86_EFL_STATUS_BITS)
     6037    if RT_CONSTEXPR_IF(a_fUpdateSkipping)
     6038    {
     6039        if RT_CONSTEXPR_IF((a_fEflOutput & X86_EFL_STATUS_BITS) == X86_EFL_STATUS_BITS)
    59646040            off = iemNativeEmitStoreImmToVCpuU32(pReNative, off, 0, RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
    59656041        else
    5966             off = iemNativeEmitAndImmIntoVCpuU32(pReNative, off, ~(fEflOutput & X86_EFL_STATUS_BITS),
     6042            off = iemNativeEmitAndImmIntoVCpuU32(pReNative, off, ~(a_fEflOutput & X86_EFL_STATUS_BITS),
    59676043                                                 RT_UOFFSETOF(VMCPU, iem.s.fSkippingEFlags));
    59686044    }
    5969 #else
    5970     RT_NOREF_PV(fUpdateSkipping);
    59716045#endif
    59726046
     
    59866060
    59876061#define IEM_MC_SET_EFL_BIT(a_fBit) \
    5988     off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Set>(pReNative, off, a_fBit)
     6062    off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Set,   a_fBit, iemNativeEflagsToLivenessMask<a_fBit>()>(pReNative, off)
    59896063
    59906064#define IEM_MC_CLEAR_EFL_BIT(a_fBit) \
    5991     off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Clear>(pReNative, off, a_fBit)
     6065    off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Clear, a_fBit, iemNativeEflagsToLivenessMask<a_fBit>()>(pReNative, off)
    59926066
    59936067#define IEM_MC_FLIP_EFL_BIT(a_fBit) \
    5994     off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Flip>(pReNative, off, a_fBit)
     6068    off = iemNativeEmitModifyEFlagsBit<kIemNativeEmitEflOp_Flip,  a_fBit, iemNativeEflagsToLivenessMask<a_fBit>()>(pReNative, off)
    59956069
    59966070/** Handles IEM_MC_SET_EFL_BIT/IEM_MC_CLEAR_EFL_BIT/IEM_MC_FLIP_EFL_BIT. */
    5997 template<IEMNATIVEMITEFLOP const a_enmOp>
    5998 DECL_INLINE_THROW(uint32_t) iemNativeEmitModifyEFlagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t fEflBit)
    5999 {
    6000     uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off, kIemNativeGstReg_EFlags,
    6001                                                               kIemNativeGstRegUse_ForUpdate, false /*fNoVolatileRegs*/,
    6002                                                               true /*fSkipLivenessAssert*/); /** @todo proper liveness / eflags fix */
     6071template<IEMNATIVEMITEFLOP const a_enmOp, uint32_t const a_fEflBit, uint64_t const a_fLivenessEflBit>
     6072DECL_INLINE_THROW(uint32_t) iemNativeEmitModifyEFlagsBit(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     6073{
     6074    uint8_t const idxEflReg = iemNativeRegAllocTmpForGuestEFlags(pReNative, &off, kIemNativeGstRegUse_ForUpdate,
     6075                                                                 a_enmOp == kIemNativeEmitEflOp_Flip ? a_fLivenessEflBit : 0,
     6076                                                                 a_fLivenessEflBit);
    60036077
    60046078    /* Using 'if constexpr' forces code elimination in debug builds with VC. */
    60056079    if RT_CONSTEXPR_IF(a_enmOp == kIemNativeEmitEflOp_Set)
    6006         off = iemNativeEmitOrGpr32ByImm(pReNative, off, idxEflReg, fEflBit);
     6080        off = iemNativeEmitOrGpr32ByImm(pReNative, off, idxEflReg, a_fEflBit);
    60076081    else if RT_CONSTEXPR_IF(a_enmOp == kIemNativeEmitEflOp_Clear)
    6008         off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxEflReg, ~fEflBit);
     6082        off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxEflReg, ~a_fEflBit);
    60096083    else if RT_CONSTEXPR_IF(a_enmOp == kIemNativeEmitEflOp_Flip)
    6010         off = iemNativeEmitXorGpr32ByImm(pReNative, off, idxEflReg, fEflBit);
     6084        off = iemNativeEmitXorGpr32ByImm(pReNative, off, idxEflReg, a_fEflBit);
    60116085    else
    60126086        AssertCompile(   a_enmOp == kIemNativeEmitEflOp_Set /* AssertCompile(false) works with VC 2019 but not clang 15. */
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r106099 r106113  
    35673567
    35683568/**
    3569  * Allocates a temporary host general purpose register for keeping a guest
    3570  * register value.
    3571  *
    3572  * Since we may already have a register holding the guest register value,
    3573  * code will be emitted to do the loading if that's not the case. Code may also
    3574  * be emitted if we have to free up a register to satify the request.
    3575  *
    3576  * @returns The host register number; throws VBox status code on failure, so no
    3577  *          need to check the return value.
    3578  * @param   pReNative       The native recompile state.
    3579  * @param   poff            Pointer to the variable with the code buffer
    3580  *                          position. This will be update if we need to move a
    3581  *                          variable from register to stack in order to satisfy
    3582  *                          the request.
    3583  * @param   enmGstReg       The guest register that will is to be updated.
    3584  * @param   enmIntendedUse  How the caller will be using the host register.
    3585  * @param   fNoVolatileRegs Set if no volatile register allowed, clear if any
    3586  *                          register is okay (default).  The ASSUMPTION here is
    3587  *                          that the caller has already flushed all volatile
    3588  *                          registers, so this is only applied if we allocate a
    3589  *                          new register.
    3590  * @param   fSkipLivenessAssert     Hack for liveness input validation of EFLAGS.
    3591  * @sa      iemNativeRegAllocTmpForGuestRegIfAlreadyPresent
    3592  */
    3593 DECL_HIDDEN_THROW(uint8_t)
    3594 iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
    3595                                 IEMNATIVEGSTREGUSE enmIntendedUse /*= kIemNativeGstRegUse_ReadOnly*/,
    3596                                 bool fNoVolatileRegs /*= false*/, bool fSkipLivenessAssert /*= false*/)
     3569 * Common worker for iemNativeRegAllocTmpForGuestReg() and
     3570 * iemNativeRegAllocTmpForGuestEFlags().
     3571 *
     3572 * See iemNativeRegAllocTmpForGuestReg() for details.
     3573 */
     3574static uint8_t
     3575iemNativeRegAllocTmpForGuestRegCommon(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
     3576                                      IEMNATIVEGSTREGUSE enmIntendedUse, bool fNoVolatileRegs)
    35973577{
    35983578    Assert(enmGstReg < kIemNativeGstReg_End && g_aGstShadowInfo[enmGstReg].cb != 0);
    3599 #ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
    3600     AssertMsg(   fSkipLivenessAssert
    3601               || pReNative->idxCurCall == 0
    3602               || enmGstReg == kIemNativeGstReg_Pc
    3603               || (enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
    3604                   ? IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
    3605                   : enmIntendedUse == kIemNativeGstRegUse_ForUpdate
    3606                   ? IEMLIVENESS_STATE_IS_MODIFY_EXPECTED( iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
    3607                   : IEMLIVENESS_STATE_IS_INPUT_EXPECTED(  iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)) ),
    3608               ("%s - %u\n", g_aGstShadowInfo[enmGstReg].pszName, iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)));
    3609 #endif
    3610     RT_NOREF(fSkipLivenessAssert);
    36113579#if defined(LOG_ENABLED) || defined(VBOX_STRICT)
    36123580    static const char * const s_pszIntendedUse[] = { "fetch", "update", "full write", "destructive calc" };
     
    37863754
    37873755/**
    3788  * Allocates a temporary host general purpose register that already holds the
    3789  * given guest register value.
    3790  *
    3791  * The use case for this function is places where the shadowing state cannot be
    3792  * modified due to branching and such.  This will fail if the we don't have a
    3793  * current shadow copy handy or if it's incompatible.  The only code that will
    3794  * be emitted here is value checking code in strict builds.
    3795  *
    3796  * The intended use can only be readonly!
    3797  *
    3798  * @returns The host register number, UINT8_MAX if not present.
     3756 * Allocates a temporary host general purpose register for keeping a guest
     3757 * register value.
     3758 *
     3759 * Since we may already have a register holding the guest register value,
     3760 * code will be emitted to do the loading if that's not the case. Code may also
     3761 * be emitted if we have to free up a register to satify the request.
     3762 *
     3763 * @returns The host register number; throws VBox status code on failure, so no
     3764 *          need to check the return value.
    37993765 * @param   pReNative       The native recompile state.
    3800  * @param   poff            Pointer to the instruction buffer offset.
    3801  *                          Will be updated in strict builds if a register is
    3802  *                          found.
     3766 * @param   poff            Pointer to the variable with the code buffer
     3767 *                          position. This will be update if we need to move a
     3768 *                          variable from register to stack in order to satisfy
     3769 *                          the request.
    38033770 * @param   enmGstReg       The guest register that will is to be updated.
    3804  * @note    In strict builds, this may throw instruction buffer growth failures.
    3805  *          Non-strict builds will not throw anything.
    3806  * @sa iemNativeRegAllocTmpForGuestReg
     3771 * @param   enmIntendedUse  How the caller will be using the host register.
     3772 * @param   fNoVolatileRegs Set if no volatile register allowed, clear if any
     3773 *                          register is okay (default).  The ASSUMPTION here is
     3774 *                          that the caller has already flushed all volatile
     3775 *                          registers, so this is only applied if we allocate a
     3776 *                          new register.
     3777 * @param   fSkipLivenessAssert     Hack for liveness input validation of EFLAGS.
     3778 * @sa      iemNativeRegAllocTmpForGuestRegIfAlreadyPresent
    38073779 */
    38083780DECL_HIDDEN_THROW(uint8_t)
    3809 iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
     3781iemNativeRegAllocTmpForGuestReg(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg,
     3782                                IEMNATIVEGSTREGUSE enmIntendedUse /*= kIemNativeGstRegUse_ReadOnly*/,
     3783                                bool fNoVolatileRegs /*= false*/, bool fSkipLivenessAssert /*= false*/)
     3784{
     3785#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
     3786    AssertMsg(   fSkipLivenessAssert
     3787              || pReNative->idxCurCall == 0
     3788              || enmGstReg == kIemNativeGstReg_Pc
     3789              || (enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
     3790                  ? IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
     3791                  : enmIntendedUse == kIemNativeGstRegUse_ForUpdate
     3792                  ? IEMLIVENESS_STATE_IS_MODIFY_EXPECTED( iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
     3793                  : IEMLIVENESS_STATE_IS_INPUT_EXPECTED(  iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)) ),
     3794              ("%s - %u\n", g_aGstShadowInfo[enmGstReg].pszName, iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)));
     3795#endif
     3796    RT_NOREF(fSkipLivenessAssert);
     3797
     3798    return iemNativeRegAllocTmpForGuestRegCommon(pReNative, poff, enmGstReg, enmIntendedUse, fNoVolatileRegs);
     3799}
     3800
     3801
     3802#if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && defined(VBOX_STRICT)
     3803/**
     3804 * Specialized version of iemNativeRegAllocTmpForGuestReg for EFLAGS.
     3805 *
     3806 * This takes additional arguments for covering liveness assertions in strict
     3807 * builds, it's otherwise the same as iemNativeRegAllocTmpForGuestReg() with
     3808 * kIemNativeGstReg_EFlags as argument.
     3809 */
     3810DECL_HIDDEN_THROW(uint8_t)
     3811iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREGUSE enmIntendedUse,
     3812                                   uint64_t fRead, uint64_t fWrite /*= 0*/, uint64_t fPotentialCall /*= 0*/)
     3813{
     3814    if (pReNative->idxCurCall != 0 && (fRead || fWrite /*|| fPotentialCall*/))
     3815    {
     3816        Assert(!(fRead & ~IEMLIVENESSBIT_ALL_EFL_MASK));
     3817        Assert(!(fWrite & ~IEMLIVENESSBIT_ALL_EFL_MASK));
     3818        Assert(!(fPotentialCall & ~IEMLIVENESSBIT_ALL_EFL_MASK));
     3819        uint64_t const fAll = fRead | fWrite /*| fPotentialCall*/;
     3820        uint32_t       fState;
     3821# define MY_ASSERT_ONE_EFL(a_enmGstEfl) \
     3822        fState = iemNativeLivenessGetPrevStateByGstRegEx(pReNative, (IEMNATIVEGSTREG)(a_enmGstEfl)); \
     3823        AssertMsg(   !( fAll   & RT_BIT_64(a_enmGstEfl)) \
     3824                  || (  fRead  & RT_BIT_64(a_enmGstEfl) \
     3825                      ? fWrite & RT_BIT_64(a_enmGstEfl) \
     3826                        ? IEMLIVENESS_STATE_IS_MODIFY_EXPECTED(fState) \
     3827                        : IEMLIVENESS_STATE_IS_INPUT_EXPECTED(fState) \
     3828                      : IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(fState) \
     3829                      ) \
     3830                  , ("%s - %u\n", #a_enmGstEfl, fState))
     3831        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OTHER);
     3832        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_CF);
     3833        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_PF);
     3834        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_AF);
     3835        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_ZF);
     3836        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_SF);
     3837        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OF);
     3838# undef MY_ASSERT_ONE_EFL
     3839    }
     3840    RT_NOREF(fPotentialCall);
     3841    return iemNativeRegAllocTmpForGuestRegCommon(pReNative, poff, kIemNativeGstReg_EFlags,
     3842                                                 enmIntendedUse, false /*fNoVolatileRegs*/);
     3843}
     3844#endif
     3845
     3846
     3847
     3848/**
     3849 * Common worker for iemNativeRegAllocTmpForGuestRegIfAlreadyPresent and
     3850 * iemNativeRegAllocTmpForGuestEFlagsIfAlreadyPresent.
     3851 *
     3852 * See iemNativeRegAllocTmpForGuestRegIfAlreadyPresent() for details.
     3853 */
     3854DECL_FORCE_INLINE(uint8_t)
     3855iemNativeRegAllocTmpForGuestRegIfAlreadyPresentCommon(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
    38103856{
    38113857    Assert(enmGstReg < kIemNativeGstReg_End && g_aGstShadowInfo[enmGstReg].cb != 0);
    3812 #ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
    3813     AssertMsg(   pReNative->idxCurCall == 0
    3814               || IEMLIVENESS_STATE_IS_INPUT_EXPECTED(iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
    3815               || enmGstReg == kIemNativeGstReg_Pc
    3816               || enmGstReg == kIemNativeGstReg_EFlags /** @todo EFlags shadowing+liveness is weird and needs fixing (@bugref{10720}) */,
    3817               ("%s - %u\n", g_aGstShadowInfo[enmGstReg].pszName, iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)));
    3818 #endif
    38193858
    38203859    /*
     
    38523891    return UINT8_MAX;
    38533892}
     3893
     3894
     3895/**
     3896 * Allocates a temporary host general purpose register that already holds the
     3897 * given guest register value.
     3898 *
     3899 * The use case for this function is places where the shadowing state cannot be
     3900 * modified due to branching and such.  This will fail if the we don't have a
     3901 * current shadow copy handy or if it's incompatible.  The only code that will
     3902 * be emitted here is value checking code in strict builds.
     3903 *
     3904 * The intended use can only be readonly!
     3905 *
     3906 * @returns The host register number, UINT8_MAX if not present.
     3907 * @param   pReNative       The native recompile state.
     3908 * @param   poff            Pointer to the instruction buffer offset.
     3909 *                          Will be updated in strict builds if a register is
     3910 *                          found.
     3911 * @param   enmGstReg       The guest register that will is to be updated.
     3912 * @note    In strict builds, this may throw instruction buffer growth failures.
     3913 *          Non-strict builds will not throw anything.
     3914 * @sa iemNativeRegAllocTmpForGuestReg
     3915 */
     3916DECL_HIDDEN_THROW(uint8_t)
     3917iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREG enmGstReg)
     3918{
     3919#ifdef IEMNATIVE_WITH_LIVENESS_ANALYSIS
     3920    AssertMsg(   pReNative->idxCurCall == 0
     3921              || IEMLIVENESS_STATE_IS_INPUT_EXPECTED(iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg))
     3922              || enmGstReg == kIemNativeGstReg_Pc
     3923              , ("%s - %u\n", g_aGstShadowInfo[enmGstReg].pszName, iemNativeLivenessGetPrevStateByGstReg(pReNative, enmGstReg)));
     3924#endif
     3925    return iemNativeRegAllocTmpForGuestRegIfAlreadyPresentCommon(pReNative, poff, enmGstReg);
     3926}
     3927
     3928
     3929#if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && defined(VBOX_STRICT)
     3930/**
     3931 * Specialized version of iemNativeRegAllocTmpForGuestRegIfAlreadyPresent for
     3932 * EFLAGS.
     3933 *
     3934 * This takes additional arguments for covering liveness assertions in strict
     3935 * builds, it's otherwise the same as
     3936 * iemNativeRegAllocTmpForGuestRegIfAlreadyPresent() with
     3937 * kIemNativeGstReg_EFlags as argument.
     3938 *
     3939 * @note The @a fWrite parameter is necessary to complete the liveness picture,
     3940 *       as iemNativeEmitFetchEFlags() may fetch flags in prep for a later
     3941 *       commit.  It the operation clobbers all the flags, @a fRead will be
     3942 *       zero, so better verify the whole picture while we're here.
     3943 */
     3944DECL_HIDDEN_THROW(uint8_t) iemNativeRegAllocTmpForGuestEFlagsIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     3945                                                                              uint64_t fRead, uint64_t fWrite /*=0*/)
     3946{
     3947    if (pReNative->idxCurCall != 0)
     3948    {
     3949        Assert(fRead | fWrite);
     3950        Assert(!(fRead & ~IEMLIVENESSBIT_ALL_EFL_MASK));
     3951        Assert(!(fWrite & ~IEMLIVENESSBIT_ALL_EFL_MASK));
     3952        uint64_t const fAll = fRead | fWrite;
     3953        uint32_t       fState;
     3954# define MY_ASSERT_ONE_EFL(a_enmGstEfl) \
     3955        fState = iemNativeLivenessGetPrevStateByGstRegEx(pReNative, (IEMNATIVEGSTREG)(a_enmGstEfl)); \
     3956        AssertMsg(   !( fAll   & RT_BIT_64(a_enmGstEfl)) \
     3957                  || (  fRead  & RT_BIT_64(a_enmGstEfl) \
     3958                      ? fWrite & RT_BIT_64(a_enmGstEfl) \
     3959                        ? IEMLIVENESS_STATE_IS_MODIFY_EXPECTED(fState) \
     3960                        : IEMLIVENESS_STATE_IS_INPUT_EXPECTED(fState) \
     3961                      : IEMLIVENESS_STATE_IS_CLOBBER_EXPECTED(fState) \
     3962                      ) \
     3963                  , ("%s - %u\n", #a_enmGstEfl, fState))
     3964        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OTHER);
     3965        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_CF);
     3966        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_PF);
     3967        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_AF);
     3968        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_ZF);
     3969        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_SF);
     3970        MY_ASSERT_ONE_EFL(IEMLIVENESSBIT_IDX_EFL_OF);
     3971# undef MY_ASSERT_ONE_EFL
     3972    }
     3973    RT_NOREF(fRead);
     3974    return iemNativeRegAllocTmpForGuestRegIfAlreadyPresentCommon(pReNative, poff, kIemNativeGstReg_EFlags);
     3975}
     3976#endif
    38543977
    38553978
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r106101 r106113  
    18221822                                                            IEMNATIVEGSTREGUSE enmIntendedUse = kIemNativeGstRegUse_ReadOnly,
    18231823                                                            bool fNoVolatileRegs = false, bool fSkipLivenessAssert = false);
     1824#if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && defined(VBOX_STRICT)
     1825DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     1826                                                               IEMNATIVEGSTREGUSE enmIntendedUse, uint64_t fRead,
     1827                                                               uint64_t fWrite = 0, uint64_t fPotentialCall = 0);
     1828#else
     1829DECL_FORCE_INLINE_THROW(uint8_t)
     1830iemNativeRegAllocTmpForGuestEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, IEMNATIVEGSTREGUSE enmIntendedUse,
     1831                                   uint64_t fRead, uint64_t fWrite = 0, uint64_t fPotentialCall = 0)
     1832{
     1833    RT_NOREF(fRead, fWrite, fPotentialCall);
     1834    return iemNativeRegAllocTmpForGuestReg(pReNative, poff, kIemNativeGstReg_EFlags, enmIntendedUse);
     1835}
     1836#endif
     1837
    18241838DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
    18251839                                                                            IEMNATIVEGSTREG enmGstReg);
     1840#if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && defined(VBOX_STRICT)
     1841DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpForGuestEFlagsIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     1842                                                                               uint64_t fRead, uint64_t fWrite = 0);
     1843#else
     1844DECL_FORCE_INLINE_THROW(uint8_t)
     1845iemNativeRegAllocTmpForGuestEFlagsIfAlreadyPresent(PIEMRECOMPILERSTATE pReNative, uint32_t *poff,
     1846                                                   uint64_t fRead, uint64_t fWrite = 0)
     1847{
     1848    RT_NOREF(fRead, fWrite);
     1849    return iemNativeRegAllocTmpForGuestRegIfAlreadyPresent(pReNative, poff, kIemNativeGstReg_EFlags);
     1850}
     1851#endif
    18261852
    18271853DECL_HIDDEN_THROW(uint32_t) iemNativeRegAllocArgs(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cArgs);
     
    23522378}
    23532379
    2354 
    23552380# ifdef VBOX_STRICT
     2381
    23562382/** For assertions only - caller checks that idxCurCall isn't zero. */
    23572383DECL_FORCE_INLINE(uint32_t)
     
    23602386    return iemNativeLivenessGetStateByGstReg(&pReNative->paLivenessEntries[pReNative->idxCurCall - 1], enmGstReg);
    23612387}
     2388
     2389
     2390/** For assertions only - caller checks that idxCurCall isn't zero. */
     2391DECL_FORCE_INLINE(uint32_t)
     2392iemNativeLivenessGetPrevStateByGstRegEx(PIEMRECOMPILERSTATE pReNative, IEMNATIVEGSTREG enmGstReg)
     2393{
     2394    return iemNativeLivenessGetStateByGstRegEx(&pReNative->paLivenessEntries[pReNative->idxCurCall - 1], enmGstReg);
     2395}
     2396
    23622397# endif /* VBOX_STRICT */
    2363 
    23642398#endif /* IEMNATIVE_WITH_LIVENESS_ANALYSIS */
    23652399
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette