VirtualBox

Changeset 102603 in vbox


Timestamp:
Dec 14, 2023 11:06:41 PM
Author:
vboxsync
Message:

VMM/IEM: AMD64 version of BODY_CHECK_OPCODES. Disabled because ARM64 isn't done. bugref:10371
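
What the new check does, in outline: the generated AMD64 code compares the opcode bytes recorded in the translation block (TB) against the bytes currently in the guest instruction buffer and branches to the new ObsoleteTb label on any mismatch; the label's code calls iemNativeHlpObsoleteTb, which retires the TB and breaks out of execution so the block gets recompiled. The fragment below is only an illustrative C sketch of that logic, not code from this changeset: sketchCheckOpcodes is a hypothetical name, memcmp stands in for the inlined CMP/REPE CMPS sequences, and the parameters correspond to the values computed in iemNativeEmitBltInCheckOpcodes in the diff below.

    #include <string.h>   /* memcmp */

    /* Hypothetical sketch of the check performed by the emitted code. */
    static int sketchCheckOpcodes(PVMCPUCC pVCpu, const uint8_t *pbInstrBuf /* pVCpu->iem.s.pbInstrBuf */,
                                  const uint8_t *pbOpcodes, uint16_t offPage, uint16_t cbLeft)
    {
        if (memcmp(&pbInstrBuf[offPage], pbOpcodes, cbLeft) == 0)
            return VINF_SUCCESS;                    /* opcodes unchanged, keep executing the TB */
        /* The guest modified its code: flag the TB as obsolete and break out so it is recompiled. */
        iemThreadedTbObsolete(pVCpu, pVCpu->iem.s.pCurTbR3, false /*fSafeToFree*/);
        return VINF_IEM_REEXEC_BREAK;
    }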

Location:
trunk/src/VBox/VMM
Files:
3 edited

Legend:

  +   added line
  -   removed line
      unchanged (context) line
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

r102593 → r102603
 
 
+/**
+ * Used by TB code when it detects that the TB is obsolete.
+ * @see iemThreadedFuncWorkerObsoleteTb
+ */
+IEM_DECL_NATIVE_HLP_DEF(int, iemNativeHlpObsoleteTb,(PVMCPUCC pVCpu))
+{
+    /* We set fSafeToFree to false because we're being called in the context
+       of a TB callback function, which for native TBs means we cannot release
+       the executable memory until we have returned all the way back to iemTbExec,
+       as that return path goes via the native code generated for the TB. */
+    iemThreadedTbObsolete(pVCpu, pVCpu->iem.s.pCurTbR3, false /*fSafeToFree*/);
+    return VINF_IEM_REEXEC_BREAK;
+}
+
+
 /*********************************************************************************************************************************
 *   Helpers: Segmented memory fetches and stores.                                                                                *
     
     pReNative->Core.u64ArgVars             = UINT64_MAX;
 
-    AssertCompile(RT_ELEMENTS(pReNative->aidxUniqueLabels) == 6);
+    AssertCompile(RT_ELEMENTS(pReNative->aidxUniqueLabels) == 7);
     pReNative->aidxUniqueLabels[0]         = UINT32_MAX;
     pReNative->aidxUniqueLabels[1]         = UINT32_MAX;

     pReNative->aidxUniqueLabels[4]         = UINT32_MAX;
     pReNative->aidxUniqueLabels[5]         = UINT32_MAX;
+    pReNative->aidxUniqueLabels[6]         = UINT32_MAX;
 
     /* Full host register reinit: */

  *                          This will be update if we need to move a variable from
  *                          register to stack in order to satisfy the request.
- * @param   fPreferVolatile Wheter to prefer volatile over non-volatile
+ * @param   fPreferVolatile Whether to prefer volatile over non-volatile
  *                          registers (@c true, default) or the other way around
  *                          (@c false, for iemNativeRegAllocTmpForGuestReg()).
     
     {
         idxReg = iemNativeRegAllocFindFree(pReNative, poff, fPreferVolatile);
+        AssertStmt(idxReg != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_ALLOCATOR_NO_FREE_TMP));
+    }
+    return iemNativeRegMarkAllocated(pReNative, idxReg, kIemNativeWhat_Tmp);
+}
+
+
+/**
+ * Alternative version of iemNativeRegAllocTmp that takes mask with acceptable
+ * registers.
+ *
+ * @returns The host register number; throws VBox status code on failure,
+ *          so no need to check the return value.
+ * @param   pReNative       The native recompile state.
+ * @param   poff            Pointer to the variable with the code buffer position.
+ *                          This will be updated if we need to move a variable from
+ *                          register to stack in order to satisfy the request.
+ * @param   fRegMask        Mask of acceptable registers.
+ * @param   fPreferVolatile Whether to prefer volatile over non-volatile
+ *                          registers (@c true, default) or the other way around
+ *                          (@c false, for iemNativeRegAllocTmpForGuestReg()).
+ */
+DECL_HIDDEN_THROW(uint8_t) iemNativeRegAllocTmpEx(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint32_t fRegMask,
+                                                  bool fPreferVolatile /*= true*/)
+{
+    Assert(!(fRegMask & ~IEMNATIVE_HST_GREG_MASK));
+    Assert(!(fRegMask & IEMNATIVE_REG_FIXED_MASK));
+
+    /*
+     * Try find a completely unused register, preferably a call-volatile one.
+     */
+    uint8_t  idxReg;
+    uint32_t fRegs = ~pReNative->Core.bmHstRegs
+                   & ~pReNative->Core.bmHstRegsWithGstShadow
+                   & (~IEMNATIVE_REG_FIXED_MASK & IEMNATIVE_HST_GREG_MASK)
+                   & fRegMask;
+    if (fRegs)
+    {
+        if (fPreferVolatile)
+            idxReg = (uint8_t)ASMBitFirstSetU32(  fRegs & IEMNATIVE_CALL_VOLATILE_GREG_MASK
+                                                ? fRegs & IEMNATIVE_CALL_VOLATILE_GREG_MASK : fRegs) - 1;
+        else
+            idxReg = (uint8_t)ASMBitFirstSetU32(  fRegs & ~IEMNATIVE_CALL_VOLATILE_GREG_MASK
+                                                ? fRegs & ~IEMNATIVE_CALL_VOLATILE_GREG_MASK : fRegs) - 1;
+        Assert(pReNative->Core.aHstRegs[idxReg].fGstRegShadows == 0);
+        Assert(!(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxReg)));
+    }
+    else
+    {
+        idxReg = iemNativeRegAllocFindFree(pReNative, poff, fPreferVolatile, fRegMask);
         AssertStmt(idxReg != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_ALLOCATOR_NO_FREE_TMP));
     }
     
  * @param   uImm            The immediate value that the register must hold upon
  *                          return.
- * @param   fPreferVolatile Wheter to prefer volatile over non-volatile
+ * @param   fPreferVolatile Whether to prefer volatile over non-volatile
  *                          registers (@c true, default) or the other way around
  *                          (@c false).

     off = iemNativeEmitCheckCallRetAndPassUp(pReNative, off, pCallEntry->idxInstr);
 
+    return off;
+}
+
+
+/**
+ * Emits the code at the ObsoleteTb label.
+ */
+static uint32_t iemNativeEmitObsoleteTb(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnLabel)
+{
+    uint32_t const idxLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_ObsoleteTb);
+    if (idxLabel != UINT32_MAX)
+    {
+        iemNativeLabelDefine(pReNative, idxLabel, off);
+
+        /* int iemNativeHlpObsoleteTb(PVMCPUCC pVCpu) */
+        off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
+        off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)iemNativeHlpObsoleteTb);
+
+        /* jump back to the return sequence. */
+        off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
+    }
     return off;
 }

 
 /**
+ * Sets idxTbCurInstr in preparation of raising an exception.
+ */
+/** @todo Optimize this, so we don't set the same value more than once.  Just
+ *        needs some tracking. */
+#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
+# define BODY_SET_CUR_INSTR() \
+    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, pCallEntry->idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr))
+#else
+# define BODY_SET_CUR_INSTR() ((void)0)
+#endif
+
+
+/**
  * Macro that emits the 16/32-bit CS.LIM check.
  */
 #define BODY_CHECK_CS_LIM(a_cbInstr) \
-    off = iemNativeEmitBltInCheckCsLim(pReNative, off, (a_cbInstr), pCallEntry->idxInstr)
+    off = iemNativeEmitBltInCheckCsLim(pReNative, off, (a_cbInstr))
 
 DECL_FORCE_INLINE(uint32_t)
-iemNativeEmitBltInCheckCsLim(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr, uint8_t idxInstr)
+iemNativeEmitBltInCheckCsLim(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t cbInstr)
 {
     Assert(cbInstr >  0);
     Assert(cbInstr < 16);
-
-    /* Before we start, update the instruction number in case we raise an exception. */
-#ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING
-    off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr));
-#else
-    RT_NOREF(idxInstr);
-#endif
 
     /*

                                                                   kIemNativeGstRegUse_ReadOnly);
 #ifdef RT_ARCH_AMD64
-    uint8_t * const pbCodeBuf   = iemNativeInstrBufEnsure(pReNative, off, 8+1);
+    uint8_t * const pbCodeBuf   = iemNativeInstrBufEnsure(pReNative, off, 8);
 #elif defined(RT_ARCH_ARM64)
-    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
+    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
 #else
 # error "Port me"
     
 }
 
+
+/**
+ * Macro that implements opcode (re-)checking.
+ */
+#define BODY_CHECK_OPCODES_DISABLED(a_pTb, a_idxRange, a_offRange, a_cbInstr) \
+    off = iemNativeEmitBltInCheckOpcodes(pReNative, off, (a_pTb), (a_idxRange), (a_offRange))
+
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitBltInCheckOpcodes(PIEMRECOMPILERSTATE pReNative, uint32_t off, PCIEMTB pTb, uint8_t idxRange, uint16_t offRange)
+{
+    Assert(idxRange < pTb->cRanges && pTb->cRanges <= RT_ELEMENTS(pTb->aRanges));
+    Assert(offRange < pTb->aRanges[idxRange].cbOpcodes);
+
+    uint32_t const      idxLabelObsoleteTb = iemNativeLabelCreate(pReNative, kIemNativeLabelType_ObsoleteTb);
+
+    /*
+     * Where to start and how much to compare.
+     *
+     * Looking at the ranges produced when r160746 was running a DOS VM with TB
+     * logging, the ranges can be anything from 1 byte to at least 0x197 bytes,
+     * with 6, 5, 4, 7, 8, 40, 3, 2, 9 and 10 being the top 10 in the sample.
+     *
+     * The top 10 for the early boot phase of a 64-bit debian 9.4 VM: 5, 9, 8,
+     * 12, 10, 11, 6, 13, 15 and 16.  Max 0x359 bytes. Same revision as above.
+     */
+    uint16_t            offPage     = pTb->aRanges[idxRange].offPhysPage + offRange;
+    uint16_t            cbLeft      = pTb->aRanges[idxRange].cbOpcodes   - offRange;
+    uint8_t const      *pbOpcodes   = &pTb->pabOpcodes[pTb->aRanges[idxRange].offOpcodes];
+    uint32_t            offConsolidatedJump = UINT32_MAX;
+
+#ifdef RT_ARCH_AMD64
+    /* AMD64/x86 offers a bunch of options.  Smaller stuff can be
+       completely inlined, for larger we use REPE CMPS.  */
+# define CHECK_OPCODES_CMP_IMMXX(a_idxReg, a_bOpcode) /* cost: 3 bytes */  do { \
+            pbCodeBuf[off++] = a_bOpcode; \
+            Assert(offPage < 127); \
+            pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_MEM1, 7, a_idxReg); \
+            pbCodeBuf[off++] = RT_BYTE1(offPage); \
+        } while (0)
+
+# define CHECK_OPCODES_CMP_JMP() /* cost: 7 bytes first time, then 2 bytes */ do { \
+            if (offConsolidatedJump != UINT32_MAX) \
+            { \
+                int32_t const offDisp = (int32_t)offConsolidatedJump - (int32_t)(off + 2); \
+                Assert(offDisp >= -128); \
+                pbCodeBuf[off++] = 0x75; /* jnz near */ \
+                pbCodeBuf[off++] = (uint8_t)offDisp; \
+            } \
+            else \
+            { \
+                pbCodeBuf[off++] = 0x74; /* jz near +5 */ \
+                pbCodeBuf[off++] = 0x05; \
+                offConsolidatedJump = off; \
+                pbCodeBuf[off++] = 0xe9; /* jmp rel32 */ \
+                iemNativeAddFixup(pReNative, off, idxLabelObsoleteTb, kIemNativeFixupType_Rel32, -4); \
+                pbCodeBuf[off++] = 0x00; \
+                pbCodeBuf[off++] = 0x00; \
+                pbCodeBuf[off++] = 0x00; \
+                pbCodeBuf[off++] = 0x00; \
+            } \
+        } while (0)
+
+# define CHECK_OPCODES_CMP_IMM32(a_idxReg) /* cost: 3+4+2 = 9 */ do { \
+        CHECK_OPCODES_CMP_IMMXX(a_idxReg, 0x81); \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        cbLeft  -= 4; \
+        offPage += 4; \
+        CHECK_OPCODES_CMP_JMP(); \
+    } while (0)
+
+# define CHECK_OPCODES_CMP_IMM16(a_idxReg) /* cost: 1+3+2+2 = 8 */ do { \
+        pbCodeBuf[off++] = X86_OP_PRF_SIZE_OP; \
+        CHECK_OPCODES_CMP_IMMXX(a_idxReg, 0x81); \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        cbLeft  -= 2; \
+        offPage += 2; \
+        CHECK_OPCODES_CMP_JMP(); \
+    } while (0)
+
+# define CHECK_OPCODES_CMP_IMM8(a_idxReg) /* cost: 3+1+2 = 6 */ do { \
+        CHECK_OPCODES_CMP_IMMXX(a_idxReg, 0x80); \
+        pbCodeBuf[off++] = *pbOpcodes++; \
+        cbLeft  -= 1; \
+        offPage += 1; \
+        CHECK_OPCODES_CMP_JMP(); \
+    } while (0)
+
+# define CHECK_OPCODES_CMPSX(a_bOpcode, a_cbToSubtract, a_bPrefix) /* cost: 2+2 = 4 */ do { \
+        if (a_bPrefix) \
+            pbCodeBuf[off++] = (a_bPrefix); \
+        pbCodeBuf[off++] = (a_bOpcode); \
+        CHECK_OPCODES_CMP_JMP(); \
+        cbLeft -= (a_cbToSubtract); \
+    } while (0)
+
+# define CHECK_OPCODES_ECX_IMM(a_uValue) /* cost: 5 */ do { \
+        pbCodeBuf[off++] = 0xb8 + X86_GREG_xCX; \
+        pbCodeBuf[off++] = RT_BYTE1(a_uValue); \
+        pbCodeBuf[off++] = RT_BYTE2(a_uValue); \
+        pbCodeBuf[off++] = RT_BYTE3(a_uValue); \
+        pbCodeBuf[off++] = RT_BYTE4(a_uValue); \
+    } while (0)
+
+    if (cbLeft <= 24)
+    {
+        uint8_t const idxRegTmp = iemNativeRegAllocTmpEx(pReNative, &off,
+                                                           (  RT_BIT_32(X86_GREG_xAX)
+                                                            | RT_BIT_32(X86_GREG_xCX)
+                                                            | RT_BIT_32(X86_GREG_xDX)
+                                                            | RT_BIT_32(X86_GREG_xBX)
+                                                            | RT_BIT_32(X86_GREG_xSI)
+                                                            | RT_BIT_32(X86_GREG_xDI))
+                                                         & ~IEMNATIVE_REG_FIXED_MASK); /* pick reg not requiring rex prefix */
+        off = iemNativeEmitLoadGprFromVCpuU64(pReNative, off, idxRegTmp, RT_UOFFSETOF(VMCPUCC, iem.s.pbInstrBuf));
+        if (offPage >= 128 - cbLeft)
+        {
+            off = iemNativeEmitAddGprImm(pReNative, off, idxRegTmp, offPage & ~(uint16_t)3);
+            offPage &= 3;
+        }
+
+        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5 + 14 + 54 + 8 + 6 /* = 87 */);
+
+        if (cbLeft > 8)
+            switch (offPage & 3)
+            {
+                case 0:
+                    break;
+                case 1: /* cost: 6 + 8 = 14 */
+                    CHECK_OPCODES_CMP_IMM8(idxRegTmp);
+                    RT_FALL_THRU();
+                case 2: /* cost: 8 */
+                    CHECK_OPCODES_CMP_IMM16(idxRegTmp);
+                    break;
+                case 3: /* cost: 6 */
+                    CHECK_OPCODES_CMP_IMM8(idxRegTmp);
+                    break;
+            }
+
+        while (cbLeft >= 4)
+            CHECK_OPCODES_CMP_IMM32(idxRegTmp);     /* max iteration: 24/4 = 6; --> cost: 6 * 9 = 54 */
+
+        if (cbLeft >= 2)
+            CHECK_OPCODES_CMP_IMM16(idxRegTmp);     /* cost: 8 */
+        if (cbLeft)
+            CHECK_OPCODES_CMP_IMM8(idxRegTmp);      /* cost: 6 */
+
+        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+        iemNativeRegFreeTmp(pReNative, idxRegTmp);
+    }
+    else
+    {
+        /* RDI = &pbInstrBuf[offPage] */
+        uint8_t const idxRegDi = iemNativeRegAllocTmpEx(pReNative, &off, RT_BIT_32(X86_GREG_xDI));
+        off = iemNativeEmitLoadGprFromVCpuU64(pReNative, off, idxRegDi, RT_UOFFSETOF(VMCPU, iem.s.pbInstrBuf));
+        if (offPage != 0)
+            off = iemNativeEmitAddGprImm(pReNative, off, idxRegDi, offPage);
+
+        /* RSI = pbOpcodes */
+        uint8_t const idxRegSi = iemNativeRegAllocTmpEx(pReNative, &off, RT_BIT_32(X86_GREG_xSI));
+        off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegSi, (uintptr_t)pbOpcodes);
+
+        /* RCX = counts. */
+        uint8_t const idxRegCx = iemNativeRegAllocTmpEx(pReNative, &off, RT_BIT_32(X86_GREG_xCX));
+
+        uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5 + 10 + 5 + 5 + 3 + 4 + 3 /*= 35*/);
+
+        /** @todo profile and optimize this further.  Maybe an idea to align by
+         *        offPage if the two cannot be reconciled. */
+        /* Align by the page offset, so that at least one of the accesses is naturally aligned. */
+        switch (offPage & 7)                                            /* max cost: 10 */
+        {
+            case 0:
+                break;
+            case 1: /* cost: 3+4+3 = 10 */
+                CHECK_OPCODES_CMPSX(0xa6, 1, 0);
+                RT_FALL_THRU();
+            case 2: /* cost: 4+3 = 7 */
+                CHECK_OPCODES_CMPSX(0xa7, 2, X86_OP_PRF_SIZE_OP);
+                CHECK_OPCODES_CMPSX(0xa7, 4, 0);
+                break;
+            case 3: /* cost: 3+3 = 6 */
+                CHECK_OPCODES_CMPSX(0xa6, 1, 0);
+                RT_FALL_THRU();
+            case 4: /* cost: 3 */
+                CHECK_OPCODES_CMPSX(0xa7, 4, 0);
+                break;
+            case 5: /* cost: 3+4 = 7 */
+                CHECK_OPCODES_CMPSX(0xa6, 1, 0);
+                RT_FALL_THRU();
+            case 6: /* cost: 4 */
+                CHECK_OPCODES_CMPSX(0xa7, 2, X86_OP_PRF_SIZE_OP);
+                break;
+            case 7: /* cost: 3 */
+                CHECK_OPCODES_CMPSX(0xa6, 1, 0);
+                break;
+        }
+
+        /* Compare qwords: */
+        uint32_t const cQWords = cbLeft >> 3;
+        CHECK_OPCODES_ECX_IMM(cQWords);                                     /* cost: 5 */
+
+        pbCodeBuf[off++] = X86_OP_PRF_REPZ;                                 /* cost: 5 */
+        CHECK_OPCODES_CMPSX(0xa7, 0, X86_OP_REX_W);
+        cbLeft &= 7;
+
+        if (cbLeft & 4)
+            CHECK_OPCODES_CMPSX(0xa7, 0, 0);                                /* cost: 3 */
+        if (cbLeft & 2)
+            CHECK_OPCODES_CMPSX(0xa7, 0, X86_OP_PRF_SIZE_OP);               /* cost: 4 */
+        if (cbLeft & 1)
+            CHECK_OPCODES_CMPSX(0xa6, 0, 0);                                /* cost: 3 */
+
+        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+        iemNativeRegFreeTmp(pReNative, idxRegCx);
+        iemNativeRegFreeTmp(pReNative, idxRegSi);
+        iemNativeRegFreeTmp(pReNative, idxRegDi);
+    }
+
+#elif defined(RT_ARCH_ARM64)
+    uint8_t const idxRegTmp = iemNativeRegAllocTmp(pReNative, &off);
+    off = iemNativeEmitLoadGprFromVCpuU64(pReNative, off, idxRegTmp, RT_UOFFSETOF(VMCPU, iem.s.pbInstrBuf));
+# if 0
+
+    uint32_t * const    pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
+    /** @todo continue here */
+# else
+    AssertReleaseFailed();
+    RT_NOREF(pReNative, off, pTb, idxRange, offRange);
+# endif
+    iemNativeRegFreeTmp(pReNative, idxRegTmp);
+#else
+# error "Port me"
+#endif
+    return off;
+}
+
+
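
The emitter above uses two strategies: opcode ranges of up to 24 bytes are checked with inlined CMP instructions (dword, word, byte), longer ranges with REPE CMPSQ plus a dword/word/byte tail, after first peeling a few bytes so that one side of the comparison is naturally aligned. The portable C analogue below illustrates only that chunking idea and is not part of the changeset; sketchCompareChunked is a hypothetical helper, and the real code aligns by the guest page offset rather than the host pointer.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Hypothetical illustration: compare two buffers in descending chunk sizes. */
    static bool sketchCompareChunked(const uint8_t *pbDst, const uint8_t *pbSrc, size_t cb)
    {
        /* Peel leading bytes until pbDst is 8-byte aligned. */
        while (cb > 0 && ((uintptr_t)pbDst & 7))
        {
            if (*pbDst++ != *pbSrc++)
                return false;
            cb--;
        }
        /* Bulk of the work in qwords (REPE CMPSQ in the emitted code). */
        while (cb >= 8)
        {
            uint64_t u64Dst, u64Src;
            memcpy(&u64Dst, pbDst, 8);
            memcpy(&u64Src, pbSrc, 8);
            if (u64Dst != u64Src)
                return false;
            pbDst += 8;
            pbSrc += 8;
            cb    -= 8;
        }
        /* Tail bytes (CMPSD/CMPSW/CMPSB in the emitted code). */
        while (cb-- > 0)
            if (*pbDst++ != *pbSrc++)
                return false;
        return true;
    }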
 #ifdef BODY_CHECK_CS_LIM
 /**

 {
     uint32_t const cbInstr = (uint32_t)pCallEntry->auParams[0];
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     return off;
     
  */
 static IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_BltIn_CheckCsLimAndOpcodes)
-{
-    PCIEMTB const  pTb      = pVCpu->iem.s.pCurTbR3;
-    uint32_t const cbInstr  = (uint32_t)pCallEntry->auParams[0];
-    uint32_t const idxRange = (uint32_t)pCallEntry->auParams[1];
-    uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
-    BODY_CHECK_CS_LIM(cbInstr);
-    BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
-    return off;
-}
-#endif
-
-
-#if defined(BODY_CHECK_OPCODES)
-/**
- * Built-in function for re-checking opcodes after an instruction that may have
- * modified them.
- */
-static IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_BltIn_CheckOpcodes)
 {
     PCIEMTB const  pTb      = pReNative->pTbOrg;

     uint32_t const idxRange = (uint32_t)pCallEntry->auParams[1];
     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
+    BODY_SET_CUR_INSTR();
+    BODY_CHECK_CS_LIM(cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
     return off;

 
 
-#if defined(BODY_CHECK_OPCODES) && defined(BODY_CONSIDER_CS_LIM_CHECKING)
-/**
- * Built-in function for re-checking opcodes and considering the need for CS.LIM
- * checking after an instruction that may have modified them.
- */
-static IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_BltIn_CheckOpcodesConsiderCsLim)
+#if defined(BODY_CHECK_OPCODES)
+/**
+ * Built-in function for re-checking opcodes after an instruction that may have
+ * modified them.
+ */
+static IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_BltIn_CheckOpcodes)
 {
     PCIEMTB const  pTb      = pReNative->pTbOrg;

     uint32_t const idxRange = (uint32_t)pCallEntry->auParams[1];
     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
+    BODY_SET_CUR_INSTR();
+    BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);
+    return off;
+}
+#endif
+
+
+#if defined(BODY_CHECK_OPCODES) && defined(BODY_CONSIDER_CS_LIM_CHECKING)
+/**
+ * Built-in function for re-checking opcodes and considering the need for CS.LIM
+ * checking after an instruction that may have modified them.
+ */
+static IEM_DECL_IEMNATIVERECOMPFUNC_DEF(iemNativeRecompFunc_BltIn_CheckOpcodesConsiderCsLim)
+{
+    PCIEMTB const  pTb      = pReNative->pTbOrg;
+    uint32_t const cbInstr  = (uint32_t)pCallEntry->auParams[0];
+    uint32_t const idxRange = (uint32_t)pCallEntry->auParams[1];
+    uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     BODY_CHECK_PC_AFTER_BRANCH(pTb, idxRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_PC_AFTER_BRANCH(pTb, idxRange, cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_CHECK_PC_AFTER_BRANCH(pTb, idxRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     BODY_LOAD_TLB_AFTER_BRANCH(pTb, idxRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_LOAD_TLB_AFTER_BRANCH(pTb, idxRange, cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange, offRange, cbInstr);

     uint32_t const offRange = (uint32_t)pCallEntry->auParams[2];
     //LogFunc(("idxRange=%u @ %#x LB %#x: offPhysPage=%#x LB %#x\n", idxRange, offRange, cbInstr, pTb->aRanges[idxRange].offPhysPage, pTb->aRanges[idxRange].cbOpcodes));
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_LOAD_TLB_AFTER_BRANCH(pTb, idxRange, cbInstr);

     uint32_t const offRange1   = (uint32_t)pCallEntry->auParams[2];
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange1, offRange1, cbInstr);

     uint32_t const offRange1   = (uint32_t)pCallEntry->auParams[2];
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_OPCODES(pTb, idxRange1, offRange1, cbInstr);
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);

     uint32_t const offRange1   = (uint32_t)pCallEntry->auParams[2];
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange1, offRange1, cbInstr);

     //uint32_t const offRange1   = (uint32_t)uParam2;
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);

     //uint32_t const offRange1   = (uint32_t)pCallEntry->auParams[2];
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);
     BODY_CHECK_OPCODES(pTb, idxRange2, 0, cbInstr);

     //uint32_t const offRange1   = (uint32_t)pCallEntry->auParams[2];
     uint32_t const idxRange2   = idxRange1 + 1;
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, cbStartPage, idxRange2, cbInstr);

     uint32_t const cbInstr     = (uint32_t)pCallEntry->auParams[0];
     uint32_t const idxRange    = (uint32_t)pCallEntry->auParams[1];
+    BODY_SET_CUR_INSTR();
     BODY_CHECK_CS_LIM(cbInstr);
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, 0, idxRange, cbInstr);

     uint32_t const cbInstr     = (uint32_t)pCallEntry->auParams[0];
     uint32_t const idxRange    = (uint32_t)pCallEntry->auParams[1];
+    BODY_SET_CUR_INSTR();
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, 0, idxRange, cbInstr);
     //Assert(pVCpu->iem.s.offCurInstrStart == 0);

     uint32_t const cbInstr     = (uint32_t)pCallEntry->auParams[0];
     uint32_t const idxRange    = (uint32_t)pCallEntry->auParams[1];
+    BODY_SET_CUR_INSTR();
     BODY_CONSIDER_CS_LIM_CHECKING(pTb, cbInstr);
     BODY_LOAD_TLB_FOR_NEW_PAGE(pTb, 0, idxRange, cbInstr);

                                 case kIemNativeLabelType_RaiseGp0:
                                     pszName = "RaiseGp0";
+                                    break;
+                                case kIemNativeLabelType_ObsoleteTb:
+                                    pszName = "ObsoleteTb";
                                     break;
                                 case kIemNativeLabelType_If:

         if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_RaiseGp0))
             off = iemNativeEmitRaiseGp0(pReNative, off, idxReturnLabel);
+        if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ObsoleteTb))
+            off = iemNativeEmitObsoleteTb(pReNative, off, idxReturnLabel);
     }
     IEMNATIVE_CATCH_LONGJMP_BEGIN(pReNative, rc);
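
How the scattered hunks above fit together: the check emitter creates (or reuses) the ObsoleteTb label and records a Rel32 fixup at each mismatch jump; at the end of code generation the label is defined and the helper call is emitted; the helper itself retires the TB. The annotated outline below is assembled from the calls in this changeset and is not additional code:

    /* 1. In iemNativeEmitBltInCheckOpcodes: get a label index and record a fixup for the jmp rel32. */
    uint32_t const idxLabelObsoleteTb = iemNativeLabelCreate(pReNative, kIemNativeLabelType_ObsoleteTb);
    iemNativeAddFixup(pReNative, off, idxLabelObsoleteTb, kIemNativeFixupType_Rel32, -4);

    /* 2. In the TB epilogue (iemNativeEmitObsoleteTb): define the label and call the helper. */
    iemNativeLabelDefine(pReNative, idxLabel, off);
    off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)iemNativeHlpObsoleteTb);

    /* 3. At run time, iemNativeHlpObsoleteTb retires the TB and returns VINF_IEM_REEXEC_BREAK. */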
  • trunk/src/VBox/VMM/include/IEMInternal.h

r102585 → r102603
     /** @name Decoder state.
      * @{ */
-#ifndef IEM_WITH_OPAQUE_DECODER_STATE
-# ifdef IEM_WITH_CODE_TLB
+#ifdef IEM_WITH_CODE_TLB
     /** The offset of the next instruction byte. */
     uint32_t                offInstrNextByte;                                                               /* 0x08 */

      */
     uint8_t const          *pbInstrBuf;                                                                     /* 0x10 */
-#  if ARCH_BITS == 32
+# if ARCH_BITS == 32
     uint32_t                uInstrBufHigh; /** The high dword of the host context pbInstrBuf member. */
-#  endif
+# endif
     /** The program counter corresponding to pbInstrBuf.
      * This is set to a non-canonical address when we need to invalidate it. */

      * This takes the CS segment limit into account. */
     uint16_t                cbInstrBufTotal;                                                                /* 0x28 */
+# ifndef IEM_WITH_OPAQUE_DECODER_STATE
     /** Offset into pbInstrBuf of the first byte of the current instruction.
      * Can be negative to efficiently handle cross page instructions. */

     uint8_t                 bUnused;                                                                        /* 0x35 */
 #  endif
-# else  /* !IEM_WITH_CODE_TLB */
+# else  /* IEM_WITH_OPAQUE_DECODER_STATE */
+    uint8_t                 abOpaqueDecoderPart1[0x36 - 0x2a];
+# endif /* IEM_WITH_OPAQUE_DECODER_STATE */
+
+#else  /* !IEM_WITH_CODE_TLB */
+#  ifndef IEM_WITH_OPAQUE_DECODER_STATE
     /** The size of what has currently been fetched into abOpcode. */
     uint8_t                 cbOpcode;                                                                       /*       0x08 */

     uint8_t                 uRexIndex;                                                                      /*       0x12 */
 
-# endif /* !IEM_WITH_CODE_TLB */
-
+# else  /* IEM_WITH_OPAQUE_DECODER_STATE */
+    uint8_t                 abOpaqueDecoderPart1[0x13 - 0x08];
+# endif /* IEM_WITH_OPAQUE_DECODER_STATE */
+#endif /* !IEM_WITH_CODE_TLB */
+
+#ifndef IEM_WITH_OPAQUE_DECODER_STATE
     /** The effective operand mode. */
     IEMMODE                 enmEffOpSize;                                                                   /* 0x36, 0x13 */

     uint8_t                 abAlignment2c[0x4f - 0x2f];                                                     /*       0x2f */
 # endif
+
 #else  /* IEM_WITH_OPAQUE_DECODER_STATE */
-    uint8_t                 abOpaqueDecoder[0x4f - 0x8];
+# ifdef IEM_WITH_CODE_TLB
+    uint8_t                 abOpaqueDecoderPart2[0x4f - 0x36];
+# else
+    uint8_t                 abOpaqueDecoderPart2[0x4f - 0x13];
+# endif
 #endif /* IEM_WITH_OPAQUE_DECODER_STATE */
     /** @} */
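
Net effect of the IEMInternal.h hunks: in code-TLB builds the instruction-buffer members (offInstrNextByte at 0x08 up to cbInstrBufTotal, which ends at 0x2a) remain visible even when IEM_WITH_OPAQUE_DECODER_STATE hides the rest of the decoder state, so the native opcode check can read iem.s.pbInstrBuf directly; the opaque padding is split into two parts so the overall layout stays identical. A hedged compile-time check of that arithmetic, based only on the offsets noted in the comments above and assuming the decoder state lives in struct IEMCPU as elsewhere in this header (this is not part of the changeset):

    #if defined(IEM_WITH_OPAQUE_DECODER_STATE) && defined(IEM_WITH_CODE_TLB)
    /* Part1 spans from the end of cbInstrBufTotal (0x28 + 2 = 0x2a) to enmEffOpSize (0x36),
       Part2 from enmEffOpSize (0x36) to the end of the decoder state (0x4f). */
    AssertCompile(sizeof(((IEMCPU *)0)->abOpaqueDecoderPart1) == 0x36 - 0x2a);
    AssertCompile(sizeof(((IEMCPU *)0)->abOpaqueDecoderPart2) == 0x4f - 0x36);
    #endif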
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r102587 r102603  
    310310    kIemNativeLabelType_NonZeroRetOrPassUp,
    311311    kIemNativeLabelType_RaiseGp0,
     312    kIemNativeLabelType_ObsoleteTb,
    312313    /* Labels with data, potentially multiple instances per TB: */
    313314    kIemNativeLabelType_FirstWithMultipleInstances,
     
    806807
    807808DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmp(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, bool fPreferVolatile = true);
     809DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpEx(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint32_t fRegMask,
     810                                                   bool fPreferVolatile = true);
    808811DECL_HIDDEN_THROW(uint8_t)  iemNativeRegAllocTmpImm(PIEMRECOMPILERSTATE pReNative, uint32_t *poff, uint64_t uImm,
    809812                                                    bool fPreferVolatile = true);
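
The new iemNativeRegAllocTmpEx prototype lets callers restrict the temporary allocator to a specific set of host registers; the AMD64 opcode-check emitter above uses it to pin RDI, RSI and RCX for the REPE CMPS sequence. A usage sketch assembled from those call sites (illustrative only; the allocator throws via longjmp on failure, so the return value needs no checking):

    /* Allocate RDI specifically, spilling whatever currently occupies it if necessary. */
    uint8_t const idxRegDi = iemNativeRegAllocTmpEx(pReNative, &off, RT_BIT_32(X86_GREG_xDI));
    /* ... emit code that uses the register ... */
    iemNativeRegFreeTmp(pReNative, idxRegDi);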