VirtualBox

Changeset 104947 in vbox for trunk/src/VBox/VMM/include


Timestamp:
Jun 17, 2024 8:23:40 PM
Author:
vboxsync
Message:

VMM/IEM,tstVMStructSize: Made the IEM TLB size more easily configurable. Include the IEM statistics in the tstVMStructSize testcase so it can be run to figure out the correct paddings. bugref:10687

Location:
trunk/src/VBox/VMM/include
Files:
2 edited

Legend:

  ' '  Unmodified
  '+'  Added
  '-'  Removed

  • trunk/src/VBox/VMM/include/IEMInternal.h (r104941 → r104947)

    @@ -261,5 +261,5 @@
     #ifndef RT_IN_ASSEMBLER /* ASM-NOINC-START - the rest of the file */
     
    -# if !defined(IN_TSTVMSTRUCT) && !defined(DOXYGEN_RUNNING)
    +# if !defined(IEM_WITHOUT_INSTRUCTION_STATS) && !defined(DOXYGEN_RUNNING)
     /** Instruction statistics.   */
     typedef struct IEMINSTRSTATS

    @@ -517,4 +517,11 @@
                                          | IEMTLBE_F_PHYS_REV )
     
    +/** The TLB size (power of two).
    + * We initially chose 256 because that way we can obtain the result directly
    + * from a 8-bit register without an additional AND instruction.
    + * See also @bugref{10687}. */
    +#define IEMTLB_ENTRY_COUNT                      256
    +#define IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO      8
    +AssertCompile(RT_BIT_32(IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO) == IEMTLB_ENTRY_COUNT);
     
     /**

    @@ -572,8 +579,6 @@
         uint32_t            au32Padding[6];
     
    -    /** The TLB entries.
    -     * We've choosen 256 because that way we can obtain the result directly from a
    -     * 8-bit register without an additional AND instruction. */
    -    IEMTLBENTRY         aEntries[256];
    +    /** The TLB entries. */
    +    IEMTLBENTRY         aEntries[IEMTLB_ENTRY_COUNT];
     } IEMTLB;
     AssertCompileSizeAlignment(IEMTLB, 64);

    @@ -607,5 +612,10 @@
      * @param   a_uTag      Value returned by IEMTLB_CALC_TAG.
      */
    -#define IEMTLB_TAG_TO_INDEX(a_uTag)         ( (uint8_t)(a_uTag) )
    +#if IEMTLB_ENTRY_COUNT == 256
    +# define IEMTLB_TAG_TO_INDEX(a_uTag)        ( (uint8_t)(a_uTag) )
    +#else
    +# define IEMTLB_TAG_TO_INDEX(a_uTag)        ( (a_uTag) & (IEMTLB_ENTRY_COUNT - 1U) )
    +AssertCompile(RT_IS_POWER_OF_TWO(IEMTLB_ENTRY_COUNT));
    +#endif
     /**
      * Converts a TLB tag value into a TLB index.

    @@ -2164,5 +2174,5 @@
         uint32_t                aStatInts[256];
     
    -#if defined(VBOX_WITH_STATISTICS) && !defined(IN_TSTVMSTRUCT) && !defined(DOXYGEN_RUNNING)
    +#if defined(VBOX_WITH_STATISTICS) && !defined(DOXYGEN_RUNNING) && !defined(IEM_WITHOUT_INSTRUCTION_STATS)
         /** Instruction statistics for ring-0/raw-mode. */
         IEMINSTRSTATS           StatsRZ;
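
The key to the new IEMTLB_TAG_TO_INDEX fallback is that IEMTLB_ENTRY_COUNT stays a power of two, so ANDing the tag with IEMTLB_ENTRY_COUNT - 1U keeps exactly the low IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO bits. A minimal standalone sketch of that relationship, using plain C11 stand-ins (the MY_* names and _Static_assert substitute for the real RT_*/AssertCompile macros and are not VirtualBox code):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Stand-ins mirroring the defaults added by this changeset. */
    #define MY_TLB_ENTRY_COUNT                  256u
    #define MY_TLB_ENTRY_COUNT_AS_POWER_OF_TWO  8u

    /* Same consistency checks the changeset expresses with AssertCompile(). */
    _Static_assert((1u << MY_TLB_ENTRY_COUNT_AS_POWER_OF_TWO) == MY_TLB_ENTRY_COUNT,
                   "shift and count must stay in sync");
    _Static_assert((MY_TLB_ENTRY_COUNT & (MY_TLB_ENTRY_COUNT - 1u)) == 0,
                   "count must be a power of two for the mask trick");

    /* Tag -> index, mirroring the generic branch of IEMTLB_TAG_TO_INDEX. */
    static uint32_t tagToIndex(uint64_t uTag)
    {
        return (uint32_t)(uTag & (MY_TLB_ENTRY_COUNT - 1u));
    }

    int main(void)
    {
        uint64_t uTag = UINT64_C(0x123456789a);    /* arbitrary example tag */
        printf("tag=%#llx -> index=%u\n", (unsigned long long)uTag, tagToIndex(uTag));
        assert(tagToIndex(uTag) == (uint8_t)uTag); /* matches the 256-entry special case */
        return 0;
    }

With the default count of 256 the mask is 0xff, which is why the 256-entry special case can simply cast the tag to uint8_t, and why the recompiler below can keep using a movzx or an 8-bit UBFIZ for that configuration.
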
  • trunk/src/VBox/VMM/include/IEMN8veRecompilerTlbLookup.h (r104148 → r104947)

    @@ -631,7 +631,18 @@
         AssertCompile(RTASSERT_OFFSET_OF(IEMTLB, uTlbPhysRev) < RTASSERT_OFFSET_OF(IEMTLB, aEntries));
         AssertCompile(RTASSERT_OFFSET_OF(VMCPUCC, iem.s.DataTlb.aEntries) < _64K);
    -    AssertCompile(RTASSERT_OFFSET_OF(VMCPUCC, iem.s.CodeTlb.aEntries) < _64K); /* if larger do: ADD x3, x27, x3, LSL #y */
    -    pCodeBuf[off++] = Armv8A64MkInstrMovZ(pTlbState->idxReg4, offVCpuTlb + RT_UOFFSETOF(IEMTLB, aEntries));
    -    pCodeBuf[off++] = Armv8A64MkInstrAddReg(pTlbState->idxReg4, IEMNATIVE_REG_FIXED_PVMCPU, pTlbState->idxReg4);
    +    if (offVCpuTlb + RT_UOFFSETOF(IEMTLB, aEntries) < _64K)
    +    {
    +        pCodeBuf[off++] = Armv8A64MkInstrMovZ(pTlbState->idxReg4, offVCpuTlb + RT_UOFFSETOF(IEMTLB, aEntries));
    +        pCodeBuf[off++] = Armv8A64MkInstrAddReg(pTlbState->idxReg4, IEMNATIVE_REG_FIXED_PVMCPU, pTlbState->idxReg4);
    +    }
    +    else
    +    {
    +        AssertCompileMemberAlignment(VMCPUCC, iem.s.CodeTlb.aEntries, 64);
    +        AssertCompileMemberAlignment(IEMTLB, aEntries, 64);
    +        AssertCompile(RTASSERT_OFFSET_OF(VMCPUCC, iem.s.CodeTlb.aEntries) < _64K*64U);
    +        pCodeBuf[off++] = Armv8A64MkInstrMovZ(pTlbState->idxReg4, (offVCpuTlb + RT_UOFFSETOF(IEMTLB, aEntries)) >> 6);
    +        pCodeBuf[off++] = Armv8A64MkInstrAddReg(pTlbState->idxReg4, IEMNATIVE_REG_FIXED_PVMCPU, pTlbState->idxReg4,
    +                                                true /*64Bit*/, false /*fSetFlags*/, 6 /*cShift*/, kArmv8A64InstrShift_Lsl);
    +    }
         pCodeBuf[off++] = Armv8A64MkInstrLdPairGpr(pTlbState->idxReg3, pTlbState->idxReg5, pTlbState->idxReg4,
                                                    (RT_OFFSETOF(IEMTLB, uTlbRevision) - RT_OFFSETOF(IEMTLB, aEntries)) / 8);

    @@ -649,6 +660,13 @@
     # endif
     # if defined(RT_ARCH_AMD64)
    +#  if IEMTLB_ENTRY_COUNT == 256
         /* movzx reg2, byte reg1 */
         off = iemNativeEmitLoadGprFromGpr8Ex(pCodeBuf, off, pTlbState->idxReg2, pTlbState->idxReg1);
    +#  else
    +    /* mov   reg2, reg1 */
    +    off = iemNativeEmitLoadGprFromGpr32Ex(pCodeBuf, off, pTlbState->idxReg2, pTlbState->idxReg1);
    +    /* and   reg2, IEMTLB_ENTRY_COUNT - 1U */
    +    off = iemNativeEmitAndGpr32ByImmEx(pCodeBuf, off, pTlbState->idxReg2, IEMTLB_ENTRY_COUNT - 1U);
    +#  endif
         /* shl   reg2, 5 ; reg2 *= sizeof(IEMTLBENTRY) */
         AssertCompileSize(IEMTLBENTRY, 32);

    @@ -666,6 +684,6 @@
     
     # elif defined(RT_ARCH_ARM64)
    -    /* reg2 = (reg1 & 0xff) << 5 */
    -    pCodeBuf[off++] = Armv8A64MkInstrUbfiz(pTlbState->idxReg2, pTlbState->idxReg1, 5, 8);
    +    /* reg2 = (reg1 & tlbmask) << 5 */
    +    pCodeBuf[off++] = Armv8A64MkInstrUbfiz(pTlbState->idxReg2, pTlbState->idxReg1, 5, IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO);
     #  ifdef IEMNATIVE_WITH_TLB_LOOKUP_LOAD_STORE_PAIR
         /* reg2 += &pVCpu->iem.s.DataTlb.aEntries / CodeTlb.aEntries */
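
Both emitter paths above compute the same address: the masked tag is scaled by sizeof(IEMTLBENTRY) (32 bytes, hence the shift by 5) and added to the offset of the aEntries array inside the VMCPU structure. A rough C picture of what the emitted AMD64/ARM64 code evaluates at runtime, with illustrative names and constants rather than the real VMM definitions:

    #include <stdint.h>

    /* Illustrative constants only -- the real values come from the VMM headers. */
    #define TLB_ENTRY_COUNT   256u   /* IEMTLB_ENTRY_COUNT                        */
    #define TLB_ENTRY_SIZE    32u    /* sizeof(IEMTLBENTRY), asserted in the diff */

    /*
     * What the emitted instruction sequences evaluate at runtime:
     *   reg2   = (uTag & (count - 1)) * 32;             // UBFIZ, or movzx/mov+and+shl
     *   pEntry = pVCpu + offAEntries + reg2;            // MOVZ + ADD (LSL #6 for large offsets)
     */
    static uintptr_t tlbEntryAddr(uintptr_t uVCpuBase, uint32_t offAEntries, uint64_t uTag)
    {
        uintptr_t offEntry = (uintptr_t)(uTag & (TLB_ENTRY_COUNT - 1u)) * TLB_ENTRY_SIZE;
        return uVCpuBase + offAEntries + offEntry;
    }

The new else branch in the ARM64 setup exists because MOVZ can only materialise a 16-bit immediate: once aEntries sits more than 64 KiB into VMCPU, the offset is loaded pre-shifted right by 6 and added back with LSL #6, which the 64-byte alignment AssertCompile checks make lossless.
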
