VirtualBox

Changeset 102800 in vbox for trunk/src


Timestamp: Jan 9, 2024 10:19:17 PM
Author: vboxsync
Message:

VMM/IEM: Stats, go straight for the safe fallback functions in the memory access helpers. bugref:10371

Location: trunk/src/VBox/VMM
Files: 4 edited

Legend: hunk headers give the old and new line ranges (@@ -old,count +new,count @@); unchanged lines are prefixed with a space, added lines with "+", removed lines with "-".
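
In short, the change: every native memory-access helper that used to call the ordinary iemMem*Jmp function (inline fast path plus fallback) now calls the iemMem*SafeJmp fallback directly whenever the matching IEMNATIVE_WITH_TLB_LOOKUP_FETCH / _STORE macro is defined; with the TLB lookup emitted inline by the recompiler, these helpers are presumably only reached on a TLB miss anyway. A minimal sketch of the pattern, using names from the diff below:

IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
{
#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
    /* The inline TLB lookup already failed by the time this helper runs,
       so go straight to the safe fallback. */
    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
#else
    /* No inline lookup: take the normal path with its built-in fallback. */
    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
#endif
}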
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r102793 → r102800
@@ -108,4 +108,13 @@
 #if (defined(RT_ARCH_AMD64) && 1) || (defined(RT_ARCH_ARM64) && 1)
 # define IEMNATIVE_WITH_TLB_LOOKUP
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_STORE
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_MAPPING
 #endif
 #ifdef IEMNATIVE_WITH_TLB_LOOKUP

@@ -1664,5 +1673,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1674,5 +1687,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1684,5 +1701,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1693,5 +1714,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1702,5 +1727,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1712,5 +1741,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1722,5 +1755,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1731,5 +1768,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1741,5 +1782,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1750,5 +1795,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1759,5 +1808,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value))
 {
-    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#else
+    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#endif
 }

@@ -1768,5 +1821,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value))
 {
-    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#else
+    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#endif
 }

@@ -1777,5 +1834,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value))
 {
-    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#else
+    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#endif
 }

@@ -1786,5 +1847,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value))
 {
-    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#else
+    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#endif
 }

@@ -1895,5 +1960,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1906,5 +1975,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1917,5 +1990,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1927,5 +2004,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1937,5 +2018,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1948,5 +2033,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1959,5 +2048,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1969,5 +2062,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1980,5 +2077,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1989,5 +2090,9 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1998,5 +2103,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value))
 {
-    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u8Value);
+#else
+    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value);
+#endif
 }

@@ -2007,5 +2116,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value))
 {
-    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u16Value);
+#else
+    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value);
+#endif
 }

@@ -2016,5 +2129,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value))
 {
-    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u32Value);
+#else
+    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value);
+#endif
 }

@@ -2025,5 +2142,9 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value))
 {
-    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u64Value);
+#else
+    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value);
+#endif
 }

@@ -11051,4 +11172,10 @@
          */
         PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+        off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                                  enmOp == kIemNativeEmitMemOp_Store
+                                                  ? RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStore)
+                                                  : RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch));
+# endif
         switch (enmOp)
         {

@@ -11767,4 +11894,8 @@
          */
         PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+        off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                                  RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
         if (idxRegValue != UINT8_MAX)
         {

@@ -12111,4 +12242,8 @@
          */
         PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
+# ifdef VBOX_WITH_STATISTICS
+        off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                                  RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
         switch (cbMem)
         {

@@ -12620,4 +12755,8 @@
         off = iemNativeEmitTlbLookup(pReNative, off, &TlbState, iSegReg, cbMem, fAlignMask, fAccess,
                                      idxLabelTlbLookup, idxLabelTlbMiss, idxRegMemResult);
+# ifdef VBOX_WITH_STATISTICS
+        off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, TlbState.idxReg1, TlbState.idxReg2,
+                                                RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForMapped));
+# endif
 
         /* [idxVarUnmapInfo] = 0; */
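
The four call sites above emit the counter increment only in VBOX_WITH_STATISTICS builds, and they place it on the generated TLB-hit path itself, so hits are counted by the translated code without any helper call. A hedged usage sketch mirroring those call sites (pCodeBuf, off and TlbState are assumed from the surrounding emitter context):

    /* Bump a TLB-hit counter in VMCPU from emitted code; the two scratch
       registers are only needed by the ARM64 encoding. */
    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
                                              RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch));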
  • trunk/src/VBox/VMM/VMMR3/IEMR3.cpp

    r102557 → r102800
@@ -297,4 +297,14 @@
         STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbHits,            STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
                         "Data TLB hits",                            "/IEM/CPU%u/DataTlb-Hits", idCpu);
+#  ifdef VBOX_WITH_IEM_RECOMPILER
+        STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStack, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                        "Data TLB native stack access hits",        "/IEM/CPU%u/DataTlb-Hits-Native-Stack", idCpu);
+        STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForFetch, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                        "Data TLB native data fetch hits",          "/IEM/CPU%u/DataTlb-Hits-Native-Fetch", idCpu);
+        STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStore, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                        "Data TLB native data store hits",          "/IEM/CPU%u/DataTlb-Hits-Native-Store", idCpu);
+        STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForMapped, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                        "Data TLB native mapped data hits",         "/IEM/CPU%u/DataTlb-Hits-Native-Mapped", idCpu);
+#  endif
 # endif
         STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbMisses,          STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
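
Once registered, the new counters appear in the statistics tree as /IEM/CPU<n>/DataTlb-Hits-Native-{Stack,Fetch,Store,Mapped}, next to the existing DataTlb-Hits node, and can be read like any other STAM counter, e.g. via VBoxManage debugvm "<vmname>" statistics --pattern "/IEM/CPU*/DataTlb-Hits*" (the standard debugvm statistics interface, not part of this changeset).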
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r102790 → r102800
@@ -1710,6 +1710,5 @@
     /** Pointer to the native recompiler state for ring-3. */
     R3PTRTYPE(struct IEMRECOMPILERSTATE *)  pNativeRecompilerStateR3;
-    /** Alignment padding. */
-    uint64_t                auAlignment10[3];
+
     /** Statistics: Times TB execution was broken off before reaching the end. */
     STAMCOUNTER             StatTbExecBreaks;
     
@@ -1736,4 +1735,13 @@
     /** Native TB statistics: Number of threaded calls per TB that weren't recompiled. */
     STAMPROFILE             StatNativeCallsThreaded;
+    /** Native recompiled execution: TLB hits for data fetches. */
+    STAMCOUNTER             StatNativeTlbHitsForFetch;
+    /** Native recompiled execution: TLB hits for data stores. */
+    STAMCOUNTER             StatNativeTlbHitsForStore;
+    /** Native recompiled execution: TLB hits for stack accesses. */
+    STAMCOUNTER             StatNativeTlbHitsForStack;
+    /** Native recompiled execution: TLB hits for mapped accesses. */
+    STAMCOUNTER             StatNativeTlbHitsForMapped;
+    uint64_t                au64Padding[7];
     /** @} */
 
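Size check on the two hunks above: the block loses 3 uint64_t of padding before StatTbExecBreaks and gains 4 STAMCOUNTERs (8 bytes each) plus 7 uint64_t of trailing padding, a net growth of (4 + 7 - 3) * 8 = 64 bytes, exactly one cache line; the padding count is presumably chosen so the structure keeps its 64-byte-multiple layout.
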
  • trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h

    r102790 → r102800
@@ -881,4 +881,98 @@
 
 
+/** This is just a typesafe alternative to RT_UOFFSETOF. */
+DECL_FORCE_INLINE(uint32_t) iemNativeVCpuOffsetFromStamCounterPtr(PVMCPU pVCpu, PSTAMCOUNTER pStamCounter)
+{
+    uintptr_t const off = (uintptr_t)pStamCounter - (uintptr_t)pVCpu;
+    Assert(off < sizeof(VMCPU));
+    return off;
+}
+
+
+/** This is just a typesafe alternative to RT_UOFFSETOF. */
+DECL_FORCE_INLINE(uint32_t) iemNativeVCpuOffsetFromU64Ptr(PVMCPU pVCpu, uint64_t *pu64)
+{
+    uintptr_t const off = (uintptr_t)pu64 - (uintptr_t)pVCpu;
+    Assert(off < sizeof(VMCPU));
+    return off;
+}
+
+
+/**
+ * Emits code for incrementing a statistics counter (STAMCOUNTER/uint64_t) in VMCPU.
+ *
+ * @note The two temp registers are not required for AMD64.  ARM64 always
+ *       requires the first, and the 2nd is needed if the offset cannot be
+ *       encoded as an immediate.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitIncStamCounterInVCpuEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxTmp1, uint8_t idxTmp2, uint32_t offVCpu)
+{
+#ifdef RT_ARCH_AMD64
+    /* inc qword [pVCpu + off] */
+    pCodeBuf[off++] = X86_OP_REX_W;
+    pCodeBuf[off++] = 0xff;
+    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, 0, offVCpu);
+    RT_NOREF(idxTmp1, idxTmp2);
+
+#elif defined(RT_ARCH_ARM64)
+    /* Determine how we're to access pVCpu first. */
+    uint32_t const cbData = sizeof(STAMCOUNTER);
+    if (offVCpu < _4K * cbData && !(offVCpu & (cbData - 1)))
+    {
+        /* Use the unsigned variant of ldr Wt, [<Xn|SP>, #off]. */
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1,
+                                                   IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, idxTmp1,
+                                                   IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
+    }
+    else if (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx) < (unsigned)(_4K * cbData) && !(offVCpu & (cbData - 1)))
+    {
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PCPUMCTX,
+                                                   (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PCPUMCTX,
+                                                   (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
+    }
+    else
+    {
+        /* The offset is too large, so we must load it into a register and use
+           ldr Wt, [<Xn|SP>, (<Wm>|<Xm>)]. */
+        off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, idxTmp2, offVCpu);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PVMCPU, idxTmp2);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_St_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PVMCPU, idxTmp2);
+    }
+
+#else
+# error "port me"
+#endif
+    return off;
+}
+
+
+/**
+ * Emits code for incrementing a statistics counter (STAMCOUNTER/uint64_t) in VMCPU.
+ *
+ * @note The two temp registers are not required for AMD64.  ARM64 always
+ *       requires the first, and the 2nd is needed if the offset cannot be
+ *       encoded as an immediate.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitIncStamCounterInVCpu(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxTmp1, uint8_t idxTmp2, uint32_t offVCpu)
+{
+#ifdef RT_ARCH_AMD64
+    off = iemNativeEmitIncStamCounterInVCpuEx(iemNativeInstrBufEnsure(pReNative, off, 7), off, idxTmp1, idxTmp2, offVCpu);
+#elif defined(RT_ARCH_ARM64)
+    off = iemNativeEmitIncStamCounterInVCpuEx(iemNativeInstrBufEnsure(pReNative, off, 4+3), off, idxTmp1, idxTmp2, offVCpu);
+#else
+# error "port me"
+#endif
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
+
 /**
  * Emits a gprdst = gprsrc load.
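
For illustration, a hedged sketch (not part of the changeset) of the two equivalent ways a caller can name the counter to increment; pReNative, off, the temp register indexes and pVCpu are assumed to come from the surrounding emitter context:

    /* Offset via RT_UOFFSETOF, as the call sites in this changeset do: */
    off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, idxTmp1, idxTmp2,
                                            RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));

    /* The same offset via the typesafe helper, which type-checks the member
       and asserts at runtime that it lives inside VMCPU: */
    off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, idxTmp1, idxTmp2,
                                            iemNativeVCpuOffsetFromStamCounterPtr(pVCpu, &pVCpu->iem.s.StatNativeTlbHitsForStack));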