Timestamp: Jan 9, 2024, 10:19:17 PM
Location:  trunk/src/VBox/VMM
Files:     4 edited

Legend: unmodified lines are prefixed with a space, added lines with '+', removed lines with '-'; each hunk header gives the old and new starting line.
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
Changed from r102793 to r102800:

@@ -108 +108 @@
 #if (defined(RT_ARCH_AMD64) && 1) || (defined(RT_ARCH_ARM64) && 1)
 # define IEMNATIVE_WITH_TLB_LOOKUP
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_STORE
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_MAPPING
 #endif
 #ifdef IEMNATIVE_WITH_TLB_LOOKUP

@@ -1664 +1673 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1674 +1687 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1684 +1701 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1693 +1714 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1702 +1727 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1712 +1741 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1722 +1755 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1731 +1768 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1741 +1782 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1750 +1795 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }

@@ -1759 +1808 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value))
 {
-    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#else
+    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#endif
 }

@@ -1768 +1821 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value))
 {
-    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#else
+    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#endif
 }

@@ -1777 +1834 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value))
 {
-    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#else
+    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#endif
 }

@@ -1786 +1847 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value))
 {
-    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#else
+    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#endif
 }

@@ -1895 +1960 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1906 +1975 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1917 +1990 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1927 +2004 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1937 +2018 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1948 +2033 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1959 +2048 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1969 +2062 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1980 +2077 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1989 +2090 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem);
+#endif
 }

@@ -1998 +2103 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value))
 {
-    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u8Value);
+#else
+    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value);
+#endif
 }

@@ -2007 +2116 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value))
 {
-    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u16Value);
+#else
+    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value);
+#endif
 }

@@ -2016 +2129 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value))
 {
-    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u32Value);
+#else
+    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value);
+#endif
 }

@@ -2025 +2142 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value))
 {
-    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u64Value);
+#else
+    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value);
+#endif
 }

@@ -11051 +11172 @@
      */
     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              enmOp == kIemNativeEmitMemOp_Store
+                                              ? RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStore)
+                                              : RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch));
+# endif
     switch (enmOp)
     {

@@ -11767 +11894 @@
      */
     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
     if (idxRegValue != UINT8_MAX)
     {

@@ -12111 +12242 @@
      */
     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
     switch (cbMem)
     {

@@ -12620 +12755 @@
     off = iemNativeEmitTlbLookup(pReNative, off, &TlbState, iSegReg, cbMem, fAlignMask, fAccess,
                                  idxLabelTlbLookup, idxLabelTlbMiss, idxRegMemResult);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, TlbState.idxReg1, TlbState.idxReg2,
+                                            RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForMapped));
+# endif
 
     /* [idxVarUnmapInfo] = 0; */
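The _Sx_ helpers above widen the fetched value through a chain of casts before returning it in a 64-bit register. A minimal, self-contained illustration of why the chains differ, using only standard C semantics (not VirtualBox code):

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint8_t const u8 = 0x80; /* -128 when reinterpreted as int8_t */

    /* _Sx_U32 pattern: sign-extend 8 -> 32, then zero-extend 32 -> 64. */
    uint64_t const r32 = (uint64_t)(uint32_t)(int32_t)(int8_t)u8;
    assert(r32 == UINT64_C(0x00000000FFFFFF80));

    /* _Sx_U64 pattern: sign-extend 8 -> 64 all the way. */
    uint64_t const r64 = (uint64_t)(int64_t)(int8_t)u8;
    assert(r64 == UINT64_C(0xFFFFFFFFFFFFFF80));
    return 0;
}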
trunk/src/VBox/VMM/VMMR3/IEMR3.cpp
Changed from r102557 to r102800:

@@ -297 +297 @@
     STAMR3RegisterF(pVM, &pVCpu->iem.s.DataTlb.cTlbHits, STAMTYPE_U64_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
                     "Data TLB hits", "/IEM/CPU%u/DataTlb-Hits", idCpu);
+# ifdef VBOX_WITH_IEM_RECOMPILER
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStack, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                    "Data TLB native stack access hits", "/IEM/CPU%u/DataTlb-Hits-Native-Stack", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForFetch, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                    "Data TLB native data fetch hits", "/IEM/CPU%u/DataTlb-Hits-Native-Fetch", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForStore, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                    "Data TLB native data store hits", "/IEM/CPU%u/DataTlb-Hits-Native-Store", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeTlbHitsForMapped, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_NONE,
+                    "Data TLB native mapped data hits", "/IEM/CPU%u/DataTlb-Hits-Native-Mapped", idCpu);
+# endif
 # endif
     STAMR3RegisterF(pVM, &pVCpu->iem.s.CodeTlb.cTlbMisses, STAMTYPE_U32_RESET, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
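The new counters are registered in the regular STAM tree, so they can be inspected from the host while a VM is running with the native recompiler. A usage sketch (assuming a VM named "testvm"; the pattern is the usual STAM wildcard form):

    VBoxManage debugvm "testvm" statistics --pattern "/IEM/CPU*/DataTlb-Hits-Native-*"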
trunk/src/VBox/VMM/include/IEMInternal.h
Changed from r102790 to r102800:

@@ -1710 +1710 @@
     /** Pointer to the native recompiler state for ring-3. */
     R3PTRTYPE(struct IEMRECOMPILERSTATE *) pNativeRecompilerStateR3;
-    /** Alignment padding. */
-    uint64_t auAlignment10[3];
+
     /** Statistics: Times TB execution was broken off before reaching the end. */
     STAMCOUNTER StatTbExecBreaks;

@@ -1736 +1735 @@
     /** Native TB statistics: Number of threaded calls per TB that weren't recompiled. */
     STAMPROFILE StatNativeCallsThreaded;
+    /** Native recompiled execution: TLB hits for data fetches. */
+    STAMCOUNTER StatNativeTlbHitsForFetch;
+    /** Native recompiled execution: TLB hits for data stores. */
+    STAMCOUNTER StatNativeTlbHitsForStore;
+    /** Native recompiled execution: TLB hits for stack accesses. */
+    STAMCOUNTER StatNativeTlbHitsForStack;
+    /** Native recompiled execution: TLB hits for mapped accesses. */
+    STAMCOUNTER StatNativeTlbHitsForMapped;
+    uint64_t au64Padding[7];
     /** @} */
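The padding shuffle (dropping auAlignment10[3] up front and adding au64Padding[7] after the new counters) keeps the members 8-byte aligned and the overall structure layout stable. A hedged sketch of how such layout assumptions could be pinned down with IPRT's compile-time asserts, assuming it sits inside IEMInternal.h where IEMCPU and STAMCOUNTER are visible (illustrative, not part of the changeset):

/* The native emitter below treats STAMCOUNTER as a plain uint64_t. */
AssertCompile(sizeof(STAMCOUNTER) == sizeof(uint64_t));
/* Keep the new counters naturally aligned for the inc-qword emitter. */
AssertCompileMemberAlignment(IEMCPU, StatNativeTlbHitsForFetch, 8);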
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
Changed from r102790 to r102800:

@@ -881 +881 @@
 
 
+/** This is just a typesafe alternative to RT_UOFFSETOF. */
+DECL_FORCE_INLINE(uint32_t) iemNativeVCpuOffsetFromStamCounterPtr(PVMCPU pVCpu, PSTAMCOUNTER pStamCounter)
+{
+    uintptr_t const off = (uintptr_t)pStamCounter - (uintptr_t)pVCpu;
+    Assert(off < sizeof(VMCPU));
+    return off;
+}
+
+
+/** This is just a typesafe alternative to RT_UOFFSETOF. */
+DECL_FORCE_INLINE(uint32_t) iemNativeVCpuOffsetFromU64Ptr(PVMCPU pVCpu, uint64_t *pu64)
+{
+    uintptr_t const off = (uintptr_t)pu64 - (uintptr_t)pVCpu;
+    Assert(off < sizeof(VMCPU));
+    return off;
+}
+
+
+/**
+ * Emits code for incrementing a statistics counter (STAMCOUNTER/uint64_t) in VMCPU.
+ *
+ * @note The two temp registers are not required for AMD64.  ARM64 always
+ *       requires the first, and the 2nd is needed if the offset cannot be
+ *       encoded as an immediate.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitIncStamCounterInVCpuEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxTmp1, uint8_t idxTmp2, uint32_t offVCpu)
+{
+#ifdef RT_ARCH_AMD64
+    /* inc qword [pVCpu + offVCpu] */
+    pCodeBuf[off++] = X86_OP_REX_W;
+    pCodeBuf[off++] = 0xff;
+    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, 0, offVCpu);
+    RT_NOREF(idxTmp1, idxTmp2);
+
+#elif defined(RT_ARCH_ARM64)
+    /* Determine how we're to access pVCpu first. */
+    uint32_t const cbData = sizeof(STAMCOUNTER);
+    if (offVCpu < _4K * cbData && !(offVCpu & (cbData - 1)))
+    {
+        /* Use the unsigned variant of ldr Xt, [<Xn|SP>, #off]. */
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1,
+                                                   IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, idxTmp1,
+                                                   IEMNATIVE_REG_FIXED_PVMCPU, offVCpu / cbData);
+    }
+    else if (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx) < (unsigned)(_4K * cbData) && !(offVCpu & (cbData - 1)))
+    {
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PCPUMCTX,
+                                                   (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRUOff(kArmv8A64InstrLdStType_St_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PCPUMCTX,
+                                                   (offVCpu - RT_UOFFSETOF(VMCPU, cpum.GstCtx)) / cbData);
+    }
+    else
+    {
+        /* The offset is too large, so we must load it into a register and use
+           ldr Xt, [<Xn|SP>, (<Wm>|<Xm>)]. */
+        off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, idxTmp2, offVCpu);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_Ld_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PVMCPU, idxTmp2);
+        pCodeBuf[off++] = Armv8A64MkInstrAddUImm12(idxTmp1, idxTmp1, 1);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(kArmv8A64InstrLdStType_St_Dword, idxTmp1, IEMNATIVE_REG_FIXED_PVMCPU, idxTmp2);
+    }
+
+#else
+# error "port me"
+#endif
+    return off;
+}
+
+
+/**
+ * Emits code for incrementing a statistics counter (STAMCOUNTER/uint64_t) in VMCPU.
+ *
+ * @note The two temp registers are not required for AMD64.  ARM64 always
+ *       requires the first, and the 2nd is needed if the offset cannot be
+ *       encoded as an immediate.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitIncStamCounterInVCpu(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxTmp1, uint8_t idxTmp2, uint32_t offVCpu)
+{
+#ifdef RT_ARCH_AMD64
+    off = iemNativeEmitIncStamCounterInVCpuEx(iemNativeInstrBufEnsure(pReNative, off, 7), off, idxTmp1, idxTmp2, offVCpu);
+#elif defined(RT_ARCH_ARM64)
+    off = iemNativeEmitIncStamCounterInVCpuEx(iemNativeInstrBufEnsure(pReNative, off, 4+3), off, idxTmp1, idxTmp2, offVCpu);
+#else
+# error "port me"
+#endif
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
+
 /**
  * Emits a gprdst = gprsrc load.
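On AMD64 the Ex worker emits a single read-modify-write increment, REX.W plus opcode FF with ModRM reg field /0 (inc qword [pVCpu + offVCpu]), so the temporaries are unused; ARM64 has no memory-operand increment and needs the load/add/store triple above, which is why the scratch registers are required there. A usage sketch mirroring the call sites added in IEMAllN8veRecompiler.cpp (idxScratch1 and idxScratch2 are hypothetical names for scratch GPRs the caller owns):

#ifdef VBOX_WITH_STATISTICS
    /* After a native TLB hit: bump the counter without clobbering allocated registers. */
    off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, idxScratch1, idxScratch2,
                                            RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch));
#endif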