Changeset 102800 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Jan 9, 2024, 10:19:17 PM
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
--- IEMAllN8veRecompiler.cpp (r102793)
+++ IEMAllN8veRecompiler.cpp (r102800)
@@ -108,4 +108,13 @@
 #if (defined(RT_ARCH_AMD64) && 1) || (defined(RT_ARCH_ARM64) && 1)
 # define IEMNATIVE_WITH_TLB_LOOKUP
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_STORE
+#endif
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP
+# define IEMNATIVE_WITH_TLB_LOOKUP_MAPPING
 #endif
 #ifdef IEMNATIVE_WITH_TLB_LOOKUP
@@ -1664,4 +1673,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1674,4 +1687,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1684,4 +1701,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1693,4 +1714,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1702,4 +1727,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1712,4 +1741,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1722,4 +1755,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1731,4 +1768,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1741,4 +1782,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1750,4 +1795,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg))
 {
-    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem); /** @todo use iemMemFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem);
+#else
+    return iemMemFetchDataU64Jmp(pVCpu, iSegReg, GCPtrMem);
+#endif
 }
@@ -1759,4 +1808,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint8_t u8Value))
 {
-    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#else
+    iemMemStoreDataU8Jmp(pVCpu, iSegReg, GCPtrMem, u8Value);
+#endif
 }
@@ -1768,4 +1821,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint16_t u16Value))
 {
-    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#else
+    iemMemStoreDataU16Jmp(pVCpu, iSegReg, GCPtrMem, u16Value);
+#endif
 }
@@ -1777,4 +1834,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint32_t u32Value))
 {
-    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#else
+    iemMemStoreDataU32Jmp(pVCpu, iSegReg, GCPtrMem, u32Value);
+#endif
 }
@@ -1786,4 +1847,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t iSegReg, uint64_t u64Value))
 {
-    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#else
+    iemMemStoreDataU64Jmp(pVCpu, iSegReg, GCPtrMem, u64Value);
+#endif
 }
@@ -1895,4 +1960,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1906,4 +1975,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint16_t)(int16_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1917,4 +1990,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1927,4 +2004,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU8_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int8_t)iemMemFetchDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int8_t)iemMemFlatFetchDataU8Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1937,4 +2018,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1948,4 +2033,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(uint32_t)(int32_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1959,4 +2048,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU16_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int16_t)iemMemFetchDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int16_t)iemMemFlatFetchDataU16Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1969,4 +2062,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1980,4 +2077,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU32_Sx_U64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return (uint64_t)(int64_t)(int32_t)iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return (uint64_t)(int64_t)(int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1989,4 +2090,8 @@
 IEM_DECL_NATIVE_HLP_DEF(uint64_t, iemNativeHlpMemFlatFetchDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem))
 {
-    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem); /** @todo use iemMemFlatFetchDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_FETCH
+    return iemMemFetchDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+#else
+    return iemMemFlatFetchDataU64Jmp(pVCpu, GCPtrMem);
+#endif
 }
@@ -1998,4 +2103,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU8,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint8_t u8Value))
 {
-    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value); /** @todo use iemMemStoreDataU8SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU8SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u8Value);
+#else
+    iemMemFlatStoreDataU8Jmp(pVCpu, GCPtrMem, u8Value);
+#endif
 }
@@ -2007,4 +2116,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU16,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t u16Value))
 {
-    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value); /** @todo use iemMemStoreDataU16SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU16SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u16Value);
+#else
+    iemMemFlatStoreDataU16Jmp(pVCpu, GCPtrMem, u16Value);
+#endif
 }
@@ -2016,4 +2129,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU32,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t u32Value))
 {
-    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value); /** @todo use iemMemStoreDataU32SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u32Value);
+#else
+    iemMemFlatStoreDataU32Jmp(pVCpu, GCPtrMem, u32Value);
+#endif
 }
@@ -2025,4 +2142,8 @@
 IEM_DECL_NATIVE_HLP_DEF(void, iemNativeHlpMemFlatStoreDataU64,(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t u64Value))
 {
-    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value); /** @todo use iemMemStoreDataU64SafeJmp */
+#ifdef IEMNATIVE_WITH_TLB_LOOKUP_STORE
+    iemMemStoreDataU64SafeJmp(pVCpu, UINT8_MAX, GCPtrMem, u64Value);
+#else
+    iemMemFlatStoreDataU64Jmp(pVCpu, GCPtrMem, u64Value);
+#endif
 }
@@ -11051,4 +11172,10 @@
      */
     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              enmOp == kIemNativeEmitMemOp_Store
+                                              ? RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForFetch)
+                                              : RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStore));
+# endif
     switch (enmOp)
     {
@@ -11767,4 +11894,8 @@
      */
     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 64);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
     if (idxRegValue != UINT8_MAX)
     {
@@ -12111,4 +12242,8 @@
      */
     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 32);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpuEx(pCodeBuf, off, TlbState.idxReg1, TlbState.idxReg2,
+                                              RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForStack));
+# endif
     switch (cbMem)
     {
@@ -12620,4 +12755,8 @@
     off = iemNativeEmitTlbLookup(pReNative, off, &TlbState, iSegReg, cbMem, fAlignMask, fAccess,
                                  idxLabelTlbLookup, idxLabelTlbMiss, idxRegMemResult);
+# ifdef VBOX_WITH_STATISTICS
+    off = iemNativeEmitIncStamCounterInVCpu(pReNative, off, TlbState.idxReg1, TlbState.idxReg2,
+                                            RT_UOFFSETOF(VMCPUCC, iem.s.StatNativeTlbHitsForMapped));
+# endif
 
     /* [idxVarUnmapInfo] = 0; */
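The recurring pattern in the hunks above is a compile-time switch: when IEMNATIVE_WITH_TLB_LOOKUP_FETCH or IEMNATIVE_WITH_TLB_LOOKUP_STORE is defined, each recompiler helper calls the TLB-aware *SafeJmp accessor (with the flat variants passing UINT8_MAX as the segment index), and otherwise it falls back to the plain *Jmp accessor. The sketch below is a minimal, self-contained illustration of that dispatch shape only; DEMO_WITH_TLB_LOOKUP_FETCH, guest_fetch_u16_safe and guest_fetch_u16_fast are hypothetical stand-ins, not VirtualBox APIs.

// Illustrative only: mirrors the #ifdef dispatch used by the changed helpers.
#include <cstdint>
#include <cstdio>

#define DEMO_WITH_TLB_LOOKUP_FETCH   /* comment out to take the fallback path */

// Hypothetical "safe" accessor taking an explicit segment index (UINT8_MAX = flat).
static uint16_t guest_fetch_u16_safe(uint8_t iSegReg, uint64_t addr)
{
    (void)iSegReg;
    return (uint16_t)(addr & 0xffff);   // stand-in for a TLB-backed guest read
}

// Hypothetical "fast" accessor without the segment parameter.
static uint16_t guest_fetch_u16_fast(uint64_t addr)
{
    return (uint16_t)(addr & 0xffff);
}

// Shaped like iemNativeHlpMemFlatFetchDataU16_Sx_U32: fetch 16 bits, sign-extend
// to 32 bits, and hand the result back zero-extended in a 64-bit register.
static uint64_t helper_fetch_u16_sx_u32(uint64_t addr)
{
#ifdef DEMO_WITH_TLB_LOOKUP_FETCH
    return (uint64_t)(uint32_t)(int32_t)(int16_t)guest_fetch_u16_safe(UINT8_MAX, addr);
#else
    return (uint64_t)(uint32_t)(int32_t)(int16_t)guest_fetch_u16_fast(addr);
#endif
}

int main()
{
    // 0x8000 sign-extends to 0xffff8000 and stays zero-extended in the upper half.
    std::printf("%#llx\n", (unsigned long long)helper_fetch_u16_sx_u32(0x8000));
    return 0;
}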