Changeset 93515 in vbox for trunk/include/iprt
- Timestamp: Jan 31, 2022, 10:17:19 PM
- svn:sync-xref-src-repo-rev: 149642
- Location: trunk/include/iprt
- Files: 1 edited, 1 copied
trunk/include/iprt/asm-amd64-x86.h
(diff r93115 → r93515)

 #include <iprt/types.h>
 #include <iprt/assert.h>
+#include <iprt/x86-helpers.h>
 #if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
 # error "Not on AMD64 or x86"
…
-/**
- * Tests if it a genuine Intel CPU based on the ASMCpuId(0) output.
- *
- * @returns true/false.
- * @param   uEBX    EBX return from ASMCpuId(0)
- * @param   uECX    ECX return from ASMCpuId(0)
- * @param   uEDX    EDX return from ASMCpuId(0)
- */
-DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
-{
-    /* 'GenuineIntel' */
-    return uEBX == UINT32_C(0x756e6547)      /* 'Genu' */
-        && uEDX == UINT32_C(0x49656e69)      /* 'ineI' */
-        && uECX == UINT32_C(0x6c65746e);     /* 'ntel' */
-}
-
-
 /**
  * Tests if this is a genuine Intel CPU.
  *
  * @returns true/false.
  * @remarks ASSUMES that cpuid is supported by the CPU.
  */
 DECLINLINE(bool) ASMIsIntelCpu(void)
 {
     uint32_t uEAX, uEBX, uECX, uEDX;
     ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
-    return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
+    return RTX86IsIntelCpu(uEBX, uECX, uEDX);
 }
…
-DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
-{
-    /* 'AuthenticAMD' */
-    return uEBX == UINT32_C(0x68747541)      /* 'Auth' */
-        && uEDX == UINT32_C(0x69746e65)      /* 'enti' */
-        && uECX == UINT32_C(0x444d4163);     /* 'dAMD' */
-}
…
 DECLINLINE(bool) ASMIsAmdCpu(void)
 {
     uint32_t uEAX, uEBX, uECX, uEDX;
     ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
-    return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
+    return RTX86IsAmdCpu(uEBX, uECX, uEDX);
 }
…
-DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
-{
-    /* 'CentaurHauls' */
-    return uEBX == UINT32_C(0x746e6543)      /* 'Cent' */
-        && uEDX == UINT32_C(0x48727561)      /* 'aurH' */
-        && uECX == UINT32_C(0x736c7561);     /* 'auls' */
-}
…
 DECLINLINE(bool) ASMIsViaCentaurCpu(void)
 {
     uint32_t uEAX, uEBX, uECX, uEDX;
     ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
-    return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
+    return RTX86IsViaCentaurCpu(uEBX, uECX, uEDX);
 }
…
-DECLINLINE(bool) ASMIsShanghaiCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
-{
-    /* ' Shanghai ' */
-    return uEBX == UINT32_C(0x68532020)      /* ' Sh' */
-        && uEDX == UINT32_C(0x68676e61)      /* 'angh' */
-        && uECX == UINT32_C(0x20206961);     /* 'ai ' */
-}
…
 DECLINLINE(bool) ASMIsShanghaiCpu(void)
 {
     uint32_t uEAX, uEBX, uECX, uEDX;
     ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
-    return ASMIsShanghaiCpuEx(uEBX, uECX, uEDX);
+    return RTX86IsShanghaiCpu(uEBX, uECX, uEDX);
 }
…
-DECLINLINE(bool) ASMIsHygonCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
-{
-    /* 'HygonGenuine' */
-    return uEBX == UINT32_C(0x6f677948) /* Hygo */
-        && uECX == UINT32_C(0x656e6975) /* uine */
-        && uEDX == UINT32_C(0x6e65476e); /* nGen */
-}
…
 DECLINLINE(bool) ASMIsHygonCpu(void)
 {
     uint32_t uEAX, uEBX, uECX, uEDX;
     ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
-    return ASMIsHygonCpuEx(uEBX, uECX, uEDX);
+    return RTX86IsHygonCpu(uEBX, uECX, uEDX);
 }

The same revision also drops the following inline helpers from this header; their
doc comments and bodies move essentially verbatim, under RTX86* names, into the
new x86-helpers.h shown in full below:

-DECLINLINE(bool)     ASMIsValidStdRange(uint32_t uEAX)
-DECLINLINE(bool)     ASMIsValidExtRange(uint32_t uEAX)
-DECLINLINE(bool)     ASMIsValidHypervisorRange(uint32_t uEAX)
-DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
-DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
-DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
-DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
-DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
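The retargeted wrappers keep their behavior: execute CPUID leaf 0 via ASMCpuId()
and hand EBX/ECX/EDX to a pure predicate that matches the vendor string the CPU
spreads across those registers. The following is a minimal standalone sketch of
the same pattern; it uses GCC/Clang's __get_cpuid() as a stand-in for IPRT's
ASMCpuId(), the 'GenuineIntel' constants are the ones from the diff above, and
the my* names are illustrative, not part of IPRT:

    #include <cpuid.h>     /* GCC/Clang CPUID helper, stand-in for ASMCpuId() */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Same test as RTX86IsIntelCpu(): leaf 0 returns 'Genu'/'ineI'/'ntel'. */
    static bool myIsIntelCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
    {
        return uEBX == UINT32_C(0x756e6547)      /* 'Genu' */
            && uEDX == UINT32_C(0x49656e69)      /* 'ineI' */
            && uECX == UINT32_C(0x6c65746e);     /* 'ntel' */
    }

    int main(void)
    {
        unsigned uEAX, uEBX, uECX, uEDX;
        if (!__get_cpuid(0, &uEAX, &uEBX, &uECX, &uEDX))
            return 1;                            /* CPUID not supported */
        printf("GenuineIntel: %s\n", myIsIntelCpu(uEBX, uECX, uEDX) ? "yes" : "no");
        return 0;
    }

Note the register order in the comparison: the 12-byte vendor string is laid out
EBX, EDX, ECX, which is why 'ineI' lands in EDX and 'ntel' in ECX.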
trunk/include/iprt/x86-helpers.h
r93505 r93515 1 1 /** @file 2 * IPRT - AMD64 and x86 Specific Assembly Functions.2 * IPRT - X86 and AMD64 Helpers. 3 3 */ 4 4 … … 24 24 */ 25 25 26 #ifndef IPRT_INCLUDED_ asm_amd64_x86_h27 #define IPRT_INCLUDED_ asm_amd64_x86_h26 #ifndef IPRT_INCLUDED_x86_helpers_h 27 #define IPRT_INCLUDED_x86_helpers_h 28 28 #ifndef RT_WITHOUT_PRAGMA_ONCE 29 29 # pragma once … … 31 31 32 32 #include <iprt/types.h> 33 #include <iprt/assert.h> 34 #if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86) 35 # error "Not on AMD64 or x86" 36 #endif 37 38 #if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN 39 /* Emit the intrinsics at all optimization levels. */ 40 # include <iprt/sanitized/intrin.h> 41 # pragma intrinsic(_ReadWriteBarrier) 42 # pragma intrinsic(__cpuid) 43 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/ 44 # pragma intrinsic(__cpuidex) 45 # endif 46 # pragma intrinsic(_enable) 47 # pragma intrinsic(_disable) 48 # pragma intrinsic(__rdtsc) 49 # pragma intrinsic(__readmsr) 50 # pragma intrinsic(__writemsr) 51 # pragma intrinsic(__outbyte) 52 # pragma intrinsic(__outbytestring) 53 # pragma intrinsic(__outword) 54 # pragma intrinsic(__outwordstring) 55 # pragma intrinsic(__outdword) 56 # pragma intrinsic(__outdwordstring) 57 # pragma intrinsic(__inbyte) 58 # pragma intrinsic(__inbytestring) 59 # pragma intrinsic(__inword) 60 # pragma intrinsic(__inwordstring) 61 # pragma intrinsic(__indword) 62 # pragma intrinsic(__indwordstring) 63 # pragma intrinsic(__invlpg) 64 # pragma intrinsic(__wbinvd) 65 # pragma intrinsic(__readcr0) 66 # pragma intrinsic(__readcr2) 67 # pragma intrinsic(__readcr3) 68 # pragma intrinsic(__readcr4) 69 # pragma intrinsic(__writecr0) 70 # pragma intrinsic(__writecr3) 71 # pragma intrinsic(__writecr4) 72 # pragma intrinsic(__readdr) 73 # pragma intrinsic(__writedr) 74 # ifdef RT_ARCH_AMD64 75 # pragma intrinsic(__readcr8) 76 # pragma intrinsic(__writecr8) 77 # endif 78 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2005 79 # pragma intrinsic(__halt) 80 # endif 81 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008 82 /*# pragma intrinsic(__readeflags) - buggy intrinsics in VC++ 2010, reordering/optimizers issues 83 # pragma intrinsic(__writeeflags) */ 84 # pragma intrinsic(__rdtscp) 85 # endif 86 # if defined(RT_ARCH_AMD64) && RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015 /*?*/ 87 # pragma intrinsic(_readfsbase_u64) 88 # pragma intrinsic(_readgsbase_u64) 89 # pragma intrinsic(_writefsbase_u64) 90 # pragma intrinsic(_writegsbase_u64) 91 # endif 92 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013 93 # pragma intrinsic(__lidt) 94 # pragma intrinsic(__sidt) 95 # pragma intrinsic(_lgdt) 96 # pragma intrinsic(_sgdt) 97 # endif 98 #endif 99 100 101 /* 102 * Undefine all symbols we have Watcom C/C++ #pragma aux'es for. 103 */ 104 #if defined(__WATCOMC__) && ARCH_BITS == 16 105 # include "asm-amd64-x86-watcom-16.h" 106 #elif defined(__WATCOMC__) && ARCH_BITS == 32 107 # include "asm-amd64-x86-watcom-32.h" 108 #endif 109 110 111 /** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines 112 * @ingroup grp_rt_asm 33 34 35 /** @defgroup grp_rt_x86_helpers x86 Helper Functions 36 * @ingroup grp_rt_x86 113 37 * @{ 114 38 */ 115 116 /** @todo find a more proper place for these structures? */117 118 #pragma pack(1)119 /** IDTR */120 typedef struct RTIDTR121 {122 /** Size of the IDT. */123 uint16_t cbIdt;124 /** Address of the IDT. 
*/125 #if ARCH_BITS != 64126 uint32_t pIdt;127 #else128 uint64_t pIdt;129 #endif130 } RTIDTR, RT_FAR *PRTIDTR;131 #pragma pack()132 133 #pragma pack(1)134 /** @internal */135 typedef struct RTIDTRALIGNEDINT136 {137 /** Alignment padding. */138 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];139 /** The IDTR structure. */140 RTIDTR Idtr;141 } RTIDTRALIGNEDINT;142 #pragma pack()143 144 /** Wrapped RTIDTR for preventing misalignment exceptions. */145 typedef union RTIDTRALIGNED146 {147 /** Try make sure this structure has optimal alignment. */148 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];149 /** Aligned structure. */150 RTIDTRALIGNEDINT s;151 } RTIDTRALIGNED;152 AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);153 /** Pointer to a an RTIDTR alignment wrapper. */154 typedef RTIDTRALIGNED RT_FAR *PRIDTRALIGNED;155 156 157 #pragma pack(1)158 /** GDTR */159 typedef struct RTGDTR160 {161 /** Size of the GDT. */162 uint16_t cbGdt;163 /** Address of the GDT. */164 #if ARCH_BITS != 64165 uint32_t pGdt;166 #else167 uint64_t pGdt;168 #endif169 } RTGDTR, RT_FAR *PRTGDTR;170 #pragma pack()171 172 #pragma pack(1)173 /** @internal */174 typedef struct RTGDTRALIGNEDINT175 {176 /** Alignment padding. */177 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];178 /** The GDTR structure. */179 RTGDTR Gdtr;180 } RTGDTRALIGNEDINT;181 #pragma pack()182 183 /** Wrapped RTGDTR for preventing misalignment exceptions. */184 typedef union RTGDTRALIGNED185 {186 /** Try make sure this structure has optimal alignment. */187 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];188 /** Aligned structure. */189 RTGDTRALIGNEDINT s;190 } RTGDTRALIGNED;191 AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);192 /** Pointer to a an RTGDTR alignment wrapper. */193 typedef RTGDTRALIGNED RT_FAR *PRGDTRALIGNED;194 195 196 /**197 * Gets the content of the IDTR CPU register.198 * @param pIdtr Where to store the IDTR contents.199 */200 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013201 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetIDTR(PRTIDTR pIdtr);202 #else203 DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)204 {205 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013206 __sidt(pIdtr);207 # elif RT_INLINE_ASM_GNU_STYLE208 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));209 # else210 __asm211 {212 # ifdef RT_ARCH_AMD64213 mov rax, [pIdtr]214 sidt [rax]215 # else216 mov eax, [pIdtr]217 sidt [eax]218 # endif219 }220 # endif221 }222 #endif223 224 225 /**226 * Gets the content of the IDTR.LIMIT CPU register.227 * @returns IDTR limit.228 */229 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013230 RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMGetIdtrLimit(void);231 #else232 DECLINLINE(uint16_t) ASMGetIdtrLimit(void)233 {234 RTIDTRALIGNED TmpIdtr;235 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013236 __sidt(&TmpIdtr);237 # elif RT_INLINE_ASM_GNU_STYLE238 __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));239 # else240 __asm241 {242 sidt [TmpIdtr.s.Idtr]243 }244 # endif245 return TmpIdtr.s.Idtr.cbIdt;246 }247 #endif248 249 250 /**251 * Sets the content of the IDTR CPU register.252 * @param pIdtr Where to load the IDTR contents from253 */254 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013255 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr);256 #else257 DECLINLINE(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr)258 {259 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013260 __lidt((void *)pIdtr);261 # elif RT_INLINE_ASM_GNU_STYLE262 
__asm__ __volatile__("lidt %0" : : "m" (*pIdtr));263 # else264 __asm265 {266 # ifdef RT_ARCH_AMD64267 mov rax, [pIdtr]268 lidt [rax]269 # else270 mov eax, [pIdtr]271 lidt [eax]272 # endif273 }274 # endif275 }276 #endif277 278 279 /**280 * Gets the content of the GDTR CPU register.281 * @param pGdtr Where to store the GDTR contents.282 */283 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013284 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetGDTR(PRTGDTR pGdtr);285 #else286 DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)287 {288 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013289 _sgdt(pGdtr);290 # elif RT_INLINE_ASM_GNU_STYLE291 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));292 # else293 __asm294 {295 # ifdef RT_ARCH_AMD64296 mov rax, [pGdtr]297 sgdt [rax]298 # else299 mov eax, [pGdtr]300 sgdt [eax]301 # endif302 }303 # endif304 }305 #endif306 307 308 /**309 * Sets the content of the GDTR CPU register.310 * @param pGdtr Where to load the GDTR contents from311 */312 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013313 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr);314 #else315 DECLINLINE(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr)316 {317 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013318 _lgdt((void *)pGdtr);319 # elif RT_INLINE_ASM_GNU_STYLE320 __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));321 # else322 __asm323 {324 # ifdef RT_ARCH_AMD64325 mov rax, [pGdtr]326 lgdt [rax]327 # else328 mov eax, [pGdtr]329 lgdt [eax]330 # endif331 }332 # endif333 }334 #endif335 336 337 338 /**339 * Get the cs register.340 * @returns cs.341 */342 #if RT_INLINE_ASM_EXTERNAL343 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetCS(void);344 #else345 DECLINLINE(RTSEL) ASMGetCS(void)346 {347 RTSEL SelCS;348 # if RT_INLINE_ASM_GNU_STYLE349 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));350 # else351 __asm352 {353 mov ax, cs354 mov [SelCS], ax355 }356 # endif357 return SelCS;358 }359 #endif360 361 362 /**363 * Get the DS register.364 * @returns DS.365 */366 #if RT_INLINE_ASM_EXTERNAL367 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetDS(void);368 #else369 DECLINLINE(RTSEL) ASMGetDS(void)370 {371 RTSEL SelDS;372 # if RT_INLINE_ASM_GNU_STYLE373 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));374 # else375 __asm376 {377 mov ax, ds378 mov [SelDS], ax379 }380 # endif381 return SelDS;382 }383 #endif384 385 386 /**387 * Get the ES register.388 * @returns ES.389 */390 #if RT_INLINE_ASM_EXTERNAL391 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetES(void);392 #else393 DECLINLINE(RTSEL) ASMGetES(void)394 {395 RTSEL SelES;396 # if RT_INLINE_ASM_GNU_STYLE397 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));398 # else399 __asm400 {401 mov ax, es402 mov [SelES], ax403 }404 # endif405 return SelES;406 }407 #endif408 409 410 /**411 * Get the FS register.412 * @returns FS.413 */414 #if RT_INLINE_ASM_EXTERNAL415 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetFS(void);416 #else417 DECLINLINE(RTSEL) ASMGetFS(void)418 {419 RTSEL SelFS;420 # if RT_INLINE_ASM_GNU_STYLE421 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));422 # else423 __asm424 {425 mov ax, fs426 mov [SelFS], ax427 }428 # endif429 return SelFS;430 }431 # endif432 433 #ifdef RT_ARCH_AMD64434 435 /**436 * Get the FS base register.437 * @returns FS base address.438 */439 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/440 DECLASM(uint64_t) ASMGetFSBase(void);441 #else442 DECLINLINE(uint64_t) ASMGetFSBase(void)443 {444 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015445 
return (uint64_t)_readfsbase_u64();446 # elif RT_INLINE_ASM_GNU_STYLE447 uint64_t uFSBase;448 __asm__ __volatile__("rdfsbase %0\n\t" : "=r" (uFSBase));449 return uFSBase;450 # endif451 }452 # endif453 454 455 /**456 * Set the FS base register.457 * @param uNewBase The new base value.458 */459 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/460 DECLASM(void) ASMSetFSBase(uint64_t uNewBase);461 #else462 DECLINLINE(void) ASMSetFSBase(uint64_t uNewBase)463 {464 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015465 _writefsbase_u64(uNewBase);466 # elif RT_INLINE_ASM_GNU_STYLE467 __asm__ __volatile__("wrfsbase %0\n\t" : : "r" (uNewBase));468 # endif469 }470 # endif471 472 #endif /* RT_ARCH_AMD64 */473 474 /**475 * Get the GS register.476 * @returns GS.477 */478 #if RT_INLINE_ASM_EXTERNAL479 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetGS(void);480 #else481 DECLINLINE(RTSEL) ASMGetGS(void)482 {483 RTSEL SelGS;484 # if RT_INLINE_ASM_GNU_STYLE485 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));486 # else487 __asm488 {489 mov ax, gs490 mov [SelGS], ax491 }492 # endif493 return SelGS;494 }495 #endif496 497 #ifdef RT_ARCH_AMD64498 499 /**500 * Get the GS base register.501 * @returns GS base address.502 */503 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/504 DECLASM(uint64_t) ASMGetGSBase(void);505 #else506 DECLINLINE(uint64_t) ASMGetGSBase(void)507 {508 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015509 return (uint64_t)_readgsbase_u64();510 # elif RT_INLINE_ASM_GNU_STYLE511 uint64_t uGSBase;512 __asm__ __volatile__("rdgsbase %0\n\t" : "=r" (uGSBase));513 return uGSBase;514 # endif515 }516 # endif517 518 519 /**520 * Set the GS base register.521 * @param uNewBase The new base value.522 */523 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/524 DECLASM(void) ASMSetGSBase(uint64_t uNewBase);525 #else526 DECLINLINE(void) ASMSetGSBase(uint64_t uNewBase)527 {528 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015529 _writegsbase_u64(uNewBase);530 # elif RT_INLINE_ASM_GNU_STYLE531 __asm__ __volatile__("wrgsbase %0\n\t" : : "r" (uNewBase));532 # endif533 }534 # endif535 536 #endif /* RT_ARCH_AMD64 */537 538 539 /**540 * Get the SS register.541 * @returns SS.542 */543 #if RT_INLINE_ASM_EXTERNAL544 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetSS(void);545 #else546 DECLINLINE(RTSEL) ASMGetSS(void)547 {548 RTSEL SelSS;549 # if RT_INLINE_ASM_GNU_STYLE550 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));551 # else552 __asm553 {554 mov ax, ss555 mov [SelSS], ax556 }557 # endif558 return SelSS;559 }560 #endif561 562 563 /**564 * Get the TR register.565 * @returns TR.566 */567 #if RT_INLINE_ASM_EXTERNAL568 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetTR(void);569 #else570 DECLINLINE(RTSEL) ASMGetTR(void)571 {572 RTSEL SelTR;573 # if RT_INLINE_ASM_GNU_STYLE574 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));575 # else576 __asm577 {578 str ax579 mov [SelTR], ax580 }581 # endif582 return SelTR;583 }584 #endif585 586 587 /**588 * Get the LDTR register.589 * @returns LDTR.590 */591 #if RT_INLINE_ASM_EXTERNAL592 RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetLDTR(void);593 #else594 DECLINLINE(RTSEL) ASMGetLDTR(void)595 {596 RTSEL SelLDTR;597 # if RT_INLINE_ASM_GNU_STYLE598 __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));599 # else600 __asm601 {602 sldt ax603 mov [SelLDTR], ax604 }605 # endif606 return SelLDTR;607 }608 #endif609 610 611 /**612 * Get the access rights for the segment selector.613 *614 * @returns The 
access rights on success or UINT32_MAX on failure.615 * @param uSel The selector value.616 *617 * @remarks Using UINT32_MAX for failure is chosen because valid access rights618 * always have bits 0:7 as 0 (on both Intel & AMD).619 */620 #if RT_INLINE_ASM_EXTERNAL621 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetSegAttr(uint32_t uSel);622 #else623 DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)624 {625 uint32_t uAttr;626 /* LAR only accesses 16-bit of the source operand, but eax for the627 destination operand is required for getting the full 32-bit access rights. */628 # if RT_INLINE_ASM_GNU_STYLE629 __asm__ __volatile__("lar %1, %%eax\n\t"630 "jz done%=\n\t"631 "movl $0xffffffff, %%eax\n\t"632 "done%=:\n\t"633 "movl %%eax, %0\n\t"634 : "=r" (uAttr)635 : "r" (uSel)636 : "cc", "%eax");637 # else638 __asm639 {640 lar eax, [uSel]641 jz done642 mov eax, 0ffffffffh643 done:644 mov [uAttr], eax645 }646 # endif647 return uAttr;648 }649 #endif650 651 652 /**653 * Get the [RE]FLAGS register.654 * @returns [RE]FLAGS.655 */656 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */657 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMGetFlags(void);658 #else659 DECLINLINE(RTCCUINTREG) ASMGetFlags(void)660 {661 RTCCUINTREG uFlags;662 # if RT_INLINE_ASM_GNU_STYLE663 # ifdef RT_ARCH_AMD64664 __asm__ __volatile__("pushfq\n\t"665 "popq %0\n\t"666 : "=r" (uFlags));667 # else668 __asm__ __volatile__("pushfl\n\t"669 "popl %0\n\t"670 : "=r" (uFlags));671 # endif672 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008673 uFlags = __readeflags();674 # else675 __asm676 {677 # ifdef RT_ARCH_AMD64678 pushfq679 pop [uFlags]680 # else681 pushfd682 pop [uFlags]683 # endif684 }685 # endif686 return uFlags;687 }688 #endif689 690 691 /**692 * Set the [RE]FLAGS register.693 * @param uFlags The new [RE]FLAGS value.694 */695 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - see __readeflags() above. */696 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetFlags(RTCCUINTREG uFlags);697 #else698 DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)699 {700 # if RT_INLINE_ASM_GNU_STYLE701 # ifdef RT_ARCH_AMD64702 __asm__ __volatile__("pushq %0\n\t"703 "popfq\n\t"704 : : "g" (uFlags));705 # else706 __asm__ __volatile__("pushl %0\n\t"707 "popfl\n\t"708 : : "g" (uFlags));709 # endif710 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008711 __writeeflags(uFlags);712 # else713 __asm714 {715 # ifdef RT_ARCH_AMD64716 push [uFlags]717 popfq718 # else719 push [uFlags]720 popfd721 # endif722 }723 # endif724 }725 #endif726 727 728 /**729 * Modifies the [RE]FLAGS register.730 * @returns Original value.731 * @param fAndEfl Flags to keep (applied first).732 * @param fOrEfl Flags to be set.733 */734 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. 
*/735 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl);736 #else737 DECLINLINE(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl)738 {739 RTCCUINTREG fOldEfl;740 # if RT_INLINE_ASM_GNU_STYLE741 # ifdef RT_ARCH_AMD64742 __asm__ __volatile__("pushfq\n\t"743 "movq (%%rsp), %0\n\t"744 "andq %0, %1\n\t"745 "orq %3, %1\n\t"746 "mov %1, (%%rsp)\n\t"747 "popfq\n\t"748 : "=&r" (fOldEfl),749 "=r" (fAndEfl)750 : "1" (fAndEfl),751 "rn" (fOrEfl) );752 # else753 __asm__ __volatile__("pushfl\n\t"754 "movl (%%esp), %0\n\t"755 "andl %1, (%%esp)\n\t"756 "orl %2, (%%esp)\n\t"757 "popfl\n\t"758 : "=&r" (fOldEfl)759 : "rn" (fAndEfl),760 "rn" (fOrEfl) );761 # endif762 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008763 fOldEfl = __readeflags();764 __writeeflags((fOldEfl & fAndEfl) | fOrEfl);765 # else766 __asm767 {768 # ifdef RT_ARCH_AMD64769 mov rdx, [fAndEfl]770 mov rcx, [fOrEfl]771 pushfq772 mov rax, [rsp]773 and rdx, rax774 or rdx, rcx775 mov [rsp], rdx776 popfq777 mov [fOldEfl], rax778 # else779 mov edx, [fAndEfl]780 mov ecx, [fOrEfl]781 pushfd782 mov eax, [esp]783 and edx, eax784 or edx, ecx785 mov [esp], edx786 popfd787 mov [fOldEfl], eax788 # endif789 }790 # endif791 return fOldEfl;792 }793 #endif794 795 796 /**797 * Modifies the [RE]FLAGS register by ORing in one or more flags.798 * @returns Original value.799 * @param fOrEfl The flags to be set (ORed in).800 */801 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */802 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl);803 #else804 DECLINLINE(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl)805 {806 RTCCUINTREG fOldEfl;807 # if RT_INLINE_ASM_GNU_STYLE808 # ifdef RT_ARCH_AMD64809 __asm__ __volatile__("pushfq\n\t"810 "movq (%%rsp), %0\n\t"811 "orq %1, (%%rsp)\n\t"812 "popfq\n\t"813 : "=&r" (fOldEfl)814 : "rn" (fOrEfl) );815 # else816 __asm__ __volatile__("pushfl\n\t"817 "movl (%%esp), %0\n\t"818 "orl %1, (%%esp)\n\t"819 "popfl\n\t"820 : "=&r" (fOldEfl)821 : "rn" (fOrEfl) );822 # endif823 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008824 fOldEfl = __readeflags();825 __writeeflags(fOldEfl | fOrEfl);826 # else827 __asm828 {829 # ifdef RT_ARCH_AMD64830 mov rcx, [fOrEfl]831 pushfq832 mov rdx, [rsp]833 or [rsp], rcx834 popfq835 mov [fOldEfl], rax836 # else837 mov ecx, [fOrEfl]838 pushfd839 mov edx, [esp]840 or [esp], ecx841 popfd842 mov [fOldEfl], eax843 # endif844 }845 # endif846 return fOldEfl;847 }848 #endif849 850 851 /**852 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.853 * @returns Original value.854 * @param fAndEfl The flags to keep.855 */856 #if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. 
*/857 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl);858 #else859 DECLINLINE(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl)860 {861 RTCCUINTREG fOldEfl;862 # if RT_INLINE_ASM_GNU_STYLE863 # ifdef RT_ARCH_AMD64864 __asm__ __volatile__("pushfq\n\t"865 "movq (%%rsp), %0\n\t"866 "andq %1, (%%rsp)\n\t"867 "popfq\n\t"868 : "=&r" (fOldEfl)869 : "rn" (fAndEfl) );870 # else871 __asm__ __volatile__("pushfl\n\t"872 "movl (%%esp), %0\n\t"873 "andl %1, (%%esp)\n\t"874 "popfl\n\t"875 : "=&r" (fOldEfl)876 : "rn" (fAndEfl) );877 # endif878 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008879 fOldEfl = __readeflags();880 __writeeflags(fOldEfl & fAndEfl);881 # else882 __asm883 {884 # ifdef RT_ARCH_AMD64885 mov rdx, [fAndEfl]886 pushfq887 mov rdx, [rsp]888 and [rsp], rdx889 popfq890 mov [fOldEfl], rax891 # else892 mov edx, [fAndEfl]893 pushfd894 mov edx, [esp]895 and [esp], edx896 popfd897 mov [fOldEfl], eax898 # endif899 }900 # endif901 return fOldEfl;902 }903 #endif904 905 906 /**907 * Gets the content of the CPU timestamp counter register.908 *909 * @returns TSC.910 */911 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN912 RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTSC(void);913 #else914 DECLINLINE(uint64_t) ASMReadTSC(void)915 {916 RTUINT64U u;917 # if RT_INLINE_ASM_GNU_STYLE918 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));919 # else920 # if RT_INLINE_ASM_USES_INTRIN921 u.u = __rdtsc();922 # else923 __asm924 {925 rdtsc926 mov [u.s.Lo], eax927 mov [u.s.Hi], edx928 }929 # endif930 # endif931 return u.u;932 }933 #endif934 935 936 /**937 * Gets the content of the CPU timestamp counter register and the938 * assoicated AUX value.939 *940 * @returns TSC.941 * @param puAux Where to store the AUX value.942 */943 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2008944 RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux);945 #else946 DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux)947 {948 RTUINT64U u;949 # if RT_INLINE_ASM_GNU_STYLE950 /* rdtscp is not supported by ancient linux build VM of course :-( */951 /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */952 __asm__ __volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));953 # else954 # if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008955 u.u = __rdtscp(puAux);956 # else957 __asm958 {959 rdtscp960 mov [u.s.Lo], eax961 mov [u.s.Hi], edx962 mov eax, [puAux]963 mov [eax], ecx964 }965 # endif966 # endif967 return u.u;968 }969 #endif970 971 972 /**973 * Performs the cpuid instruction returning all registers.974 *975 * @param uOperator CPUID operation (eax).976 * @param pvEAX Where to store eax.977 * @param pvEBX Where to store ebx.978 * @param pvECX Where to store ecx.979 * @param pvEDX Where to store edx.980 * @remark We're using void pointers to ease the use of special bitfield structures and such.981 */982 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN983 DECLASM(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);984 #else985 DECLINLINE(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)986 {987 # if RT_INLINE_ASM_GNU_STYLE988 # ifdef RT_ARCH_AMD64989 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;990 __asm__ __volatile__ ("cpuid\n\t"991 : "=a" (uRAX),992 "=b" (uRBX),993 "=c" (uRCX),994 "=d" (uRDX)995 : "0" (uOperator), "2" (0));996 *(uint32_t RT_FAR 
*)pvEAX = (uint32_t)uRAX;997 *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;998 *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;999 *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;1000 # else1001 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"1002 "cpuid\n\t"1003 "xchgl %%ebx, %1\n\t"1004 : "=a" (*(uint32_t *)pvEAX),1005 "=r" (*(uint32_t *)pvEBX),1006 "=c" (*(uint32_t *)pvECX),1007 "=d" (*(uint32_t *)pvEDX)1008 : "0" (uOperator), "2" (0));1009 # endif1010 1011 # elif RT_INLINE_ASM_USES_INTRIN1012 int aInfo[4];1013 __cpuid(aInfo, uOperator);1014 *(uint32_t RT_FAR *)pvEAX = aInfo[0];1015 *(uint32_t RT_FAR *)pvEBX = aInfo[1];1016 *(uint32_t RT_FAR *)pvECX = aInfo[2];1017 *(uint32_t RT_FAR *)pvEDX = aInfo[3];1018 1019 # else1020 uint32_t uEAX;1021 uint32_t uEBX;1022 uint32_t uECX;1023 uint32_t uEDX;1024 __asm1025 {1026 push ebx1027 mov eax, [uOperator]1028 cpuid1029 mov [uEAX], eax1030 mov [uEBX], ebx1031 mov [uECX], ecx1032 mov [uEDX], edx1033 pop ebx1034 }1035 *(uint32_t RT_FAR *)pvEAX = uEAX;1036 *(uint32_t RT_FAR *)pvEBX = uEBX;1037 *(uint32_t RT_FAR *)pvECX = uECX;1038 *(uint32_t RT_FAR *)pvEDX = uEDX;1039 # endif1040 }1041 #endif1042 1043 1044 /**1045 * Performs the CPUID instruction with EAX and ECX input returning ALL output1046 * registers.1047 *1048 * @param uOperator CPUID operation (eax).1049 * @param uIdxECX ecx index1050 * @param pvEAX Where to store eax.1051 * @param pvEBX Where to store ebx.1052 * @param pvECX Where to store ecx.1053 * @param pvEDX Where to store edx.1054 * @remark We're using void pointers to ease the use of special bitfield structures and such.1055 */1056 #if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN1057 DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);1058 #else1059 DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)1060 {1061 # if RT_INLINE_ASM_GNU_STYLE1062 # ifdef RT_ARCH_AMD641063 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;1064 __asm__ ("cpuid\n\t"1065 : "=a" (uRAX),1066 "=b" (uRBX),1067 "=c" (uRCX),1068 "=d" (uRDX)1069 : "0" (uOperator),1070 "2" (uIdxECX));1071 *(uint32_t RT_FAR *)pvEAX = (uint32_t)uRAX;1072 *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;1073 *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;1074 *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;1075 # else1076 __asm__ ("xchgl %%ebx, %1\n\t"1077 "cpuid\n\t"1078 "xchgl %%ebx, %1\n\t"1079 : "=a" (*(uint32_t *)pvEAX),1080 "=r" (*(uint32_t *)pvEBX),1081 "=c" (*(uint32_t *)pvECX),1082 "=d" (*(uint32_t *)pvEDX)1083 : "0" (uOperator),1084 "2" (uIdxECX));1085 # endif1086 1087 # elif RT_INLINE_ASM_USES_INTRIN1088 int aInfo[4];1089 __cpuidex(aInfo, uOperator, uIdxECX);1090 *(uint32_t RT_FAR *)pvEAX = aInfo[0];1091 *(uint32_t RT_FAR *)pvEBX = aInfo[1];1092 *(uint32_t RT_FAR *)pvECX = aInfo[2];1093 *(uint32_t RT_FAR *)pvEDX = aInfo[3];1094 1095 # else1096 uint32_t uEAX;1097 uint32_t uEBX;1098 uint32_t uECX;1099 uint32_t uEDX;1100 __asm1101 {1102 push ebx1103 mov eax, [uOperator]1104 mov ecx, [uIdxECX]1105 cpuid1106 mov [uEAX], eax1107 mov [uEBX], ebx1108 mov [uECX], ecx1109 mov [uEDX], edx1110 pop ebx1111 }1112 *(uint32_t RT_FAR *)pvEAX = uEAX;1113 *(uint32_t RT_FAR *)pvEBX = uEBX;1114 *(uint32_t RT_FAR *)pvECX = uECX;1115 *(uint32_t RT_FAR *)pvEDX = uEDX;1116 # endif1117 }1118 #endif1119 1120 1121 /**1122 * CPUID variant that initializes all 4 registers before the CPUID instruction.1123 *1124 * @returns The EAX result value.1125 * 
@param uOperator CPUID operation (eax).1126 * @param uInitEBX The value to assign EBX prior to the CPUID instruction.1127 * @param uInitECX The value to assign ECX prior to the CPUID instruction.1128 * @param uInitEDX The value to assign EDX prior to the CPUID instruction.1129 * @param pvEAX Where to store eax. Optional.1130 * @param pvEBX Where to store ebx. Optional.1131 * @param pvECX Where to store ecx. Optional.1132 * @param pvEDX Where to store edx. Optional.1133 */1134 DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,1135 void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);1136 1137 1138 /**1139 * Performs the cpuid instruction returning ecx and edx.1140 *1141 * @param uOperator CPUID operation (eax).1142 * @param pvECX Where to store ecx.1143 * @param pvEDX Where to store edx.1144 * @remark We're using void pointers to ease the use of special bitfield structures and such.1145 */1146 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1147 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX);1148 #else1149 DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX)1150 {1151 uint32_t uEBX;1152 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);1153 }1154 #endif1155 1156 1157 /**1158 * Performs the cpuid instruction returning eax.1159 *1160 * @param uOperator CPUID operation (eax).1161 * @returns EAX after cpuid operation.1162 */1163 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1164 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);1165 #else1166 DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)1167 {1168 RTCCUINTREG xAX;1169 # if RT_INLINE_ASM_GNU_STYLE1170 # ifdef RT_ARCH_AMD641171 __asm__ ("cpuid"1172 : "=a" (xAX)1173 : "0" (uOperator)1174 : "rbx", "rcx", "rdx");1175 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1176 __asm__ ("push %%ebx\n\t"1177 "cpuid\n\t"1178 "pop %%ebx\n\t"1179 : "=a" (xAX)1180 : "0" (uOperator)1181 : "ecx", "edx");1182 # else1183 __asm__ ("cpuid"1184 : "=a" (xAX)1185 : "0" (uOperator)1186 : "edx", "ecx", "ebx");1187 # endif1188 1189 # elif RT_INLINE_ASM_USES_INTRIN1190 int aInfo[4];1191 __cpuid(aInfo, uOperator);1192 xAX = aInfo[0];1193 1194 # else1195 __asm1196 {1197 push ebx1198 mov eax, [uOperator]1199 cpuid1200 mov [xAX], eax1201 pop ebx1202 }1203 # endif1204 return (uint32_t)xAX;1205 }1206 #endif1207 1208 1209 /**1210 * Performs the cpuid instruction returning ebx.1211 *1212 * @param uOperator CPUID operation (eax).1213 * @returns EBX after cpuid operation.1214 */1215 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1216 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);1217 #else1218 DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)1219 {1220 RTCCUINTREG xBX;1221 # if RT_INLINE_ASM_GNU_STYLE1222 # ifdef RT_ARCH_AMD641223 RTCCUINTREG uSpill;1224 __asm__ ("cpuid"1225 : "=a" (uSpill),1226 "=b" (xBX)1227 : "0" (uOperator)1228 : "rdx", "rcx");1229 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1230 __asm__ ("push %%ebx\n\t"1231 "cpuid\n\t"1232 "mov %%ebx, %%edx\n\t"1233 "pop %%ebx\n\t"1234 : "=a" (uOperator),1235 "=d" (xBX)1236 : "0" (uOperator)1237 : "ecx");1238 # else1239 __asm__ ("cpuid"1240 : "=a" (uOperator),1241 "=b" (xBX)1242 : "0" (uOperator)1243 : "edx", "ecx");1244 # endif1245 1246 # elif RT_INLINE_ASM_USES_INTRIN1247 int aInfo[4];1248 __cpuid(aInfo, uOperator);1249 xBX = 
aInfo[1];1250 1251 # else1252 __asm1253 {1254 push ebx1255 mov eax, [uOperator]1256 cpuid1257 mov [xBX], ebx1258 pop ebx1259 }1260 # endif1261 return (uint32_t)xBX;1262 }1263 #endif1264 1265 1266 /**1267 * Performs the cpuid instruction returning ecx.1268 *1269 * @param uOperator CPUID operation (eax).1270 * @returns ECX after cpuid operation.1271 */1272 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1273 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);1274 #else1275 DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)1276 {1277 RTCCUINTREG xCX;1278 # if RT_INLINE_ASM_GNU_STYLE1279 # ifdef RT_ARCH_AMD641280 RTCCUINTREG uSpill;1281 __asm__ ("cpuid"1282 : "=a" (uSpill),1283 "=c" (xCX)1284 : "0" (uOperator)1285 : "rbx", "rdx");1286 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1287 __asm__ ("push %%ebx\n\t"1288 "cpuid\n\t"1289 "pop %%ebx\n\t"1290 : "=a" (uOperator),1291 "=c" (xCX)1292 : "0" (uOperator)1293 : "edx");1294 # else1295 __asm__ ("cpuid"1296 : "=a" (uOperator),1297 "=c" (xCX)1298 : "0" (uOperator)1299 : "ebx", "edx");1300 1301 # endif1302 1303 # elif RT_INLINE_ASM_USES_INTRIN1304 int aInfo[4];1305 __cpuid(aInfo, uOperator);1306 xCX = aInfo[2];1307 1308 # else1309 __asm1310 {1311 push ebx1312 mov eax, [uOperator]1313 cpuid1314 mov [xCX], ecx1315 pop ebx1316 }1317 # endif1318 return (uint32_t)xCX;1319 }1320 #endif1321 1322 1323 /**1324 * Performs the cpuid instruction returning edx.1325 *1326 * @param uOperator CPUID operation (eax).1327 * @returns EDX after cpuid operation.1328 */1329 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1330 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);1331 #else1332 DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)1333 {1334 RTCCUINTREG xDX;1335 # if RT_INLINE_ASM_GNU_STYLE1336 # ifdef RT_ARCH_AMD641337 RTCCUINTREG uSpill;1338 __asm__ ("cpuid"1339 : "=a" (uSpill),1340 "=d" (xDX)1341 : "0" (uOperator)1342 : "rbx", "rcx");1343 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1344 __asm__ ("push %%ebx\n\t"1345 "cpuid\n\t"1346 "pop %%ebx\n\t"1347 : "=a" (uOperator),1348 "=d" (xDX)1349 : "0" (uOperator)1350 : "ecx");1351 # else1352 __asm__ ("cpuid"1353 : "=a" (uOperator),1354 "=d" (xDX)1355 : "0" (uOperator)1356 : "ebx", "ecx");1357 # endif1358 1359 # elif RT_INLINE_ASM_USES_INTRIN1360 int aInfo[4];1361 __cpuid(aInfo, uOperator);1362 xDX = aInfo[3];1363 1364 # else1365 __asm1366 {1367 push ebx1368 mov eax, [uOperator]1369 cpuid1370 mov [xDX], edx1371 pop ebx1372 }1373 # endif1374 return (uint32_t)xDX;1375 }1376 #endif1377 1378 1379 /**1380 * Checks if the current CPU supports CPUID.1381 *1382 * @returns true if CPUID is supported.1383 */1384 #ifdef __WATCOMC__1385 DECLASM(bool) ASMHasCpuId(void);1386 #else1387 DECLINLINE(bool) ASMHasCpuId(void)1388 {1389 # ifdef RT_ARCH_AMD641390 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. 
*/1391 # else /* !RT_ARCH_AMD64 */1392 bool fRet = false;1393 # if RT_INLINE_ASM_GNU_STYLE1394 uint32_t u1;1395 uint32_t u2;1396 __asm__ ("pushf\n\t"1397 "pop %1\n\t"1398 "mov %1, %2\n\t"1399 "xorl $0x200000, %1\n\t"1400 "push %1\n\t"1401 "popf\n\t"1402 "pushf\n\t"1403 "pop %1\n\t"1404 "cmpl %1, %2\n\t"1405 "setne %0\n\t"1406 "push %2\n\t"1407 "popf\n\t"1408 : "=m" (fRet), "=r" (u1), "=r" (u2));1409 # else1410 __asm1411 {1412 pushfd1413 pop eax1414 mov ebx, eax1415 xor eax, 0200000h1416 push eax1417 popfd1418 pushfd1419 pop eax1420 cmp eax, ebx1421 setne fRet1422 push ebx1423 popfd1424 }1425 # endif1426 return fRet;1427 # endif /* !RT_ARCH_AMD64 */1428 }1429 #endif1430 1431 1432 /**1433 * Gets the APIC ID of the current CPU.1434 *1435 * @returns the APIC ID.1436 */1437 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1438 RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMGetApicId(void);1439 #else1440 DECLINLINE(uint8_t) ASMGetApicId(void)1441 {1442 RTCCUINTREG xBX;1443 # if RT_INLINE_ASM_GNU_STYLE1444 # ifdef RT_ARCH_AMD641445 RTCCUINTREG uSpill;1446 __asm__ __volatile__ ("cpuid"1447 : "=a" (uSpill),1448 "=b" (xBX)1449 : "0" (1)1450 : "rcx", "rdx");1451 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1452 RTCCUINTREG uSpill;1453 __asm__ __volatile__ ("mov %%ebx,%1\n\t"1454 "cpuid\n\t"1455 "xchgl %%ebx,%1\n\t"1456 : "=a" (uSpill),1457 "=rm" (xBX)1458 : "0" (1)1459 : "ecx", "edx");1460 # else1461 RTCCUINTREG uSpill;1462 __asm__ __volatile__ ("cpuid"1463 : "=a" (uSpill),1464 "=b" (xBX)1465 : "0" (1)1466 : "ecx", "edx");1467 # endif1468 1469 # elif RT_INLINE_ASM_USES_INTRIN1470 int aInfo[4];1471 __cpuid(aInfo, 1);1472 xBX = aInfo[1];1473 1474 # else1475 __asm1476 {1477 push ebx1478 mov eax, 11479 cpuid1480 mov [xBX], ebx1481 pop ebx1482 }1483 # endif1484 return (uint8_t)(xBX >> 24);1485 }1486 #endif1487 1488 1489 /**1490 * Gets the APIC ID of the current CPU using leaf 0xb.1491 *1492 * @returns the APIC ID.1493 */1494 #if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2010 /*?*/1495 RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetApicIdExt0B(void);1496 #else1497 DECLINLINE(uint32_t) ASMGetApicIdExt0B(void)1498 {1499 # if RT_INLINE_ASM_GNU_STYLE1500 RTCCUINTREG xDX;1501 # ifdef RT_ARCH_AMD641502 RTCCUINTREG uSpillEax, uSpillEcx;1503 __asm__ __volatile__ ("cpuid"1504 : "=a" (uSpillEax),1505 "=c" (uSpillEcx),1506 "=d" (xDX)1507 : "0" (0xb),1508 "1" (0)1509 : "rbx");1510 # elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)1511 RTCCUINTREG uSpillEax, uSpillEcx, uSpillEbx;1512 __asm__ __volatile__ ("mov %%ebx,%2\n\t"1513 "cpuid\n\t"1514 "xchgl %%ebx,%2\n\t"1515 : "=a" (uSpillEax),1516 "=c" (uSpillEcx),1517 "=rm" (uSpillEbx),1518 "=d" (xDX)1519 : "0" (0xb),1520 "1" (0));1521 # else1522 RTCCUINTREG uSpillEax, uSpillEcx;1523 __asm__ __volatile__ ("cpuid"1524 : "=a" (uSpillEax),1525 "=c" (uSpillEcx),1526 "=d" (xDX)1527 : "0" (0xb),1528 "1" (0)1529 : "ebx");1530 # endif1531 return (uint32_t)xDX;1532 1533 # elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/1534 1535 int aInfo[4];1536 __cpuidex(aInfo, 0xb, 0);1537 return aInfo[3];1538 1539 # else1540 RTCCUINTREG xDX;1541 __asm1542 {1543 push ebx1544 mov eax, 0xb1545 xor ecx, ecx1546 cpuid1547 mov [xDX], edx1548 pop ebx1549 }1550 return (uint32_t)xDX;1551 # endif1552 }1553 #endif1554 1555 1556 /**1557 * Gets the APIC ID of the current CPU using leaf 8000001E.1558 *1559 * @returns the APIC ID.1560 */1561 DECLINLINE(uint32_t) ASMGetApicIdExt8000001E(void)1562 {1563 return ASMCpuId_EAX(0x8000001e);1564 }1565 39 1566 40 
… … 1573 47 * @param uEDX EDX return from ASMCpuId(0) 1574 48 */ 1575 DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)49 DECLINLINE(bool) RTX86IsIntelCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX) 1576 50 { 1577 51 /* 'GenuineIntel' */ … … 1583 57 1584 58 /** 1585 * Tests if this is a genuine Intel CPU.1586 *1587 * @returns true/false.1588 * @remarks ASSUMES that cpuid is supported by the CPU.1589 */1590 DECLINLINE(bool) ASMIsIntelCpu(void)1591 {1592 uint32_t uEAX, uEBX, uECX, uEDX;1593 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);1594 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);1595 }1596 1597 1598 /**1599 59 * Tests if it an authentic AMD CPU based on the ASMCpuId(0) output. 1600 60 * … … 1604 64 * @param uEDX EDX return from ASMCpuId(0) 1605 65 */ 1606 DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)66 DECLINLINE(bool) RTX86IsAmdCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX) 1607 67 { 1608 68 /* 'AuthenticAMD' */ … … 1614 74 1615 75 /** 1616 * Tests if this is an authentic AMD CPU.1617 *1618 * @returns true/false.1619 * @remarks ASSUMES that cpuid is supported by the CPU.1620 */1621 DECLINLINE(bool) ASMIsAmdCpu(void)1622 {1623 uint32_t uEAX, uEBX, uECX, uEDX;1624 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);1625 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);1626 }1627 1628 1629 /**1630 76 * Tests if it a centaur hauling VIA CPU based on the ASMCpuId(0) output. 1631 77 * … … 1635 81 * @param uEDX EDX return from ASMCpuId(0). 1636 82 */ 1637 DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)83 DECLINLINE(bool) RTX86IsViaCentaurCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX) 1638 84 { 1639 85 /* 'CentaurHauls' */ … … 1645 91 1646 92 /** 1647 * Tests if this is a centaur hauling VIA CPU.1648 *1649 * @returns true/false.1650 * @remarks ASSUMES that cpuid is supported by the CPU.1651 */1652 DECLINLINE(bool) ASMIsViaCentaurCpu(void)1653 {1654 uint32_t uEAX, uEBX, uECX, uEDX;1655 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);1656 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);1657 }1658 1659 1660 /**1661 93 * Tests if it a Shanghai CPU based on the ASMCpuId(0) output. 1662 94 * … … 1666 98 * @param uEDX EDX return from ASMCpuId(0). 1667 99 */ 1668 DECLINLINE(bool) ASMIsShanghaiCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)100 DECLINLINE(bool) RTX86IsShanghaiCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX) 1669 101 { 1670 102 /* ' Shanghai ' */ … … 1676 108 1677 109 /** 1678 * Tests if this is a Shanghai CPU.1679 *1680 * @returns true/false.1681 * @remarks ASSUMES that cpuid is supported by the CPU.1682 */1683 DECLINLINE(bool) ASMIsShanghaiCpu(void)1684 {1685 uint32_t uEAX, uEBX, uECX, uEDX;1686 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);1687 return ASMIsShanghaiCpuEx(uEBX, uECX, uEDX);1688 }1689 1690 1691 /**1692 110 * Tests if it a genuine Hygon CPU based on the ASMCpuId(0) output. 
1693 111 * … … 1697 115 * @param uEDX EDX return from ASMCpuId(0) 1698 116 */ 1699 DECLINLINE(bool) ASMIsHygonCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)117 DECLINLINE(bool) RTX86IsHygonCpu(uint32_t uEBX, uint32_t uECX, uint32_t uEDX) 1700 118 { 1701 119 /* 'HygonGenuine' */ … … 1707 125 1708 126 /** 1709 * Tests if this is a genuine Hygon CPU.1710 *1711 * @returns true/false.1712 * @remarks ASSUMES that cpuid is supported by the CPU.1713 */1714 DECLINLINE(bool) ASMIsHygonCpu(void)1715 {1716 uint32_t uEAX, uEBX, uECX, uEDX;1717 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);1718 return ASMIsHygonCpuEx(uEBX, uECX, uEDX);1719 }1720 1721 1722 /**1723 127 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range. 1724 128 * … … 1731 135 * picked out of thin air. 1732 136 */ 1733 DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)137 DECLINLINE(bool) RTX86IsValidStdRange(uint32_t uEAX) 1734 138 { 1735 139 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff); … … 1749 153 * picked out of thin air. 1750 154 */ 1751 DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)155 DECLINLINE(bool) RTX86IsValidExtRange(uint32_t uEAX) 1752 156 { 1753 157 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff); … … 1763 167 * @param uEAX The EAX value of CPUID leaf 0x40000000. 1764 168 * 1765 * @note Unlike ASMIsValidStdRange() and ASMIsValidExtRange(), a single leaf1766 * is okay here. So, you always need to check the range.169 * @note Unlike RTX86IsValidStdRange() and RTX86IsValidExtRange(), a single 170 * leaf is okay here. So, you always need to check the range. 1767 171 * @remarks The upper range limit is take from the intel docs. 1768 172 */ 1769 DECLINLINE(bool) ASMIsValidHypervisorRange(uint32_t uEAX)173 DECLINLINE(bool) RTX86IsValidHypervisorRange(uint32_t uEAX) 1770 174 { 1771 175 return uEAX >= UINT32_C(0x40000000) && uEAX <= UINT32_C(0x4fffffff); … … 1779 183 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001). 1780 184 */ 1781 DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)185 DECLINLINE(uint32_t) RTX86GetCpuFamily(uint32_t uEAX) 1782 186 { 1783 187 return ((uEAX >> 8) & 0xf) == 0xf … … 1793 197 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001). 1794 198 */ 1795 DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)199 DECLINLINE(uint32_t) RTX86GetCpuModelIntel(uint32_t uEAX) 1796 200 { 1797 201 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */ … … 1807 211 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001). 1808 212 */ 1809 DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)213 DECLINLINE(uint32_t) RTX86GetCpuModelAMD(uint32_t uEAX) 1810 214 { 1811 215 return ((uEAX >> 8) & 0xf) == 0xf … … 1820 224 * @returns Model. 1821 225 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001). 1822 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu(). 1823 */ 1824 DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel) 226 * @param fIntel Whether it's an intel CPU. Use RTX86IsIntelCpu() or 227 * RTX86IsIntelCpu(). 228 */ 229 DECLINLINE(uint32_t) RTX86GetCpuModel(uint32_t uEAX, bool fIntel) 1825 230 { 1826 231 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */ … … 1836 241 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001). 
1837 242 */ 1838 DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)243 DECLINLINE(uint32_t) RTX86GetCpuStepping(uint32_t uEAX) 1839 244 { 1840 245 return uEAX & 0xf; … … 1842 247 1843 248 1844 /**1845 * Get cr0.1846 * @returns cr0.1847 */1848 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1849 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR0(void);1850 #else1851 DECLINLINE(RTCCUINTXREG) ASMGetCR0(void)1852 {1853 RTCCUINTXREG uCR0;1854 # if RT_INLINE_ASM_USES_INTRIN1855 uCR0 = __readcr0();1856 1857 # elif RT_INLINE_ASM_GNU_STYLE1858 # ifdef RT_ARCH_AMD641859 __asm__ __volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0));1860 # else1861 __asm__ __volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0));1862 # endif1863 # else1864 __asm1865 {1866 # ifdef RT_ARCH_AMD641867 mov rax, cr01868 mov [uCR0], rax1869 # else1870 mov eax, cr01871 mov [uCR0], eax1872 # endif1873 }1874 # endif1875 return uCR0;1876 }1877 #endif1878 1879 1880 /**1881 * Sets the CR0 register.1882 * @param uCR0 The new CR0 value.1883 */1884 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1885 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR0(RTCCUINTXREG uCR0);1886 #else1887 DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0)1888 {1889 # if RT_INLINE_ASM_USES_INTRIN1890 __writecr0(uCR0);1891 1892 # elif RT_INLINE_ASM_GNU_STYLE1893 # ifdef RT_ARCH_AMD641894 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));1895 # else1896 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));1897 # endif1898 # else1899 __asm1900 {1901 # ifdef RT_ARCH_AMD641902 mov rax, [uCR0]1903 mov cr0, rax1904 # else1905 mov eax, [uCR0]1906 mov cr0, eax1907 # endif1908 }1909 # endif1910 }1911 #endif1912 1913 1914 /**1915 * Get cr2.1916 * @returns cr2.1917 */1918 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1919 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR2(void);1920 #else1921 DECLINLINE(RTCCUINTXREG) ASMGetCR2(void)1922 {1923 RTCCUINTXREG uCR2;1924 # if RT_INLINE_ASM_USES_INTRIN1925 uCR2 = __readcr2();1926 1927 # elif RT_INLINE_ASM_GNU_STYLE1928 # ifdef RT_ARCH_AMD641929 __asm__ __volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2));1930 # else1931 __asm__ __volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2));1932 # endif1933 # else1934 __asm1935 {1936 # ifdef RT_ARCH_AMD641937 mov rax, cr21938 mov [uCR2], rax1939 # else1940 mov eax, cr21941 mov [uCR2], eax1942 # endif1943 }1944 # endif1945 return uCR2;1946 }1947 #endif1948 1949 1950 /**1951 * Sets the CR2 register.1952 * @param uCR2 The new CR0 value.1953 */1954 #if RT_INLINE_ASM_EXTERNAL1955 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR2(RTCCUINTXREG uCR2);1956 #else1957 DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2)1958 {1959 # if RT_INLINE_ASM_GNU_STYLE1960 # ifdef RT_ARCH_AMD641961 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));1962 # else1963 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));1964 # endif1965 # else1966 __asm1967 {1968 # ifdef RT_ARCH_AMD641969 mov rax, [uCR2]1970 mov cr2, rax1971 # else1972 mov eax, [uCR2]1973 mov cr2, eax1974 # endif1975 }1976 # endif1977 }1978 #endif1979 1980 1981 /**1982 * Get cr3.1983 * @returns cr3.1984 */1985 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1986 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR3(void);1987 #else1988 DECLINLINE(RTCCUINTXREG) ASMGetCR3(void)1989 {1990 RTCCUINTXREG uCR3;1991 # if RT_INLINE_ASM_USES_INTRIN1992 uCR3 = __readcr3();1993 1994 # elif RT_INLINE_ASM_GNU_STYLE1995 # ifdef RT_ARCH_AMD641996 __asm__ __volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3));1997 # else1998 __asm__ __volatile__("movl %%cr3, 
%0\t\n" : "=r" (uCR3));1999 # endif2000 # else2001 __asm2002 {2003 # ifdef RT_ARCH_AMD642004 mov rax, cr32005 mov [uCR3], rax2006 # else2007 mov eax, cr32008 mov [uCR3], eax2009 # endif2010 }2011 # endif2012 return uCR3;2013 }2014 #endif2015 2016 2017 /**2018 * Sets the CR3 register.2019 *2020 * @param uCR3 New CR3 value.2021 */2022 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2023 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR3(RTCCUINTXREG uCR3);2024 #else2025 DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3)2026 {2027 # if RT_INLINE_ASM_USES_INTRIN2028 __writecr3(uCR3);2029 2030 # elif RT_INLINE_ASM_GNU_STYLE2031 # ifdef RT_ARCH_AMD642032 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));2033 # else2034 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));2035 # endif2036 # else2037 __asm2038 {2039 # ifdef RT_ARCH_AMD642040 mov rax, [uCR3]2041 mov cr3, rax2042 # else2043 mov eax, [uCR3]2044 mov cr3, eax2045 # endif2046 }2047 # endif2048 }2049 #endif2050 2051 2052 /**2053 * Reloads the CR3 register.2054 */2055 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2056 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMReloadCR3(void);2057 #else2058 DECLINLINE(void) ASMReloadCR3(void)2059 {2060 # if RT_INLINE_ASM_USES_INTRIN2061 __writecr3(__readcr3());2062 2063 # elif RT_INLINE_ASM_GNU_STYLE2064 RTCCUINTXREG u;2065 # ifdef RT_ARCH_AMD642066 __asm__ __volatile__("movq %%cr3, %0\n\t"2067 "movq %0, %%cr3\n\t"2068 : "=r" (u));2069 # else2070 __asm__ __volatile__("movl %%cr3, %0\n\t"2071 "movl %0, %%cr3\n\t"2072 : "=r" (u));2073 # endif2074 # else2075 __asm2076 {2077 # ifdef RT_ARCH_AMD642078 mov rax, cr32079 mov cr3, rax2080 # else2081 mov eax, cr32082 mov cr3, eax2083 # endif2084 }2085 # endif2086 }2087 #endif2088 2089 2090 /**2091 * Get cr4.2092 * @returns cr4.2093 */2094 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2095 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR4(void);2096 #else2097 DECLINLINE(RTCCUINTXREG) ASMGetCR4(void)2098 {2099 RTCCUINTXREG uCR4;2100 # if RT_INLINE_ASM_USES_INTRIN2101 uCR4 = __readcr4();2102 2103 # elif RT_INLINE_ASM_GNU_STYLE2104 # ifdef RT_ARCH_AMD642105 __asm__ __volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4));2106 # else2107 __asm__ __volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4));2108 # endif2109 # else2110 __asm2111 {2112 # ifdef RT_ARCH_AMD642113 mov rax, cr42114 mov [uCR4], rax2115 # else2116 push eax /* just in case */2117 /*mov eax, cr4*/2118 _emit 0x0f2119 _emit 0x202120 _emit 0xe02121 mov [uCR4], eax2122 pop eax2123 # endif2124 }2125 # endif2126 return uCR4;2127 }2128 #endif2129 2130 2131 /**2132 * Sets the CR4 register.2133 *2134 * @param uCR4 New CR4 value.2135 */2136 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2137 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR4(RTCCUINTXREG uCR4);2138 #else2139 DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4)2140 {2141 # if RT_INLINE_ASM_USES_INTRIN2142 __writecr4(uCR4);2143 2144 # elif RT_INLINE_ASM_GNU_STYLE2145 # ifdef RT_ARCH_AMD642146 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));2147 # else2148 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));2149 # endif2150 # else2151 __asm2152 {2153 # ifdef RT_ARCH_AMD642154 mov rax, [uCR4]2155 mov cr4, rax2156 # else2157 mov eax, [uCR4]2158 _emit 0x0F2159 _emit 0x222160 _emit 0xE0 /* mov cr4, eax */2161 # endif2162 }2163 # endif2164 }2165 #endif2166 2167 2168 /**2169 * Get cr8.2170 * @returns cr8.2171 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.2172 */2173 #if RT_INLINE_ASM_EXTERNAL && 
/**
 * Get cr8.
 * @returns cr8.
 * @remark  The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTXREG) ASMGetCR8(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR8(void)
{
# ifdef RT_ARCH_AMD64
    RTCCUINTXREG uCR8;
#  if RT_INLINE_ASM_USES_INTRIN
    uCR8 = __readcr8();

#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8));
#  else
    __asm
    {
        mov     rax, cr8
        mov     [uCR8], rax
    }
#  endif
    return uCR8;
# else /* !RT_ARCH_AMD64 */
    return 0;
# endif /* !RT_ARCH_AMD64 */
}
#endif


/**
 * Get XCR0 (eXtended feature Control Register 0).
 * @returns xcr0.
 */
DECLASM(uint64_t) ASMGetXcr0(void);

/**
 * Sets the XCR0 register.
 * @param   uXcr0   The new XCR0 value.
 */
DECLASM(void) ASMSetXcr0(uint64_t uXcr0);

struct X86XSAVEAREA;
/**
 * Save extended CPU state.
 * @param   pXStateArea     Where to save the state.
 * @param   fComponents     Which state components to save.
 */
DECLASM(void) ASMXSave(struct X86XSAVEAREA RT_FAR *pXStateArea, uint64_t fComponents);

/**
 * Loads extended CPU state.
 * @param   pXStateArea     Where to load the state from.
 * @param   fComponents     Which state components to load.
 */
DECLASM(void) ASMXRstor(struct X86XSAVEAREA const RT_FAR *pXStateArea, uint64_t fComponents);


struct X86FXSTATE;
/**
 * Save FPU and SSE CPU state.
 * @param   pXStateArea     Where to save the state.
 */
DECLASM(void) ASMFxSave(struct X86FXSTATE RT_FAR *pXStateArea);

/**
 * Load FPU and SSE CPU state.
 * @param   pXStateArea     Where to load the state from.
 */
DECLASM(void) ASMFxRstor(struct X86FXSTATE const RT_FAR *pXStateArea);


/**
 * Enables interrupts (EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntEnable(void);
#else
DECLINLINE(void) ASMIntEnable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("sti\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _enable();
# else
    __asm sti
# endif
}
#endif


/**
 * Disables interrupts (!EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntDisable(void);
#else
DECLINLINE(void) ASMIntDisable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("cli\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _disable();
# else
    __asm cli
# endif
}
#endif


/**
 * Disables interrupts and returns previous xFLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMIntDisableFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
{
    RTCCUINTREG xFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "cli\n\t"
                         "popq %0\n\t"
                         : "=r" (xFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "cli\n\t"
                         "popl %0\n\t"
                         : "=r" (xFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
    xFlags = ASMGetFlags();
    _disable();
# else
    __asm {
        pushfd
        cli
        pop  [xFlags]
    }
# endif
    return xFlags;
}
#endif

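/*
 * Example (editorial, not part of the header): the usual pattern for a short
 * critical section that must not be interrupted -- save and clear EFLAGS.IF,
 * then restore the previous interrupt state with ASMSetFlags() (declared
 * earlier in this header). A minimal sketch, assuming ring-0 context.
 */
static void criticalSectionExample(void)
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags(); /* cli + previous flags */
    /* ... touch per-CPU data that an interrupt handler also uses ... */
    ASMSetFlags(fSavedFlags);                             /* restore IF as it was */
}
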
/**
 * Are interrupts enabled?
 *
 * @returns true / false.
 */
DECLINLINE(bool) ASMIntAreEnabled(void)
{
    RTCCUINTREG uFlags = ASMGetFlags();
    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
}


/**
 * Halts the CPU until interrupted.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2005
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMHalt(void);
#else
DECLINLINE(void) ASMHalt(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("hlt\n\t");
# elif RT_INLINE_ASM_USES_INTRIN
    __halt();
# else
    __asm {
        hlt
    }
# endif
}
#endif


/**
 * Reads a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMRdMsr(uint32_t uRegister);
#else
DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    u.u = __readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register.
 *
 * @param   uRegister   Register to write to.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    __writemsr(uRegister, u.u);

# else
    __asm
    {
        mov     ecx, [uRegister]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
    }
# endif
}
#endif


/**
 * Reads a machine specific register, extended version (for AMD).
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 * @param   uXDI        RDI/EDI value.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI);
#else
DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        xchg    edi, [uXDI]
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register, extended version (for AMD).
 *
 * @param   uRegister   Register to write to.
 * @param   uXDI        RDI/EDI value.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
        xchg    edi, [uXDI]
    }
# endif
}
#endif

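/*
 * Example (editorial, not part of the header): a read-modify-write of an MSR
 * with ASMRdMsr()/ASMWrMsr(). A minimal sketch, assuming ring-0; the function
 * name is illustrative, the MSR number is the architectural IA32_MISC_ENABLE.
 */
static void msrSetBitExample(uint64_t fBit)
{
    uint32_t const uMsr   = UINT32_C(0x000001a0);   /* IA32_MISC_ENABLE */
    uint64_t const uValue = ASMRdMsr(uMsr);
    if (!(uValue & fBit))
        ASMWrMsr(uMsr, uValue | fBit);
}
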
/**
 * Reads low part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u32)
                         : "c" (uRegister)
                         : "edx");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)__readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], eax
    }
# endif

    return u32;
}
#endif


/**
 * Reads high part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=d" (u32)
                         : "c" (uRegister)
                         : "eax");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)(__readmsr(uRegister) >> 32);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], edx
    }
# endif

    return u32;
}
#endif


/**
 * Gets dr0.
 *
 * @returns dr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR0(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR0(void)
{
    RTCCUINTXREG uDR0;
# if RT_INLINE_ASM_USES_INTRIN
    uDR0 = __readdr(0);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
#  else
    __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr0
        mov     [uDR0], rax
#  else
        mov     eax, dr0
        mov     [uDR0], eax
#  endif
    }
# endif
    return uDR0;
}
#endif


/**
 * Gets dr1.
 *
 * @returns dr1.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR1(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR1(void)
{
    RTCCUINTXREG uDR1;
# if RT_INLINE_ASM_USES_INTRIN
    uDR1 = __readdr(1);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
#  else
    __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr1
        mov     [uDR1], rax
#  else
        mov     eax, dr1
        mov     [uDR1], eax
#  endif
    }
# endif
    return uDR1;
}
#endif


/**
 * Gets dr2.
 *
 * @returns dr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR2(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR2(void)
{
    RTCCUINTXREG uDR2;
# if RT_INLINE_ASM_USES_INTRIN
    uDR2 = __readdr(2);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
#  else
__asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));2658 # endif2659 # else2660 __asm2661 {2662 # ifdef RT_ARCH_AMD642663 mov rax, dr22664 mov [uDR2], rax2665 # else2666 mov eax, dr22667 mov [uDR2], eax2668 # endif2669 }2670 # endif2671 return uDR2;2672 }2673 #endif2674 2675 2676 /**2677 * Gets dr3.2678 *2679 * @returns dr3.2680 */2681 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2682 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR3(void);2683 #else2684 DECLINLINE(RTCCUINTXREG) ASMGetDR3(void)2685 {2686 RTCCUINTXREG uDR3;2687 # if RT_INLINE_ASM_USES_INTRIN2688 uDR3 = __readdr(3);2689 # elif RT_INLINE_ASM_GNU_STYLE2690 # ifdef RT_ARCH_AMD642691 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));2692 # else2693 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));2694 # endif2695 # else2696 __asm2697 {2698 # ifdef RT_ARCH_AMD642699 mov rax, dr32700 mov [uDR3], rax2701 # else2702 mov eax, dr32703 mov [uDR3], eax2704 # endif2705 }2706 # endif2707 return uDR3;2708 }2709 #endif2710 2711 2712 /**2713 * Gets dr6.2714 *2715 * @returns dr6.2716 */2717 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2718 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR6(void);2719 #else2720 DECLINLINE(RTCCUINTXREG) ASMGetDR6(void)2721 {2722 RTCCUINTXREG uDR6;2723 # if RT_INLINE_ASM_USES_INTRIN2724 uDR6 = __readdr(6);2725 # elif RT_INLINE_ASM_GNU_STYLE2726 # ifdef RT_ARCH_AMD642727 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));2728 # else2729 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));2730 # endif2731 # else2732 __asm2733 {2734 # ifdef RT_ARCH_AMD642735 mov rax, dr62736 mov [uDR6], rax2737 # else2738 mov eax, dr62739 mov [uDR6], eax2740 # endif2741 }2742 # endif2743 return uDR6;2744 }2745 #endif2746 2747 2748 /**2749 * Reads and clears DR6.2750 *2751 * @returns DR6.2752 */2753 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2754 RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetAndClearDR6(void);2755 #else2756 DECLINLINE(RTCCUINTXREG) ASMGetAndClearDR6(void)2757 {2758 RTCCUINTXREG uDR6;2759 # if RT_INLINE_ASM_USES_INTRIN2760 uDR6 = __readdr(6);2761 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */2762 # elif RT_INLINE_ASM_GNU_STYLE2763 RTCCUINTXREG uNewValue = 0xffff0ff0U;/* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */2764 # ifdef RT_ARCH_AMD642765 __asm__ __volatile__("movq %%dr6, %0\n\t"2766 "movq %1, %%dr6\n\t"2767 : "=r" (uDR6)2768 : "r" (uNewValue));2769 # else2770 __asm__ __volatile__("movl %%dr6, %0\n\t"2771 "movl %1, %%dr6\n\t"2772 : "=r" (uDR6)2773 : "r" (uNewValue));2774 # endif2775 # else2776 __asm2777 {2778 # ifdef RT_ARCH_AMD642779 mov rax, dr62780 mov [uDR6], rax2781 mov rcx, rax2782 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 and 63-31 are zero. */2783 mov dr6, rcx2784 # else2785 mov eax, dr62786 mov [uDR6], eax2787 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 is zero. 
/**
 * Gets dr7.
 *
 * @returns dr7.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR7(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetDR7(void)
{
    RTCCUINTXREG uDR7;
# if RT_INLINE_ASM_USES_INTRIN
    uDR7 = __readdr(7);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
#  else
    __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr7
        mov     [uDR7], rax
#  else
        mov     eax, dr7
        mov     [uDR7], eax
#  endif
    }
# endif
    return uDR7;
}
#endif


/**
 * Sets dr0.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR0(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(0, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr0, rax
#  else
        mov     eax, [uDRVal]
        mov     dr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr1.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR1(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(1, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr1, rax
#  else
        mov     eax, [uDRVal]
        mov     dr1, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr2.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR2(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(2, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr2, rax
#  else
        mov     eax, [uDRVal]
        mov     dr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr3.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR3(RTCCUINTXREG uDRVal);
#else
DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(3, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr3, rax
#  else
        mov     eax, [uDRVal]
        mov     dr3, eax
#  endif
    }
# endif
}
#endif

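/*
 * Example (editorial, not part of the header): arming a global execution
 * breakpoint on a linear address via DR0 + DR7. A minimal sketch -- the DR7
 * layout used here (G0 is bit 1; R/W0 bits 16-17 = 00 for execution; LEN0
 * bits 18-19 = 00 for one byte) should be checked against the SDM/APM before
 * relying on it.
 */
static void armExecBreakpointExample(RTCCUINTXREG uPC)
{
    RTCCUINTXREG uDR7 = ASMGetDR7();
    uDR7 &= ~(RTCCUINTXREG)(UINT32_C(0xf) << 16);   /* R/W0=00 (exec), LEN0=00 (1 byte) */
    uDR7 |= RT_BIT_32(1);                           /* G0: enable DR0 globally */
    ASMSetDR0(uPC);                                 /* linear address to trap on */
    ASMSetDR7(uDR7);
}
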
%%dr3\n\t" : : "r" (uDRVal));2950 # else2951 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));2952 # endif2953 # else2954 __asm2955 {2956 # ifdef RT_ARCH_AMD642957 mov rax, [uDRVal]2958 mov dr3, rax2959 # else2960 mov eax, [uDRVal]2961 mov dr3, eax2962 # endif2963 }2964 # endif2965 }2966 #endif2967 2968 2969 /**2970 * Sets dr6.2971 *2972 * @param uDRVal Debug register value to write2973 */2974 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN2975 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR6(RTCCUINTXREG uDRVal);2976 #else2977 DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal)2978 {2979 # if RT_INLINE_ASM_USES_INTRIN2980 __writedr(6, uDRVal);2981 # elif RT_INLINE_ASM_GNU_STYLE2982 # ifdef RT_ARCH_AMD642983 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));2984 # else2985 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));2986 # endif2987 # else2988 __asm2989 {2990 # ifdef RT_ARCH_AMD642991 mov rax, [uDRVal]2992 mov dr6, rax2993 # else2994 mov eax, [uDRVal]2995 mov dr6, eax2996 # endif2997 }2998 # endif2999 }3000 #endif3001 3002 3003 /**3004 * Sets dr7.3005 *3006 * @param uDRVal Debug register value to write3007 */3008 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN3009 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR7(RTCCUINTXREG uDRVal);3010 #else3011 DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal)3012 {3013 # if RT_INLINE_ASM_USES_INTRIN3014 __writedr(7, uDRVal);3015 # elif RT_INLINE_ASM_GNU_STYLE3016 # ifdef RT_ARCH_AMD643017 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));3018 # else3019 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));3020 # endif3021 # else3022 __asm3023 {3024 # ifdef RT_ARCH_AMD643025 mov rax, [uDRVal]3026 mov dr7, rax3027 # else3028 mov eax, [uDRVal]3029 mov dr7, eax3030 # endif3031 }3032 # endif3033 }3034 #endif3035 3036 3037 /**3038 * Writes a 8-bit unsigned integer to an I/O port, ordered.3039 *3040 * @param Port I/O port to write to.3041 * @param u8 8-bit integer to write.3042 */3043 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN3044 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);3045 #else3046 DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)3047 {3048 # if RT_INLINE_ASM_GNU_STYLE3049 __asm__ __volatile__("outb %b1, %w0\n\t"3050 :: "Nd" (Port),3051 "a" (u8));3052 3053 # elif RT_INLINE_ASM_USES_INTRIN3054 __outbyte(Port, u8);3055 3056 # else3057 __asm3058 {3059 mov dx, [Port]3060 mov al, [u8]3061 out dx, al3062 }3063 # endif3064 }3065 #endif3066 3067 3068 /**3069 * Reads a 8-bit unsigned integer from an I/O port, ordered.3070 *3071 * @returns 8-bit integer.3072 * @param Port I/O port to read from.3073 */3074 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN3075 RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMInU8(RTIOPORT Port);3076 #else3077 DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)3078 {3079 uint8_t u8;3080 # if RT_INLINE_ASM_GNU_STYLE3081 __asm__ __volatile__("inb %w1, %b0\n\t"3082 : "=a" (u8)3083 : "Nd" (Port));3084 3085 # elif RT_INLINE_ASM_USES_INTRIN3086 u8 = __inbyte(Port);3087 3088 # else3089 __asm3090 {3091 mov dx, [Port]3092 in al, dx3093 mov [u8], al3094 }3095 # endif3096 return u8;3097 }3098 #endif3099 3100 3101 /**3102 * Writes a 16-bit unsigned integer to an I/O port, ordered.3103 *3104 * @param Port I/O port to write to.3105 * @param u16 16-bit integer to write.3106 */3107 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN3108 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);3109 #else3110 DECLINLINE(void) ASMOutU16(RTIOPORT Port, 
/**
 * Writes a 16-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u16     16-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
#else
DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outw %w1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u16));

# elif RT_INLINE_ASM_USES_INTRIN
    __outword(Port, u16);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ax, [u16]
        out     dx, ax
    }
# endif
}
#endif


/**
 * Reads a 16-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 16-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMInU16(RTIOPORT Port);
#else
DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
{
    uint16_t u16;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inw %w1, %w0\n\t"
                         : "=a" (u16)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u16 = __inword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      ax, dx
        mov     [u16], ax
    }
# endif
    return u16;
}
#endif


/**
 * Writes a 32-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u32     32-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
#else
DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outl %1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u32));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdword(Port, u32);

# else
    __asm
    {
        mov     dx, [Port]
        mov     eax, [u32]
        out     dx, eax
    }
# endif
}
#endif


/**
 * Reads a 32-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 32-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMInU32(RTIOPORT Port);
#else
DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inl %w1, %0\n\t"
                         : "=a" (u32)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = __indword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      eax, dx
        mov     [u32], eax
    }
# endif
    return u32;
}
#endif


/**
 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau8    Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c);
#else
DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsb\n\t"
                         : "+S" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbytestring(Port, (unsigned char RT_FAR *)pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    esi, eax
        rep outsb
        xchg    esi, eax
    }
# endif
}
#endif

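/*
 * Example (editorial, not part of the header): a legacy PCI configuration
 * space read using the 0xCF8/0xCFC mechanism with ASMOutU32()/ASMInU32().
 * A minimal sketch; real code must serialize access between CPUs.
 */
static uint32_t pciConfigReadExample(uint8_t uBus, uint8_t uDev, uint8_t uFn, uint8_t offReg)
{
    uint32_t const uAddr = UINT32_C(0x80000000)          /* enable bit */
                         | ((uint32_t)uBus << 16)
                         | ((uint32_t)(uDev & 0x1f) << 11)
                         | ((uint32_t)(uFn  & 0x07) <<  8)
                         | (offReg & 0xfc);
    ASMOutU32(0xcf8, uAddr);    /* select bus/device/function/register */
    return ASMInU32(0xcfc);     /* read the dword */
}
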
/**
 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau8    Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c);
#else
DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insb\n\t"
                         : "+D" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inbytestring(Port, pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    edi, eax
        rep insb
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau16   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c);
#else
DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsw\n\t"
                         : "+S" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outwordstring(Port, (unsigned short RT_FAR *)pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    esi, eax
        rep outsw
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau16   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c);
#else
DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insw\n\t"
                         : "+D" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inwordstring(Port, pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    edi, eax
        rep insw
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau32   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c);
#else
DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsl\n\t"
                         : "+S" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    esi, eax
        rep outsd
        xchg    esi, eax
    }
# endif
}
#endif

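/*
 * Example (editorial, not part of the header): the kind of transfer the
 * string I/O helpers exist for -- pulling one 512-byte sector from the
 * primary ATA data port (0x1F0) with a single rep insw. A minimal sketch;
 * the device must already have been commanded and be ready (DRQ set).
 */
static void ataReadSectorExample(uint16_t RT_FAR *pau16Sector)
{
    ASMInStrU16(0x1f0, pau16Sector, 256);   /* 256 words = 512 bytes */
}
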
/**
 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau32   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c);
#else
DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insl\n\t"
                         : "+D" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __indwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    edi, eax
        rep insd
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Invalidate page.
 *
 * @param   uPtr    Address of the page to invalidate.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidatePage(RTCCUINTXREG uPtr);
#else
DECLINLINE(void) ASMInvalidatePage(RTCCUINTXREG uPtr)
{
# if RT_INLINE_ASM_USES_INTRIN
    __invlpg((void RT_FAR *)uPtr);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invlpg %0\n\t"
                         : : "m" (*(uint8_t RT_FAR *)(uintptr_t)uPtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uPtr]
        invlpg  [rax]
#  else
        mov     eax, [uPtr]
        invlpg  [eax]
#  endif
    }
# endif
}
#endif


/**
 * Write back the internal caches and invalidate them.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMWriteBackAndInvalidateCaches(void);
#else
DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __wbinvd();

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wbinvd");
# else
    __asm
    {
        wbinvd
    }
# endif
}
#endif

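/*
 * Example (editorial, not part of the header): after changing a live page
 * table entry, the stale TLB entry for that linear address must be dropped.
 * A minimal sketch, assuming ring-0, a 4KiB mapping, and a hypothetical
 * pteWrite() helper that is not part of this header.
 */
static void remapPageExample(RTCCUINTXREG uPtr, uint64_t uNewPte)
{
    /* pteWrite(uPtr, uNewPte); -- hypothetical PTE update goes here */
    ASMInvalidatePage(uPtr);     /* invlpg: flush the old translation */
    NOREF(uNewPte);
}
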
/**
 * Invalidate internal and (perhaps) external caches without first
 * flushing dirty cache lines.  Use with extreme care.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidateInternalCaches(void);
#else
DECLINLINE(void) ASMInvalidateInternalCaches(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invd");
# else
    __asm
    {
        invd
    }
# endif
}
#endif


/**
 * Memory load/store fence, waits for any pending writes and reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMMemoryFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_mfence();
#else
    __asm
    {
        _emit   0x0f
        _emit   0xae
        _emit   0xf0
    }
#endif
}


/**
 * Memory store fence, waits for any writes to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
 */
DECLINLINE(void) ASMWriteFenceSSE(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_sfence();
#else
    __asm
    {
        _emit   0x0f
        _emit   0xae
        _emit   0xf8
    }
#endif
}


/**
 * Memory load fence, waits for any pending reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMReadFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_lfence();
#else
    __asm
    {
        _emit   0x0f
        _emit   0xae
        _emit   0xe8
    }
#endif
}

#if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)

/*
 * Clears the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
 * Must be executed in ring-0.
 */
DECLINLINE(void) ASMClearAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xca\n\t");
#else
    __asm
    {
        _emit   0x0f
        _emit   0x01
        _emit   0xca
    }
#endif
}


/*
 * Sets the AC bit in the EFLAGS register.
 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
 * Must be executed in ring-0.
 */
DECLINLINE(void) ASMSetAC(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0x01,0xcb\n\t");
#else
    __asm
    {
        _emit   0x0f
        _emit   0x01
        _emit   0xcb
    }
#endif
}

#endif /* !_MSC_VER || !RT_ARCH_AMD64 */


/*
 * Include #pragma aux definitions for Watcom C/C++.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16
# define IPRT_ASM_AMD64_X86_WATCOM_16_INSTANTIATE
# undef IPRT_INCLUDED_asm_amd64_x86_watcom_16_h
# include "asm-amd64-x86-watcom-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32
# define IPRT_ASM_AMD64_X86_WATCOM_32_INSTANTIATE
# undef IPRT_INCLUDED_asm_amd64_x86_watcom_32_h
# include "asm-amd64-x86-watcom-32.h"
#endif


 /** @} */
-#endif /* !IPRT_INCLUDED_asm_amd64_x86_h */
+#endif /* !IPRT_INCLUDED_x86_helpers_h */