VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h @ 95929

Last change on this file since 95929 was 93515, checked in by vboxsync, 3 years ago:

iprt/asm-amd64-x86.h: Split out some non-assembly functions that related more to x86.h than to asm.h, changing the function prefix from ASM to RTX86. bugref:9898

/** @file
 * IPRT - AMD64 and x86 Specific Assembly Functions.
 */

/*
 * Copyright (C) 2006-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */

#ifndef IPRT_INCLUDED_asm_amd64_x86_h
#define IPRT_INCLUDED_asm_amd64_x86_h
#ifndef RT_WITHOUT_PRAGMA_ONCE
# pragma once
#endif

#include <iprt/types.h>
#include <iprt/assert.h>
#include <iprt/x86-helpers.h>
#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
# error "Not on AMD64 or x86"
#endif

#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
/* Emit the intrinsics at all optimization levels. */
# include <iprt/sanitized/intrin.h>
# pragma intrinsic(_ReadWriteBarrier)
# pragma intrinsic(__cpuid)
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/
#  pragma intrinsic(__cpuidex)
# endif
# pragma intrinsic(_enable)
# pragma intrinsic(_disable)
# pragma intrinsic(__rdtsc)
# pragma intrinsic(__readmsr)
# pragma intrinsic(__writemsr)
# pragma intrinsic(__outbyte)
# pragma intrinsic(__outbytestring)
# pragma intrinsic(__outword)
# pragma intrinsic(__outwordstring)
# pragma intrinsic(__outdword)
# pragma intrinsic(__outdwordstring)
# pragma intrinsic(__inbyte)
# pragma intrinsic(__inbytestring)
# pragma intrinsic(__inword)
# pragma intrinsic(__inwordstring)
# pragma intrinsic(__indword)
# pragma intrinsic(__indwordstring)
# pragma intrinsic(__invlpg)
# pragma intrinsic(__wbinvd)
# pragma intrinsic(__readcr0)
# pragma intrinsic(__readcr2)
# pragma intrinsic(__readcr3)
# pragma intrinsic(__readcr4)
# pragma intrinsic(__writecr0)
# pragma intrinsic(__writecr3)
# pragma intrinsic(__writecr4)
# pragma intrinsic(__readdr)
# pragma intrinsic(__writedr)
# ifdef RT_ARCH_AMD64
#  pragma intrinsic(__readcr8)
#  pragma intrinsic(__writecr8)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2005
#  pragma intrinsic(__halt)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
/*#  pragma intrinsic(__readeflags) - buggy intrinsics in VC++ 2010, reordering/optimizers issues
#   pragma intrinsic(__writeeflags) */
#  pragma intrinsic(__rdtscp)
# endif
# if defined(RT_ARCH_AMD64) && RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015 /*?*/
#  pragma intrinsic(_readfsbase_u64)
#  pragma intrinsic(_readgsbase_u64)
#  pragma intrinsic(_writefsbase_u64)
#  pragma intrinsic(_writegsbase_u64)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
#  pragma intrinsic(__lidt)
#  pragma intrinsic(__sidt)
#  pragma intrinsic(_lgdt)
#  pragma intrinsic(_sgdt)
# endif
#endif


/*
 * Undefine all symbols we have Watcom C/C++ #pragma aux'es for.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16
# include "asm-amd64-x86-watcom-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32
# include "asm-amd64-x86-watcom-32.h"
#endif


/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
 * @ingroup grp_rt_asm
 * @{
 */

/** @todo find a more proper place for these structures? */

#pragma pack(1)
/** IDTR */
typedef struct RTIDTR
{
    /** Size of the IDT. */
    uint16_t    cbIdt;
    /** Address of the IDT. */
#if ARCH_BITS != 64
    uint32_t    pIdt;
#else
    uint64_t    pIdt;
#endif
} RTIDTR, RT_FAR *PRTIDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTIDTRALIGNEDINT
{
    /** Alignment padding. */
    uint16_t    au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The IDTR structure. */
    RTIDTR      Idtr;
} RTIDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTIDTR for preventing misalignment exceptions. */
typedef union RTIDTRALIGNED
{
    /** Try to make sure this structure has optimal alignment. */
    uint64_t    auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTIDTRALIGNEDINT s;
} RTIDTRALIGNED;
AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
/** Pointer to an RTIDTR alignment wrapper. */
typedef RTIDTRALIGNED RT_FAR *PRIDTRALIGNED;


#pragma pack(1)
/** GDTR */
typedef struct RTGDTR
{
    /** Size of the GDT. */
    uint16_t    cbGdt;
    /** Address of the GDT. */
#if ARCH_BITS != 64
    uint32_t    pGdt;
#else
    uint64_t    pGdt;
#endif
} RTGDTR, RT_FAR *PRTGDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTGDTRALIGNEDINT
{
    /** Alignment padding. */
    uint16_t    au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The GDTR structure. */
    RTGDTR      Gdtr;
} RTGDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTGDTR for preventing misalignment exceptions. */
typedef union RTGDTRALIGNED
{
    /** Try to make sure this structure has optimal alignment. */
    uint64_t    auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTGDTRALIGNEDINT s;
} RTGDTRALIGNED;
AssertCompileSize(RTGDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
/** Pointer to an RTGDTR alignment wrapper. */
typedef RTGDTRALIGNED RT_FAR *PRGDTRALIGNED;


/**
 * Gets the content of the IDTR CPU register.
 * @param   pIdtr   Where to store the IDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetIDTR(PRTIDTR pIdtr);
#else
DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __sidt(pIdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        sidt    [rax]
#  else
        mov     eax, [pIdtr]
        sidt    [eax]
#  endif
    }
# endif
}
#endif
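
/* Usage sketch (illustrative only, not part of the IPRT API): capturing the
   IDTR from ring-0 code, e.g. for logging.  The function name below is
   hypothetical. */
#if 0
static void exampleInspectIdtr(void)
{
    RTIDTR Idtr;
    ASMGetIDTR(&Idtr);
    /* Idtr.cbIdt holds the limit, Idtr.pIdt the linear base of the IDT. */
}
#endif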


/**
 * Gets the content of the IDTR.LIMIT CPU register.
 * @returns IDTR limit.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMGetIdtrLimit(void);
#else
DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
{
    RTIDTRALIGNED TmpIdtr;
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __sidt(&TmpIdtr.s.Idtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));
# else
    __asm
    {
        sidt    [TmpIdtr.s.Idtr]
    }
# endif
    return TmpIdtr.s.Idtr.cbIdt;
}
#endif


/**
 * Sets the content of the IDTR CPU register.
 * @param   pIdtr   Where to load the IDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr);
#else
DECLINLINE(void) ASMSetIDTR(const RTIDTR RT_FAR *pIdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    __lidt((void *)pIdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        lidt    [rax]
#  else
        mov     eax, [pIdtr]
        lidt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Gets the content of the GDTR CPU register.
 * @param   pGdtr   Where to store the GDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMGetGDTR(PRTGDTR pGdtr);
#else
DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    _sgdt(pGdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        sgdt    [rax]
#  else
        mov     eax, [pGdtr]
        sgdt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Sets the content of the GDTR CPU register.
 * @param   pGdtr   Where to load the GDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2013
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr);
#else
DECLINLINE(void) ASMSetGDTR(const RTGDTR RT_FAR *pGdtr)
{
# if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2013
    _lgdt((void *)pGdtr);
# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        lgdt    [rax]
#  else
        mov     eax, [pGdtr]
        lgdt    [eax]
#  endif
    }
# endif
}
#endif



/**
 * Get the CS register.
 * @returns CS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetCS(void);
#else
DECLINLINE(RTSEL) ASMGetCS(void)
{
    RTSEL SelCS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
# else
    __asm
    {
        mov     ax, cs
        mov     [SelCS], ax
    }
# endif
    return SelCS;
}
#endif


/**
 * Get the DS register.
 * @returns DS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetDS(void);
#else
DECLINLINE(RTSEL) ASMGetDS(void)
{
    RTSEL SelDS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
# else
    __asm
    {
        mov     ax, ds
        mov     [SelDS], ax
    }
# endif
    return SelDS;
}
#endif


/**
 * Get the ES register.
 * @returns ES.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetES(void);
#else
DECLINLINE(RTSEL) ASMGetES(void)
{
    RTSEL SelES;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
# else
    __asm
    {
        mov     ax, es
        mov     [SelES], ax
    }
# endif
    return SelES;
}
#endif


/**
 * Get the FS register.
 * @returns FS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetFS(void);
#else
DECLINLINE(RTSEL) ASMGetFS(void)
{
    RTSEL SelFS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
# else
    __asm
    {
        mov     ax, fs
        mov     [SelFS], ax
    }
# endif
    return SelFS;
}
#endif

#ifdef RT_ARCH_AMD64

/**
 * Get the FS base register.
 * @returns FS base address.
 */
# if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(uint64_t) ASMGetFSBase(void);
# else
DECLINLINE(uint64_t) ASMGetFSBase(void)
{
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    return (uint64_t)_readfsbase_u64();
#  elif RT_INLINE_ASM_GNU_STYLE
    uint64_t uFSBase;
    __asm__ __volatile__("rdfsbase %0\n\t" : "=r" (uFSBase));
    return uFSBase;
#  endif
}
# endif


/**
 * Set the FS base register.
 * @param   uNewBase    The new base value.
 */
# if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(void) ASMSetFSBase(uint64_t uNewBase);
# else
DECLINLINE(void) ASMSetFSBase(uint64_t uNewBase)
{
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    _writefsbase_u64(uNewBase);
#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrfsbase %0\n\t" : : "r" (uNewBase));
#  endif
}
# endif

#endif /* RT_ARCH_AMD64 */

/**
 * Get the GS register.
 * @returns GS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetGS(void);
#else
DECLINLINE(RTSEL) ASMGetGS(void)
{
    RTSEL SelGS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
# else
    __asm
    {
        mov     ax, gs
        mov     [SelGS], ax
    }
# endif
    return SelGS;
}
#endif

#ifdef RT_ARCH_AMD64

/**
 * Get the GS base register.
 * @returns GS base address.
 */
# if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(uint64_t) ASMGetGSBase(void);
# else
DECLINLINE(uint64_t) ASMGetGSBase(void)
{
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    return (uint64_t)_readgsbase_u64();
#  elif RT_INLINE_ASM_GNU_STYLE
    uint64_t uGSBase;
    __asm__ __volatile__("rdgsbase %0\n\t" : "=r" (uGSBase));
    return uGSBase;
#  endif
}
# endif


/**
 * Set the GS base register.
 * @param   uNewBase    The new base value.
 */
# if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2015 /*?*/
DECLASM(void) ASMSetGSBase(uint64_t uNewBase);
# else
DECLINLINE(void) ASMSetGSBase(uint64_t uNewBase)
{
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2015
    _writegsbase_u64(uNewBase);
#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrgsbase %0\n\t" : : "r" (uNewBase));
#  endif
}
# endif

#endif /* RT_ARCH_AMD64 */


/**
 * Get the SS register.
 * @returns SS.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetSS(void);
#else
DECLINLINE(RTSEL) ASMGetSS(void)
{
    RTSEL SelSS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
# else
    __asm
    {
        mov     ax, ss
        mov     [SelSS], ax
    }
# endif
    return SelSS;
}
#endif


/**
 * Get the TR register.
 * @returns TR.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetTR(void);
#else
DECLINLINE(RTSEL) ASMGetTR(void)
{
    RTSEL SelTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
# else
    __asm
    {
        str     ax
        mov     [SelTR], ax
    }
# endif
    return SelTR;
}
#endif


/**
 * Get the LDTR register.
 * @returns LDTR.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(RTSEL) ASMGetLDTR(void);
#else
DECLINLINE(RTSEL) ASMGetLDTR(void)
{
    RTSEL SelLDTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
# else
    __asm
    {
        sldt    ax
        mov     [SelLDTR], ax
    }
# endif
    return SelLDTR;
}
#endif


/**
 * Get the access rights for the segment selector.
 *
 * @returns The access rights on success or UINT32_MAX on failure.
 * @param   uSel    The selector value.
 *
 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
 *          always have bits 0:7 as 0 (on both Intel & AMD).
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetSegAttr(uint32_t uSel);
#else
DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
{
    uint32_t uAttr;
    /* LAR only accesses 16 bits of the source operand, but eax is required as
       the destination operand to get the full 32-bit access rights. */
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lar %1, %%eax\n\t"
                         "jz done%=\n\t"
                         "movl $0xffffffff, %%eax\n\t"
                         "done%=:\n\t"
                         "movl %%eax, %0\n\t"
                         : "=r" (uAttr)
                         : "r" (uSel)
                         : "cc", "%eax");
# else
    __asm
    {
        lar     eax, [uSel]
        jz      done
        mov     eax, 0ffffffffh
    done:
        mov     [uAttr], eax
    }
# endif
    return uAttr;
}
#endif
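
/* Usage sketch (illustrative): testing whether a selector refers to a present
   descriptor.  The mask is the descriptor P bit as LAR reports it (bit 15);
   the function name is hypothetical. */
#if 0
static bool exampleIsSelectorPresent(uint32_t uSel)
{
    uint32_t const uAttr = ASMGetSegAttr(uSel);
    return uAttr != UINT32_MAX && (uAttr & 0x8000 /* P bit */) != 0;
}
#endif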


/**
 * Get the [RE]FLAGS register.
 * @returns [RE]FLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMGetFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
{
    RTCCUINTREG uFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "popq %0\n\t"
                         : "=r" (uFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "popl %0\n\t"
                         : "=r" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    uFlags = __readeflags();
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        pushfq
        pop     [uFlags]
#  else
        pushfd
        pop     [uFlags]
#  endif
    }
# endif
    return uFlags;
}
#endif


/**
 * Set the [RE]FLAGS register.
 * @param   uFlags  The new [RE]FLAGS value.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - see __readeflags() above. */
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetFlags(RTCCUINTREG uFlags);
#else
DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushq %0\n\t"
                         "popfq\n\t"
                         : : "g" (uFlags));
#  else
    __asm__ __volatile__("pushl %0\n\t"
                         "popfl\n\t"
                         : : "g" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    __writeeflags(uFlags);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        push    [uFlags]
        popfq
#  else
        push    [uFlags]
        popfd
#  endif
    }
# endif
}
#endif


/**
 * Modifies the [RE]FLAGS register.
 * @returns Original value.
 * @param   fAndEfl Flags to keep (applied first).
 * @param   fOrEfl  Flags to be set.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl);
#else
DECLINLINE(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "andq %0, %1\n\t"
                         "orq %3, %1\n\t"
                         "mov %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl),
                           "=r" (fAndEfl)
                         : "1" (fAndEfl),
                           "rn" (fOrEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "andl %1, (%%esp)\n\t"
                         "orl %2, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl),
                           "rn" (fOrEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags((fOldEfl & fAndEfl) | fOrEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rdx, [fAndEfl]
        mov     rcx, [fOrEfl]
        pushfq
        mov     rax, [rsp]
        and     rdx, rax
        or      rdx, rcx
        mov     [rsp], rdx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     edx, [fAndEfl]
        mov     ecx, [fOrEfl]
        pushfd
        mov     eax, [esp]
        and     edx, eax
        or      edx, ecx
        mov     [esp], edx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif
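
/* Usage sketch (illustrative): clearing EFLAGS.IF for a short critical
   section via ASMChangeFlags; 0x200 is X86_EFL_IF.  Ring-0 only, and the
   function name is hypothetical. */
#if 0
static void exampleBriefIntOff(void)
{
    RTCCUINTREG const fSaved = ASMChangeFlags(~(RTCCUINTREG)0x200 /* ~IF */, 0);
    /* ... short critical section ... */
    ASMSetFlags(fSaved);
}
#endif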


/**
 * Modifies the [RE]FLAGS register by ORing in one or more flags.
 * @returns Original value.
 * @param   fOrEfl  The flags to be set (ORed in).
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl);
#else
DECLINLINE(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "orq %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fOrEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "orl %1, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fOrEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags(fOldEfl | fOrEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rcx, [fOrEfl]
        pushfq
        mov     rax, [rsp]
        or      [rsp], rcx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     ecx, [fOrEfl]
        pushfd
        mov     eax, [esp]
        or      [esp], ecx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif


/**
 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.
 * @returns Original value.
 * @param   fAndEfl The flags to keep.
 */
#if RT_INLINE_ASM_EXTERNAL /*&& RT_INLINE_ASM_USES_INTRIN < 15 - buggy intrinsics in VC++ 2010, reordering/optimizers issues. */
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl);
#else
DECLINLINE(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl)
{
    RTCCUINTREG fOldEfl;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "movq (%%rsp), %0\n\t"
                         "andq %1, (%%rsp)\n\t"
                         "popfq\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl) );
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "movl (%%esp), %0\n\t"
                         "andl %1, (%%esp)\n\t"
                         "popfl\n\t"
                         : "=&r" (fOldEfl)
                         : "rn" (fAndEfl) );
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    fOldEfl = __readeflags();
    __writeeflags(fOldEfl & fAndEfl);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rdx, [fAndEfl]
        pushfq
        mov     rax, [rsp]
        and     [rsp], rdx
        popfq
        mov     [fOldEfl], rax
#  else
        mov     edx, [fAndEfl]
        pushfd
        mov     eax, [esp]
        and     [esp], edx
        popfd
        mov     [fOldEfl], eax
#  endif
    }
# endif
    return fOldEfl;
}
#endif


/**
 * Gets the content of the CPU timestamp counter register.
 *
 * @returns TSC.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTSC(void);
#else
DECLINLINE(uint64_t) ASMReadTSC(void)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
# else
#  if RT_INLINE_ASM_USES_INTRIN
    u.u = __rdtsc();
#  else
    __asm
    {
        rdtsc
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
#  endif
# endif
    return u.u;
}
#endif
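
/* Usage sketch (illustrative): measuring the rough cycle cost of an operation
   with two TSC reads.  Without a serializing instruction the CPU may reorder
   around RDTSC, so treat the result as approximate; hypothetical name. */
#if 0
static uint64_t exampleCycleCost(void)
{
    uint64_t const uStart = ASMReadTSC();
    /* ... operation under test ... */
    return ASMReadTSC() - uStart;
}
#endif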


/**
 * Gets the content of the CPU timestamp counter register and the
 * associated AUX value.
 *
 * @returns TSC.
 * @param   puAux   Where to store the AUX value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2008
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux);
#else
DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t RT_FAR *puAux)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    /* rdtscp is not supported by ancient linux build VM of course :-( */
    /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */
    __asm__ __volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));
# else
#  if RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2008
    u.u = __rdtscp(puAux);
#  else
    __asm
    {
        rdtscp
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        mov     eax, [puAux]
        mov     [eax], ecx
    }
#  endif
# endif
    return u.u;
}
#endif
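
/* Usage sketch (illustrative): using the AUX value (IA32_TSC_AUX, typically a
   CPU id programmed by the OS) to detect being rescheduled between two reads.
   Hypothetical name. */
#if 0
static bool exampleTscDeltaOnSameCpu(uint64_t *puDelta)
{
    uint32_t uAux1, uAux2;
    uint64_t const uStart = ASMReadTscWithAux(&uAux1);
    /* ... work ... */
    *puDelta = ASMReadTscWithAux(&uAux2) - uStart;
    return uAux1 == uAux2; /* false if we may have migrated CPUs */
}
#endif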


/**
 * Performs the cpuid instruction returning all registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId(uint32_t uOperator, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ __volatile__ ("cpuid\n\t"
                          : "=a" (uRAX),
                            "=b" (uRBX),
                            "=c" (uRCX),
                            "=d" (uRDX)
                          : "0" (uOperator), "2" (0));
    *(uint32_t RT_FAR *)pvEAX = (uint32_t)uRAX;
    *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;
    *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;
    *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx, %1\n\t"
                          : "=a" (*(uint32_t *)pvEAX),
                            "=r" (*(uint32_t *)pvEBX),
                            "=c" (*(uint32_t *)pvECX),
                            "=d" (*(uint32_t *)pvEDX)
                          : "0" (uOperator), "2" (0));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    *(uint32_t RT_FAR *)pvEAX = aInfo[0];
    *(uint32_t RT_FAR *)pvEBX = aInfo[1];
    *(uint32_t RT_FAR *)pvECX = aInfo[2];
    *(uint32_t RT_FAR *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t RT_FAR *)pvEAX = uEAX;
    *(uint32_t RT_FAR *)pvEBX = uEBX;
    *(uint32_t RT_FAR *)pvECX = uECX;
    *(uint32_t RT_FAR *)pvEDX = uEDX;
# endif
}
#endif
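
/* Usage sketch (illustrative): fetching the 12-character vendor string from
   leaf 0; the EBX:EDX:ECX byte ordering is architectural.  Hypothetical
   name; the caller supplies at least 13 bytes. */
#if 0
static void exampleCpuVendor(char *pszVendor /* >= 13 bytes */)
{
    uint32_t uEAX;
    ASMCpuId(0, &uEAX, pszVendor /* EBX */, pszVendor + 8 /* ECX */, pszVendor + 4 /* EDX */);
    pszVendor[12] = '\0'; /* e.g. "GenuineIntel" or "AuthenticAMD" */
}
#endif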


/**
 * Performs the CPUID instruction with EAX and ECX input returning ALL output
 * registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   uIdxECX     The ECX index (sub-leaf).
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ ("cpuid\n\t"
             : "=a" (uRAX),
               "=b" (uRBX),
               "=c" (uRCX),
               "=d" (uRDX)
             : "0" (uOperator),
               "2" (uIdxECX));
    *(uint32_t RT_FAR *)pvEAX = (uint32_t)uRAX;
    *(uint32_t RT_FAR *)pvEBX = (uint32_t)uRBX;
    *(uint32_t RT_FAR *)pvECX = (uint32_t)uRCX;
    *(uint32_t RT_FAR *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ ("xchgl %%ebx, %1\n\t"
             "cpuid\n\t"
             "xchgl %%ebx, %1\n\t"
             : "=a" (*(uint32_t *)pvEAX),
               "=r" (*(uint32_t *)pvEBX),
               "=c" (*(uint32_t *)pvECX),
               "=d" (*(uint32_t *)pvEDX)
             : "0" (uOperator),
               "2" (uIdxECX));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuidex(aInfo, uOperator, uIdxECX);
    *(uint32_t RT_FAR *)pvEAX = aInfo[0];
    *(uint32_t RT_FAR *)pvEBX = aInfo[1];
    *(uint32_t RT_FAR *)pvECX = aInfo[2];
    *(uint32_t RT_FAR *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        mov     ecx, [uIdxECX]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t RT_FAR *)pvEAX = uEAX;
    *(uint32_t RT_FAR *)pvEBX = uEBX;
    *(uint32_t RT_FAR *)pvECX = uECX;
    *(uint32_t RT_FAR *)pvEDX = uEDX;
# endif
}
#endif
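
/* Usage sketch (illustrative): querying structured extended features (leaf 7,
   sub-leaf 0 in ECX).  EBX bit 0 of that leaf is FSGSBASE; the function name
   is hypothetical. */
#if 0
static bool exampleHasFsGsBase(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId_Idx_ECX(7, 0, &uEAX, &uEBX, &uECX, &uEDX);
    return (uEBX & 1 /* FSGSBASE */) != 0;
}
#endif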


/**
 * CPUID variant that initializes all 4 registers before the CPUID instruction.
 *
 * @returns The EAX result value.
 * @param   uOperator   CPUID operation (eax).
 * @param   uInitEBX    The value to assign EBX prior to the CPUID instruction.
 * @param   uInitECX    The value to assign ECX prior to the CPUID instruction.
 * @param   uInitEDX    The value to assign EDX prior to the CPUID instruction.
 * @param   pvEAX       Where to store eax. Optional.
 * @param   pvEBX       Where to store ebx. Optional.
 * @param   pvECX       Where to store ecx. Optional.
 * @param   pvEDX       Where to store edx. Optional.
 */
DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
                                 void RT_FAR *pvEAX, void RT_FAR *pvEBX, void RT_FAR *pvECX, void RT_FAR *pvEDX);


/**
 * Performs the cpuid instruction returning ecx and edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX);
#else
DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void RT_FAR *pvECX, void RT_FAR *pvEDX)
{
    uint32_t uEBX;
    ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
}
#endif


/**
 * Performs the cpuid instruction returning eax.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EAX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
{
    RTCCUINTREG xAX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "rbx", "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (xAX)
             : "0" (uOperator)
             : "ecx", "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "edx", "ecx", "ebx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xAX = aInfo[0];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xAX], eax
        pop     ebx
    }
# endif
    return (uint32_t)xAX;
}
#endif


/**
 * Performs the cpuid instruction returning ebx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EBX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=b" (xBX)
             : "0" (uOperator)
             : "rdx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "mov %%ebx, %%edx\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xBX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=b" (xBX)
             : "0" (uOperator)
             : "edx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint32_t)xBX;
}
#endif


/**
 * Performs the cpuid instruction returning ecx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns ECX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
{
    RTCCUINTREG xCX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=c" (xCX)
             : "0" (uOperator)
             : "rbx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "ebx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xCX = aInfo[2];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xCX], ecx
        pop     ebx
    }
# endif
    return (uint32_t)xCX;
}
#endif


/**
 * Performs the cpuid instruction returning edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EDX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
{
    RTCCUINTREG xDX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=d" (xDX)
             : "0" (uOperator)
             : "rbx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ebx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xDX = aInfo[3];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
# endif
    return (uint32_t)xDX;
}
#endif


/**
 * Checks if the current CPU supports CPUID.
 *
 * @returns true if CPUID is supported.
 */
#ifdef __WATCOMC__
DECLASM(bool) ASMHasCpuId(void);
#else
DECLINLINE(bool) ASMHasCpuId(void)
{
# ifdef RT_ARCH_AMD64
    return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
# else /* !RT_ARCH_AMD64 */
    bool fRet = false;
#  if RT_INLINE_ASM_GNU_STYLE
    uint32_t u1;
    uint32_t u2;
    __asm__ ("pushf\n\t"
             "pop %1\n\t"
             "mov %1, %2\n\t"
             "xorl $0x200000, %1\n\t"
             "push %1\n\t"
             "popf\n\t"
             "pushf\n\t"
             "pop %1\n\t"
             "cmpl %1, %2\n\t"
             "setne %0\n\t"
             "push %2\n\t"
             "popf\n\t"
             : "=m" (fRet), "=r" (u1), "=r" (u2));
#  else
    __asm
    {
        pushfd
        pop     eax
        mov     ebx, eax
        xor     eax, 0200000h
        push    eax
        popfd
        pushfd
        pop     eax
        cmp     eax, ebx
        setne   fRet
        push    ebx
        popfd
    }
#  endif
    return fRet;
# endif /* !RT_ARCH_AMD64 */
}
#endif
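
/* Usage sketch (illustrative): guarding CPUID use on 32-bit CPUs that may
   predate the instruction (486 and older).  Hypothetical name. */
#if 0
static uint32_t exampleMaxStdLeaf(void)
{
    if (!ASMHasCpuId())
        return 0;
    return ASMCpuId_EAX(0); /* EAX of leaf 0 = highest standard leaf. */
}
#endif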


/**
 * Gets the APIC ID of the current CPU.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMGetApicId(void);
#else
DECLINLINE(uint8_t) ASMGetApicId(void)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("mov %%ebx,%1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%1\n\t"
                          : "=a" (uSpill),
                            "=rm" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  else
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, 1);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, 1
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint8_t)(xBX >> 24);
}
#endif


/**
 * Gets the APIC ID of the current CPU using leaf 0xb.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2010 /*?*/
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMGetApicIdExt0B(void);
#else
DECLINLINE(uint32_t) ASMGetApicIdExt0B(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG xDX;
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpillEax, uSpillEcx;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0)
                          : "rbx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpillEax, uSpillEcx, uSpillEbx;
    __asm__ __volatile__ ("mov %%ebx,%2\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%2\n\t"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=rm" (uSpillEbx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0));
#  else
    RTCCUINTREG uSpillEax, uSpillEcx;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpillEax),
                            "=c" (uSpillEcx),
                            "=d" (xDX)
                          : "0" (0xb),
                            "1" (0)
                          : "ebx");
#  endif
    return (uint32_t)xDX;

# elif RT_INLINE_ASM_USES_INTRIN >= RT_MSC_VER_VS2010 /*?*/

    int aInfo[4];
    __cpuidex(aInfo, 0xb, 0);
    return aInfo[3];

# else
    RTCCUINTREG xDX;
    __asm
    {
        push    ebx
        mov     eax, 0xb
        xor     ecx, ecx
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
    return (uint32_t)xDX;
# endif
}
#endif


/**
 * Gets the APIC ID of the current CPU using leaf 0x8000001E.
 *
 * @returns the APIC ID.
 */
DECLINLINE(uint32_t) ASMGetApicIdExt8000001E(void)
{
    return ASMCpuId_EAX(0x8000001e);
}


/**
 * Tests if this is a genuine Intel CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsIntelCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsIntelCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is an authentic AMD CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsAmdCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsAmdCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a centaur hauling VIA CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsViaCentaurCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsViaCentaurCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a Shanghai CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsShanghaiCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsShanghaiCpu(uEBX, uECX, uEDX);
}


/**
 * Tests if this is a genuine Hygon CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsHygonCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return RTX86IsHygonCpu(uEBX, uECX, uEDX);
}


/**
 * Get cr0.
 * @returns cr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR0(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR0(void)
{
    RTCCUINTXREG uCR0;
# if RT_INLINE_ASM_USES_INTRIN
    uCR0 = __readcr0();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
#  else
    __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr0
        mov     [uCR0], rax
#  else
        mov     eax, cr0
        mov     [uCR0], eax
#  endif
    }
# endif
    return uCR0;
}
#endif


/**
 * Sets the CR0 register.
 * @param   uCR0    The new CR0 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR0(RTCCUINTXREG uCR0);
#else
DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr0(uCR0);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
#  else
    __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR0]
        mov     cr0, rax
#  else
        mov     eax, [uCR0]
        mov     cr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr2.
 * @returns cr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR2(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR2(void)
{
    RTCCUINTXREG uCR2;
# if RT_INLINE_ASM_USES_INTRIN
    uCR2 = __readcr2();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
#  else
    __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr2
        mov     [uCR2], rax
#  else
        mov     eax, cr2
        mov     [uCR2], eax
#  endif
    }
# endif
    return uCR2;
}
#endif


/**
 * Sets the CR2 register.
 * @param   uCR2    The new CR2 value.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR2(RTCCUINTXREG uCR2);
#else
DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
#  else
    __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR2]
        mov     cr2, rax
#  else
        mov     eax, [uCR2]
        mov     cr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr3.
 * @returns cr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR3(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR3(void)
{
    RTCCUINTXREG uCR3;
# if RT_INLINE_ASM_USES_INTRIN
    uCR3 = __readcr3();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     [uCR3], rax
#  else
        mov     eax, cr3
        mov     [uCR3], eax
#  endif
    }
# endif
    return uCR3;
}
#endif


/**
 * Sets the CR3 register.
 *
 * @param   uCR3    New CR3 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR3(RTCCUINTXREG uCR3);
#else
DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(uCR3);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
#  else
    __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR3]
        mov     cr3, rax
#  else
        mov     eax, [uCR3]
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Reloads the CR3 register.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMReloadCR3(void);
#else
DECLINLINE(void) ASMReloadCR3(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(__readcr3());

# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTXREG u;
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t"
                         "movq %0, %%cr3\n\t"
                         : "=r" (u));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t"
                         "movl %0, %%cr3\n\t"
                         : "=r" (u));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     cr3, rax
#  else
        mov     eax, cr3
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr4.
 * @returns cr4.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetCR4(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR4(void)
{
    RTCCUINTXREG uCR4;
# if RT_INLINE_ASM_USES_INTRIN
    uCR4 = __readcr4();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
#  else
    __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr4
        mov     [uCR4], rax
#  else
        push    eax         /* just in case */
        /*mov   eax, cr4*/
        _emit   0x0f
        _emit   0x20
        _emit   0xe0
        mov     [uCR4], eax
        pop     eax
#  endif
    }
# endif
    return uCR4;
}
#endif
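
/* Usage sketch (illustrative): testing a CR4 feature bit; 0x200 is
   CR4.OSFXSR (bit 9, OS supports FXSAVE/FXRSTOR).  Ring-0 only;
   hypothetical name. */
#if 0
static bool exampleOsSupportsFxsr(void)
{
    return (ASMGetCR4() & 0x200 /* CR4.OSFXSR */) != 0;
}
#endif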


/**
 * Sets the CR4 register.
 *
 * @param   uCR4    New CR4 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetCR4(RTCCUINTXREG uCR4);
#else
DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr4(uCR4);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
#  else
    __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR4]
        mov     cr4, rax
#  else
        mov     eax, [uCR4]
        _emit   0x0F
        _emit   0x22
        _emit   0xE0        /* mov cr4, eax */
#  endif
    }
# endif
}
#endif


/**
 * Get cr8.
 * @returns cr8.
 * @remark  The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTXREG) ASMGetCR8(void);
#else
DECLINLINE(RTCCUINTXREG) ASMGetCR8(void)
{
# ifdef RT_ARCH_AMD64
    RTCCUINTXREG uCR8;
#  if RT_INLINE_ASM_USES_INTRIN
    uCR8 = __readcr8();

#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
#  else
    __asm
    {
        mov     rax, cr8
        mov     [uCR8], rax
    }
#  endif
    return uCR8;
# else /* !RT_ARCH_AMD64 */
    return 0;
# endif /* !RT_ARCH_AMD64 */
}
#endif


/**
 * Get XCR0 (eXtended feature Control Register 0).
 * @returns xcr0.
 */
DECLASM(uint64_t) ASMGetXcr0(void);

/**
 * Sets the XCR0 register.
 * @param   uXcr0   The new XCR0 value.
 */
DECLASM(void) ASMSetXcr0(uint64_t uXcr0);

struct X86XSAVEAREA;
/**
 * Save extended CPU state.
 * @param   pXStateArea     Where to save the state.
 * @param   fComponents     Which state components to save.
 */
DECLASM(void) ASMXSave(struct X86XSAVEAREA RT_FAR *pXStateArea, uint64_t fComponents);

/**
 * Loads extended CPU state.
 * @param   pXStateArea     Where to load the state from.
 * @param   fComponents     Which state components to load.
 */
DECLASM(void) ASMXRstor(struct X86XSAVEAREA const RT_FAR *pXStateArea, uint64_t fComponents);


struct X86FXSTATE;
/**
 * Save FPU and SSE CPU state.
 * @param   pXStateArea     Where to save the state.
 */
DECLASM(void) ASMFxSave(struct X86FXSTATE RT_FAR *pXStateArea);

/**
 * Load FPU and SSE CPU state.
 * @param   pXStateArea     Where to load the state from.
 */
DECLASM(void) ASMFxRstor(struct X86FXSTATE const RT_FAR *pXStateArea);
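
/* Usage sketch (illustrative): saving and restoring the FPU/SSE state around
   code that clobbers it.  X86FXSTATE is defined in iprt/x86.h (only forward-
   declared here), and the FXSAVE area must be 16-byte aligned; the function
   name is hypothetical. */
#if 0
static void exampleWithFpuSaved(struct X86FXSTATE *pFxState /* 16-byte aligned */)
{
    ASMFxSave(pFxState);
    /* ... code that uses FPU/SSE registers ... */
    ASMFxRstor(pFxState);
}
#endif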


/**
 * Enables interrupts (EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntEnable(void);
#else
DECLINLINE(void) ASMIntEnable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("sti\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _enable();
# else
    __asm sti
# endif
}
#endif


/**
 * Disables interrupts (!EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMIntDisable(void);
#else
DECLINLINE(void) ASMIntDisable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("cli\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _disable();
# else
    __asm cli
# endif
}
#endif


/**
 * Disables interrupts and returns previous xFLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTREG) ASMIntDisableFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
{
    RTCCUINTREG xFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "cli\n\t"
                         "popq %0\n\t"
                         : "=r" (xFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "cli\n\t"
                         "popl %0\n\t"
                         : "=r" (xFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
    xFlags = ASMGetFlags();
    _disable();
# else
    __asm {
        pushfd
        cli
        pop     [xFlags]
    }
# endif
    return xFlags;
}
#endif
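
/* Usage sketch (illustrative): the canonical save/disable/restore pattern for
   a short interrupt-free section in ring-0 code.  Hypothetical name. */
#if 0
static void exampleNoIrqSection(void)
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
    /* ... code that must not be interrupted ... */
    ASMSetFlags(fSavedFlags); /* restores EFLAGS.IF to its prior state */
}
#endif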


/**
 * Are interrupts enabled?
 *
 * @returns true / false.
 */
DECLINLINE(bool) ASMIntAreEnabled(void)
{
    RTCCUINTREG uFlags = ASMGetFlags();
    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
}


/**
 * Halts the CPU until interrupted.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < RT_MSC_VER_VS2005
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMHalt(void);
#else
DECLINLINE(void) ASMHalt(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("hlt\n\t");
# elif RT_INLINE_ASM_USES_INTRIN
    __halt();
# else
    __asm {
        hlt
    }
# endif
}
#endif


/**
 * Reads a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint64_t) ASMRdMsr(uint32_t uRegister);
#else
DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    u.u = __readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register.
 *
 * @param   uRegister   Register to write to.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    __writemsr(uRegister, u.u);

# else
    __asm
    {
        mov     ecx, [uRegister]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
    }
# endif
}
#endif
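
/* Usage sketch (illustrative): a read-modify-write of an MSR.  The register
   number and bit position are placeholders; RDMSR/WRMSR fault outside
   ring-0, and RT_BIT_64 is assumed from iprt/cdefs.h. */
#if 0
static void exampleSetMsrBit(uint32_t uMsr, unsigned iBit)
{
    uint64_t const uOld = ASMRdMsr(uMsr);
    ASMWrMsr(uMsr, uOld | RT_BIT_64(iBit));
}
#endif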


/**
 * Reads a machine specific register, extended version (for AMD).
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 * @param   uXDI        RDI/EDI value.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI);
#else
DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        xchg    edi, [uXDI]
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register, extended version (for AMD).
 *
 * @param   uRegister   Register to write to.
 * @param   uXDI        RDI/EDI value.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL
RT_ASM_DECL_PRAGMA_WATCOM_386(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
        xchg    edi, [uXDI]
    }
# endif
}
#endif



/**
 * Reads low part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u32)
                         : "c" (uRegister)
                         : "edx");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)__readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], eax
    }
# endif

    return u32;
}
#endif


/**
 * Reads high part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=d" (u32)
                         : "c" (uRegister)
                         : "eax");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)(__readmsr(uRegister) >> 32);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], edx
    }
# endif

    return u32;
}
#endif
2360
2361
2362/**
2363 * Gets dr0.
2364 *
2365 * @returns dr0.
2366 */
2367#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2368RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR0(void);
2369#else
2370DECLINLINE(RTCCUINTXREG) ASMGetDR0(void)
2371{
2372 RTCCUINTXREG uDR0;
2373# if RT_INLINE_ASM_USES_INTRIN
2374 uDR0 = __readdr(0);
2375# elif RT_INLINE_ASM_GNU_STYLE
2376# ifdef RT_ARCH_AMD64
2377 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
2378# else
2379 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
2380# endif
2381# else
2382 __asm
2383 {
2384# ifdef RT_ARCH_AMD64
2385 mov rax, dr0
2386 mov [uDR0], rax
2387# else
2388 mov eax, dr0
2389 mov [uDR0], eax
2390# endif
2391 }
2392# endif
2393 return uDR0;
2394}
2395#endif
2396
2397
2398/**
2399 * Gets dr1.
2400 *
2401 * @returns dr1.
2402 */
2403#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2404RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR1(void);
2405#else
2406DECLINLINE(RTCCUINTXREG) ASMGetDR1(void)
2407{
2408 RTCCUINTXREG uDR1;
2409# if RT_INLINE_ASM_USES_INTRIN
2410 uDR1 = __readdr(1);
2411# elif RT_INLINE_ASM_GNU_STYLE
2412# ifdef RT_ARCH_AMD64
2413 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
2414# else
2415 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
2416# endif
2417# else
2418 __asm
2419 {
2420# ifdef RT_ARCH_AMD64
2421 mov rax, dr1
2422 mov [uDR1], rax
2423# else
2424 mov eax, dr1
2425 mov [uDR1], eax
2426# endif
2427 }
2428# endif
2429 return uDR1;
2430}
2431#endif
2432
2433
2434/**
2435 * Gets dr2.
2436 *
2437 * @returns dr2.
2438 */
2439#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2440RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR2(void);
2441#else
2442DECLINLINE(RTCCUINTXREG) ASMGetDR2(void)
2443{
2444 RTCCUINTXREG uDR2;
2445# if RT_INLINE_ASM_USES_INTRIN
2446 uDR2 = __readdr(2);
2447# elif RT_INLINE_ASM_GNU_STYLE
2448# ifdef RT_ARCH_AMD64
2449 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
2450# else
2451 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
2452# endif
2453# else
2454 __asm
2455 {
2456# ifdef RT_ARCH_AMD64
2457 mov rax, dr2
2458 mov [uDR2], rax
2459# else
2460 mov eax, dr2
2461 mov [uDR2], eax
2462# endif
2463 }
2464# endif
2465 return uDR2;
2466}
2467#endif
2468
2469
2470/**
2471 * Gets dr3.
2472 *
2473 * @returns dr3.
2474 */
2475#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2476RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR3(void);
2477#else
2478DECLINLINE(RTCCUINTXREG) ASMGetDR3(void)
2479{
2480 RTCCUINTXREG uDR3;
2481# if RT_INLINE_ASM_USES_INTRIN
2482 uDR3 = __readdr(3);
2483# elif RT_INLINE_ASM_GNU_STYLE
2484# ifdef RT_ARCH_AMD64
2485 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
2486# else
2487 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
2488# endif
2489# else
2490 __asm
2491 {
2492# ifdef RT_ARCH_AMD64
2493 mov rax, dr3
2494 mov [uDR3], rax
2495# else
2496 mov eax, dr3
2497 mov [uDR3], eax
2498# endif
2499 }
2500# endif
2501 return uDR3;
2502}
2503#endif
2504
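/*
 * Editorial example (not part of the original header): snapshotting the four
 * breakpoint address registers with the getters above, e.g. before lending
 * the CPU to other code.  The array shape is this example's own invention.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleSnapshotBreakpointRegs(RTCCUINTXREG paDRs[4])
{
    paDRs[0] = ASMGetDR0();
    paDRs[1] = ASMGetDR1();
    paDRs[2] = ASMGetDR2();
    paDRs[3] = ASMGetDR3();
}
#endif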
2505
2506/**
2507 * Gets dr6.
2508 *
2509 * @returns dr6.
2510 */
2511#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2512RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR6(void);
2513#else
2514DECLINLINE(RTCCUINTXREG) ASMGetDR6(void)
2515{
2516 RTCCUINTXREG uDR6;
2517# if RT_INLINE_ASM_USES_INTRIN
2518 uDR6 = __readdr(6);
2519# elif RT_INLINE_ASM_GNU_STYLE
2520# ifdef RT_ARCH_AMD64
2521 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
2522# else
2523 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
2524# endif
2525# else
2526 __asm
2527 {
2528# ifdef RT_ARCH_AMD64
2529 mov rax, dr6
2530 mov [uDR6], rax
2531# else
2532 mov eax, dr6
2533 mov [uDR6], eax
2534# endif
2535 }
2536# endif
2537 return uDR6;
2538}
2539#endif
2540
2541
2542/**
2543 * Reads and clears DR6.
2544 *
2545 * @returns DR6.
2546 */
2547#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2548RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetAndClearDR6(void);
2549#else
2550DECLINLINE(RTCCUINTXREG) ASMGetAndClearDR6(void)
2551{
2552 RTCCUINTXREG uDR6;
2553# if RT_INLINE_ASM_USES_INTRIN
2554 uDR6 = __readdr(6);
2555 __writedr(6, 0xffff0ff0U); /* Bits 31-16 and 11-4 are set; bit 12 and bits 63-32 are zero. */
2556# elif RT_INLINE_ASM_GNU_STYLE
2557 RTCCUINTXREG uNewValue = 0xffff0ff0U; /* Bits 31-16 and 11-4 are set; bit 12 and bits 63-32 are zero. */
2558# ifdef RT_ARCH_AMD64
2559 __asm__ __volatile__("movq %%dr6, %0\n\t"
2560 "movq %1, %%dr6\n\t"
2561 : "=r" (uDR6)
2562 : "r" (uNewValue));
2563# else
2564 __asm__ __volatile__("movl %%dr6, %0\n\t"
2565 "movl %1, %%dr6\n\t"
2566 : "=r" (uDR6)
2567 : "r" (uNewValue));
2568# endif
2569# else
2570 __asm
2571 {
2572# ifdef RT_ARCH_AMD64
2573 mov rax, dr6
2574 mov [uDR6], rax
2575 mov rcx, rax
2576 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are set; bit 12 and bits 63-32 are zero. */
2577 mov dr6, rcx
2578# else
2579 mov eax, dr6
2580 mov [uDR6], eax
2581 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are set; bit 12 is zero. */
2582 mov dr6, ecx
2583# endif
2584 }
2585# endif
2586 return uDR6;
2587}
2588#endif
2589
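/*
 * Editorial example (not part of the original header): ASMGetAndClearDR6
 * belongs in a #DB handler, because the DR6 status bits are sticky and
 * software has to clear them.  The B0..B3 positions (bits 0-3) follow the
 * architected DR6 layout; the handler shape itself is an assumption.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleDebugExceptionHandler(void)
{
    RTCCUINTXREG const uDR6 = ASMGetAndClearDR6();
    unsigned iBp;
    for (iBp = 0; iBp < 4; iBp++)
        if (uDR6 & RT_BIT_32(iBp)) /* B0..B3: breakpoint iBp hit */
        {
            /* ...dispatch breakpoint iBp... */
        }
}
#endif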
2590
2591/**
2592 * Gets dr7.
2593 *
2594 * @returns dr7.
2595 */
2596#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2597RT_ASM_DECL_PRAGMA_WATCOM(RTCCUINTXREG) ASMGetDR7(void);
2598#else
2599DECLINLINE(RTCCUINTXREG) ASMGetDR7(void)
2600{
2601 RTCCUINTXREG uDR7;
2602# if RT_INLINE_ASM_USES_INTRIN
2603 uDR7 = __readdr(7);
2604# elif RT_INLINE_ASM_GNU_STYLE
2605# ifdef RT_ARCH_AMD64
2606 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
2607# else
2608 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
2609# endif
2610# else
2611 __asm
2612 {
2613# ifdef RT_ARCH_AMD64
2614 mov rax, dr7
2615 mov [uDR7], rax
2616# else
2617 mov eax, dr7
2618 mov [uDR7], eax
2619# endif
2620 }
2621# endif
2622 return uDR7;
2623}
2624#endif
2625
2626
2627/**
2628 * Sets dr0.
2629 *
2630 * @param uDRVal Debug register value to write.
2631 */
2632#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2633RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR0(RTCCUINTXREG uDRVal);
2634#else
2635DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal)
2636{
2637# if RT_INLINE_ASM_USES_INTRIN
2638 __writedr(0, uDRVal);
2639# elif RT_INLINE_ASM_GNU_STYLE
2640# ifdef RT_ARCH_AMD64
2641 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2642# else
2643 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2644# endif
2645# else
2646 __asm
2647 {
2648# ifdef RT_ARCH_AMD64
2649 mov rax, [uDRVal]
2650 mov dr0, rax
2651# else
2652 mov eax, [uDRVal]
2653 mov dr0, eax
2654# endif
2655 }
2656# endif
2657}
2658#endif
2659
2660
2661/**
2662 * Sets dr1.
2663 *
2664 * @param uDRVal Debug register value to write.
2665 */
2666#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2667RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR1(RTCCUINTXREG uDRVal);
2668#else
2669DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal)
2670{
2671# if RT_INLINE_ASM_USES_INTRIN
2672 __writedr(1, uDRVal);
2673# elif RT_INLINE_ASM_GNU_STYLE
2674# ifdef RT_ARCH_AMD64
2675 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2676# else
2677 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2678# endif
2679# else
2680 __asm
2681 {
2682# ifdef RT_ARCH_AMD64
2683 mov rax, [uDRVal]
2684 mov dr1, rax
2685# else
2686 mov eax, [uDRVal]
2687 mov dr1, eax
2688# endif
2689 }
2690# endif
2691}
2692#endif
2693
2694
2695/**
2696 * Sets dr2.
2697 *
2698 * @param uDRVal Debug register value to write.
2699 */
2700#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2701RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR2(RTCCUINTXREG uDRVal);
2702#else
2703DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal)
2704{
2705# if RT_INLINE_ASM_USES_INTRIN
2706 __writedr(2, uDRVal);
2707# elif RT_INLINE_ASM_GNU_STYLE
2708# ifdef RT_ARCH_AMD64
2709 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2710# else
2711 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2712# endif
2713# else
2714 __asm
2715 {
2716# ifdef RT_ARCH_AMD64
2717 mov rax, [uDRVal]
2718 mov dr2, rax
2719# else
2720 mov eax, [uDRVal]
2721 mov dr2, eax
2722# endif
2723 }
2724# endif
2725}
2726#endif
2727
2728
2729/**
2730 * Sets dr3.
2731 *
2732 * @param uDRVal Debug register value to write.
2733 */
2734#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2735RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR3(RTCCUINTXREG uDRVal);
2736#else
2737DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal)
2738{
2739# if RT_INLINE_ASM_USES_INTRIN
2740 __writedr(3, uDRVal);
2741# elif RT_INLINE_ASM_GNU_STYLE
2742# ifdef RT_ARCH_AMD64
2743 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2744# else
2745 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2746# endif
2747# else
2748 __asm
2749 {
2750# ifdef RT_ARCH_AMD64
2751 mov rax, [uDRVal]
2752 mov dr3, rax
2753# else
2754 mov eax, [uDRVal]
2755 mov dr3, eax
2756# endif
2757 }
2758# endif
2759}
2760#endif
2761
2762
2763/**
2764 * Sets dr6.
2765 *
2766 * @param uDRVal Debug register value to write.
2767 */
2768#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2769RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR6(RTCCUINTXREG uDRVal);
2770#else
2771DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal)
2772{
2773# if RT_INLINE_ASM_USES_INTRIN
2774 __writedr(6, uDRVal);
2775# elif RT_INLINE_ASM_GNU_STYLE
2776# ifdef RT_ARCH_AMD64
2777 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2778# else
2779 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2780# endif
2781# else
2782 __asm
2783 {
2784# ifdef RT_ARCH_AMD64
2785 mov rax, [uDRVal]
2786 mov dr6, rax
2787# else
2788 mov eax, [uDRVal]
2789 mov dr6, eax
2790# endif
2791 }
2792# endif
2793}
2794#endif
2795
2796
2797/**
2798 * Sets dr7.
2799 *
2800 * @param uDRVal Debug register value to write.
2801 */
2802#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2803RT_ASM_DECL_PRAGMA_WATCOM(void) ASMSetDR7(RTCCUINTXREG uDRVal);
2804#else
2805DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal)
2806{
2807# if RT_INLINE_ASM_USES_INTRIN
2808 __writedr(7, uDRVal);
2809# elif RT_INLINE_ASM_GNU_STYLE
2810# ifdef RT_ARCH_AMD64
2811 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2812# else
2813 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2814# endif
2815# else
2816 __asm
2817 {
2818# ifdef RT_ARCH_AMD64
2819 mov rax, [uDRVal]
2820 mov dr7, rax
2821# else
2822 mov eax, [uDRVal]
2823 mov dr7, eax
2824# endif
2825 }
2826# endif
2827}
2828#endif
2829
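/*
 * Editorial example (not part of the original header): arming hardware
 * breakpoint 0 as a local execution breakpoint.  The DR7 encoding used (L0 in
 * bit 0, R/W0 in bits 17:16 = 00 for execute, LEN0 in bits 19:18 = 00 for one
 * byte) is the architected layout; the helper itself is hypothetical.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleArmExecBreakpoint0(RTCCUINTXREG uCodeAddr)
{
    RTCCUINTXREG fDR7 = ASMGetDR7();
    fDR7 &= ~(RTCCUINTXREG)(RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18) | RT_BIT_32(19)); /* R/W0=exec, LEN0=1 byte */
    fDR7 |= RT_BIT_32(0);                                                                   /* L0: locally enabled */
    ASMSetDR0(uCodeAddr);
    ASMSetDR7(fDR7);
}
#endif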
2830
2831/**
2832 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2833 *
2834 * @param Port I/O port to write to.
2835 * @param u8 8-bit integer to write.
2836 */
2837#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2838RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2839#else
2840DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2841{
2842# if RT_INLINE_ASM_GNU_STYLE
2843 __asm__ __volatile__("outb %b1, %w0\n\t"
2844 :: "Nd" (Port),
2845 "a" (u8));
2846
2847# elif RT_INLINE_ASM_USES_INTRIN
2848 __outbyte(Port, u8);
2849
2850# else
2851 __asm
2852 {
2853 mov dx, [Port]
2854 mov al, [u8]
2855 out dx, al
2856 }
2857# endif
2858}
2859#endif
2860
2861
2862/**
2863 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2864 *
2865 * @returns 8-bit integer.
2866 * @param Port I/O port to read from.
2867 */
2868#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2869RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMInU8(RTIOPORT Port);
2870#else
2871DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2872{
2873 uint8_t u8;
2874# if RT_INLINE_ASM_GNU_STYLE
2875 __asm__ __volatile__("inb %w1, %b0\n\t"
2876 : "=a" (u8)
2877 : "Nd" (Port));
2878
2879# elif RT_INLINE_ASM_USES_INTRIN
2880 u8 = __inbyte(Port);
2881
2882# else
2883 __asm
2884 {
2885 mov dx, [Port]
2886 in al, dx
2887 mov [u8], al
2888 }
2889# endif
2890 return u8;
2891}
2892#endif
2893
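/*
 * Editorial example (not part of the original header): the classic CMOS/RTC
 * access pattern - write the register index to port 0x70, then read the value
 * from port 0x71.  Ports are the conventional ISA ones; serializing against
 * other CMOS users is left out for brevity.
 */
#if 0 /* illustration only */
DECLINLINE(uint8_t) ExampleReadCmos(uint8_t bReg)
{
    ASMOutU8(0x70, bReg); /* select the CMOS register (bit 7 also gates NMI) */
    return ASMInU8(0x71); /* fetch its value */
}
#endif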
2894
2895/**
2896 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2897 *
2898 * @param Port I/O port to write to.
2899 * @param u16 16-bit integer to write.
2900 */
2901#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2902RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2903#else
2904DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2905{
2906# if RT_INLINE_ASM_GNU_STYLE
2907 __asm__ __volatile__("outw %w1, %w0\n\t"
2908 :: "Nd" (Port),
2909 "a" (u16));
2910
2911# elif RT_INLINE_ASM_USES_INTRIN
2912 __outword(Port, u16);
2913
2914# else
2915 __asm
2916 {
2917 mov dx, [Port]
2918 mov ax, [u16]
2919 out dx, ax
2920 }
2921# endif
2922}
2923#endif
2924
2925
2926/**
2927 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2928 *
2929 * @returns 16-bit integer.
2930 * @param Port I/O port to read from.
2931 */
2932#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2933RT_ASM_DECL_PRAGMA_WATCOM(uint16_t) ASMInU16(RTIOPORT Port);
2934#else
2935DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2936{
2937 uint16_t u16;
2938# if RT_INLINE_ASM_GNU_STYLE
2939 __asm__ __volatile__("inw %w1, %w0\n\t"
2940 : "=a" (u16)
2941 : "Nd" (Port));
2942
2943# elif RT_INLINE_ASM_USES_INTRIN
2944 u16 = __inword(Port);
2945
2946# else
2947 __asm
2948 {
2949 mov dx, [Port]
2950 in ax, dx
2951 mov [u16], ax
2952 }
2953# endif
2954 return u16;
2955}
2956#endif
2957
2958
2959/**
2960 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2961 *
2962 * @param Port I/O port to write to.
2963 * @param u32 32-bit integer to write.
2964 */
2965#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2966RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2967#else
2968DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2969{
2970# if RT_INLINE_ASM_GNU_STYLE
2971 __asm__ __volatile__("outl %1, %w0\n\t"
2972 :: "Nd" (Port),
2973 "a" (u32));
2974
2975# elif RT_INLINE_ASM_USES_INTRIN
2976 __outdword(Port, u32);
2977
2978# else
2979 __asm
2980 {
2981 mov dx, [Port]
2982 mov eax, [u32]
2983 out dx, eax
2984 }
2985# endif
2986}
2987#endif
2988
2989
2990/**
2991 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2992 *
2993 * @returns 32-bit integer.
2994 * @param Port I/O port to read from.
2995 */
2996#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2997RT_ASM_DECL_PRAGMA_WATCOM(uint32_t) ASMInU32(RTIOPORT Port);
2998#else
2999DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
3000{
3001 uint32_t u32;
3002# if RT_INLINE_ASM_GNU_STYLE
3003 __asm__ __volatile__("inl %w1, %0\n\t"
3004 : "=a" (u32)
3005 : "Nd" (Port));
3006
3007# elif RT_INLINE_ASM_USES_INTRIN
3008 u32 = __indword(Port);
3009
3010# else
3011 __asm
3012 {
3013 mov dx, [Port]
3014 in eax, dx
3015 mov [u32], eax
3016 }
3017# endif
3018 return u32;
3019}
3020#endif
3021
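/*
 * Editorial example (not part of the original header): reading a PCI
 * configuration dword via the legacy mechanism #1 ports - address out on
 * 0xcf8, data in on 0xcfc.  The address layout (enable bit 31, bus 23:16,
 * device 15:11, function 10:8, dword-aligned register 7:2) is per the PCI
 * specification.
 */
#if 0 /* illustration only */
DECLINLINE(uint32_t) ExamplePciConfigRead32(uint8_t uBus, uint8_t uDevice, uint8_t uFunction, uint8_t offReg)
{
    uint32_t const uAddr = RT_BIT_32(31)
                         | ((uint32_t)uBus               << 16)
                         | ((uint32_t)(uDevice   & 0x1f) << 11)
                         | ((uint32_t)(uFunction & 0x07) <<  8)
                         | (offReg & 0xfc);
    ASMOutU32(0xcf8, uAddr);
    return ASMInU32(0xcfc);
}
#endif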
3022
3023/**
3024 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
3025 *
3026 * @param Port I/O port to write to.
3027 * @param pau8 Pointer to the string buffer.
3028 * @param c The number of items to write.
3029 */
3030#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3031RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c);
3032#else
3033DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const RT_FAR *pau8, size_t c)
3034{
3035# if RT_INLINE_ASM_GNU_STYLE
3036 __asm__ __volatile__("rep; outsb\n\t"
3037 : "+S" (pau8),
3038 "+c" (c)
3039 : "d" (Port));
3040
3041# elif RT_INLINE_ASM_USES_INTRIN
3042 __outbytestring(Port, (unsigned char RT_FAR *)pau8, (unsigned long)c);
3043
3044# else
3045 __asm
3046 {
3047 mov dx, [Port]
3048 mov ecx, [c]
3049 mov eax, [pau8]
3050 xchg esi, eax
3051 rep outsb
3052 xchg esi, eax
3053 }
3054# endif
3055}
3056#endif
3057
3058
3059/**
3060 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
3061 *
3062 * @param Port I/O port to read from.
3063 * @param pau8 Pointer to the string buffer (output).
3064 * @param c The number of items to read.
3065 */
3066#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3067RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c);
3068#else
3069DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t RT_FAR *pau8, size_t c)
3070{
3071# if RT_INLINE_ASM_GNU_STYLE
3072 __asm__ __volatile__("rep; insb\n\t"
3073 : "+D" (pau8),
3074 "+c" (c)
3075 : "d" (Port));
3076
3077# elif RT_INLINE_ASM_USES_INTRIN
3078 __inbytestring(Port, pau8, (unsigned long)c);
3079
3080# else
3081 __asm
3082 {
3083 mov dx, [Port]
3084 mov ecx, [c]
3085 mov eax, [pau8]
3086 xchg edi, eax
3087 rep insb
3088 xchg edi, eax
3089 }
3090# endif
3091}
3092#endif
3093
3094
3095/**
3096 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
3097 *
3098 * @param Port I/O port to write to.
3099 * @param pau16 Pointer to the string buffer.
3100 * @param c The number of items to write.
3101 */
3102#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3103RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c);
3104#else
3105DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const RT_FAR *pau16, size_t c)
3106{
3107# if RT_INLINE_ASM_GNU_STYLE
3108 __asm__ __volatile__("rep; outsw\n\t"
3109 : "+S" (pau16),
3110 "+c" (c)
3111 : "d" (Port));
3112
3113# elif RT_INLINE_ASM_USES_INTRIN
3114 __outwordstring(Port, (unsigned short RT_FAR *)pau16, (unsigned long)c);
3115
3116# else
3117 __asm
3118 {
3119 mov dx, [Port]
3120 mov ecx, [c]
3121 mov eax, [pau16]
3122 xchg esi, eax
3123 rep outsw
3124 xchg esi, eax
3125 }
3126# endif
3127}
3128#endif
3129
3130
3131/**
3132 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
3133 *
3134 * @param Port I/O port to read from.
3135 * @param pau16 Pointer to the string buffer (output).
3136 * @param c The number of items to read.
3137 */
3138#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3139RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c);
3140#else
3141DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t RT_FAR *pau16, size_t c)
3142{
3143# if RT_INLINE_ASM_GNU_STYLE
3144 __asm__ __volatile__("rep; insw\n\t"
3145 : "+D" (pau16),
3146 "+c" (c)
3147 : "d" (Port));
3148
3149# elif RT_INLINE_ASM_USES_INTRIN
3150 __inwordstring(Port, pau16, (unsigned long)c);
3151
3152# else
3153 __asm
3154 {
3155 mov dx, [Port]
3156 mov ecx, [c]
3157 mov eax, [pau16]
3158 xchg edi, eax
3159 rep insw
3160 xchg edi, eax
3161 }
3162# endif
3163}
3164#endif
3165
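/*
 * Editorial example (not part of the original header): string input suits PIO
 * devices that stream a whole block through a single data port.  Pulling one
 * 512-byte ATA sector (256 words) from the primary channel's data port 0x1f0
 * looks like this; the DRQ status polling that must precede it is omitted.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleReadAtaSector(uint16_t RT_FAR *pau16Sector /* 256 entries */)
{
    ASMInStrU16(0x1f0, pau16Sector, 256); /* rep insw: one sector */
}
#endif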
3166
3167/**
3168 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
3169 *
3170 * @param Port I/O port to write to.
3171 * @param pau32 Pointer to the string buffer.
3172 * @param c The number of items to write.
3173 */
3174#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3175RT_ASM_DECL_PRAGMA_WATCOM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c);
3176#else
3177DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const RT_FAR *pau32, size_t c)
3178{
3179# if RT_INLINE_ASM_GNU_STYLE
3180 __asm__ __volatile__("rep; outsl\n\t"
3181 : "+S" (pau32),
3182 "+c" (c)
3183 : "d" (Port));
3184
3185# elif RT_INLINE_ASM_USES_INTRIN
3186 __outdwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);
3187
3188# else
3189 __asm
3190 {
3191 mov dx, [Port]
3192 mov ecx, [c]
3193 mov eax, [pau32]
3194 xchg esi, eax
3195 rep outsd
3196 xchg esi, eax
3197 }
3198# endif
3199}
3200#endif
3201
3202
3203/**
3204 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
3205 *
3206 * @param Port I/O port to read from.
3207 * @param pau32 Pointer to the string buffer (output).
3208 * @param c The number of items to read.
3209 */
3210#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3211RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c);
3212#else
3213DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t RT_FAR *pau32, size_t c)
3214{
3215# if RT_INLINE_ASM_GNU_STYLE
3216 __asm__ __volatile__("rep; insl\n\t"
3217 : "+D" (pau32),
3218 "+c" (c)
3219 : "d" (Port));
3220
3221# elif RT_INLINE_ASM_USES_INTRIN
3222 __indwordstring(Port, (unsigned long RT_FAR *)pau32, (unsigned long)c);
3223
3224# else
3225 __asm
3226 {
3227 mov dx, [Port]
3228 mov ecx, [c]
3229 mov eax, [pau32]
3230 xchg edi, eax
3231 rep insd
3232 xchg edi, eax
3233 }
3234# endif
3235}
3236#endif
3237
3238
3239/**
3240 * Invalidates a page (INVLPG).
3241 *
3242 * @param uPtr Address of the page to invalidate.
3243 */
3244#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3245RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidatePage(RTCCUINTXREG uPtr);
3246#else
3247DECLINLINE(void) ASMInvalidatePage(RTCCUINTXREG uPtr)
3248{
3249# if RT_INLINE_ASM_USES_INTRIN
3250 __invlpg((void RT_FAR *)uPtr);
3251
3252# elif RT_INLINE_ASM_GNU_STYLE
3253 __asm__ __volatile__("invlpg %0\n\t"
3254 : : "m" (*(uint8_t RT_FAR *)(uintptr_t)uPtr));
3255# else
3256 __asm
3257 {
3258# ifdef RT_ARCH_AMD64
3259 mov rax, [uPtr]
3260 invlpg [rax]
3261# else
3262 mov eax, [uPtr]
3263 invlpg [eax]
3264# endif
3265 }
3266# endif
3267}
3268#endif
3269
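/*
 * Editorial example (not part of the original header): INVLPG must follow a
 * PTE modification, otherwise the TLB may keep serving the stale translation.
 * The PTE write below is a stand-in; the ordering is the point.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleRemapPage(uint64_t RT_FAR *pPte, uint64_t uNewPte, RTCCUINTXREG uPtrPage)
{
    *pPte = uNewPte;             /* hypothetical page-table entry update */
    ASMInvalidatePage(uPtrPage); /* flush the stale TLB entry for that linear address */
}
#endif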
3270
3271/**
3272 * Writes back the internal caches and invalidates them (WBINVD).
3273 */
3274#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3275RT_ASM_DECL_PRAGMA_WATCOM(void) ASMWriteBackAndInvalidateCaches(void);
3276#else
3277DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
3278{
3279# if RT_INLINE_ASM_USES_INTRIN
3280 __wbinvd();
3281
3282# elif RT_INLINE_ASM_GNU_STYLE
3283 __asm__ __volatile__("wbinvd");
3284# else
3285 __asm
3286 {
3287 wbinvd
3288 }
3289# endif
3290}
3291#endif
3292
3293
3294/**
3295 * Invalidates internal and (perhaps) external caches without first
3296 * flushing dirty cache lines (INVD). Use with extreme care.
3297 */
3298#if RT_INLINE_ASM_EXTERNAL
3299RT_ASM_DECL_PRAGMA_WATCOM(void) ASMInvalidateInternalCaches(void);
3300#else
3301DECLINLINE(void) ASMInvalidateInternalCaches(void)
3302{
3303# if RT_INLINE_ASM_GNU_STYLE
3304 __asm__ __volatile__("invd");
3305# else
3306 __asm
3307 {
3308 invd
3309 }
3310# endif
3311}
3312#endif
3313
3314
3315/**
3316 * Memory load/store fence, waits for any pending writes and reads to complete.
3317 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
3318 */
3319DECLINLINE(void) ASMMemoryFenceSSE2(void)
3320{
3321#if RT_INLINE_ASM_GNU_STYLE
3322 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t"); /* mfence */
3323#elif RT_INLINE_ASM_USES_INTRIN
3324 _mm_mfence();
3325#else
3326 __asm
3327 {
3328 _emit 0x0f
3329 _emit 0xae
3330 _emit 0xf0
3331 }
3332#endif
3333}
3334
3335
3336/**
3337 * Memory store fence, waits for any writes to complete.
3338 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit to be set.
3339 */
3340DECLINLINE(void) ASMWriteFenceSSE(void)
3341{
3342#if RT_INLINE_ASM_GNU_STYLE
3343 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t"); /* sfence */
3344#elif RT_INLINE_ASM_USES_INTRIN
3345 _mm_sfence();
3346#else
3347 __asm
3348 {
3349 _emit 0x0f
3350 _emit 0xae
3351 _emit 0xf8
3352 }
3353#endif
3354}
3355
3356
3357/**
3358 * Memory load fence, waits for any pending reads to complete.
3359 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
3360 */
3361DECLINLINE(void) ASMReadFenceSSE2(void)
3362{
3363#if RT_INLINE_ASM_GNU_STYLE
3364 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t"); /* lfence */
3365#elif RT_INLINE_ASM_USES_INTRIN
3366 _mm_lfence();
3367#else
3368 __asm
3369 {
3370 _emit 0x0f
3371 _emit 0xae
3372 _emit 0xe8
3373 }
3374#endif
3375}
3376
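/*
 * Editorial example (not part of the original header): the SSE/SSE2 fences
 * above must be gated on CPUID leaf 1 EDX bit 25 (SSE) resp. bit 26 (SSE2).
 * This sketch assumes the ASMCpuId_EDX helper declared earlier in this
 * header; without SSE2 one would fall back to ASMMemoryFence() from
 * iprt/asm.h instead.
 */
#if 0 /* illustration only */
DECLINLINE(void) ExampleFenceIfSupported(void)
{
    if (ASMCpuId_EDX(1) & RT_BIT_32(26)) /* X86_CPUID_FEATURE_EDX_SSE2 */
        ASMMemoryFenceSSE2();
    /* else: use the lock-prefixed ASMMemoryFence() fallback. */
}
#endif
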
3377#if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)
3378
3379/**
3380 * Clears the AC bit in the EFLAGS register (CLAC).
3381 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit to be set.
3382 * Must be executed in ring-0 (R0).
3383 */
3384DECLINLINE(void) ASMClearAC(void)
3385{
3386#if RT_INLINE_ASM_GNU_STYLE
3387 __asm__ __volatile__ (".byte 0x0f,0x01,0xca\n\t"); /* clac */
3388#else
3389 __asm
3390 {
3391 _emit 0x0f
3392 _emit 0x01
3393 _emit 0xca
3394 }
3395#endif
3396}
3397
3398
3399/**
3400 * Sets the AC bit in the EFLAGS register (STAC).
3401 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit to be set.
3402 * Must be executed in ring-0 (R0).
3403 */
3404DECLINLINE(void) ASMSetAC(void)
3405{
3406#if RT_INLINE_ASM_GNU_STYLE
3407 __asm__ __volatile__ (".byte 0x0f,0x01,0xcb\n\t"); /* stac */
3408#else
3409 __asm
3410 {
3411 _emit 0x0f
3412 _emit 0x01
3413 _emit 0xcb
3414 }
3415#endif
3416}
3417
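/*
 * Editorial example (not part of the original header): on SMAP-capable CPUs,
 * ring-0 code brackets deliberate user-memory access with ASMSetAC (stac) and
 * ASMClearAC (clac).  The accessor is a stand-in; real code would also check
 * the SMAP CPUID bit first.
 */
# if 0 /* illustration only */
DECLINLINE(uint8_t) ExamplePeekUserByte(uint8_t const RT_FAR *pbUser)
{
    uint8_t bValue;
    ASMSetAC();       /* stac: permit supervisor access to user pages */
    bValue = *pbUser; /* the deliberate user-mode access */
    ASMClearAC();     /* clac: re-arm SMAP */
    return bValue;
}
# endif
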
3418#endif /* !_MSC_VER || !RT_ARCH_AMD64 */
3419
3420
3421/*
3422 * Include #pragma aux definitions for Watcom C/C++.
3423 */
3424#if defined(__WATCOMC__) && ARCH_BITS == 16
3425# define IPRT_ASM_AMD64_X86_WATCOM_16_INSTANTIATE
3426# undef IPRT_INCLUDED_asm_amd64_x86_watcom_16_h
3427# include "asm-amd64-x86-watcom-16.h"
3428#elif defined(__WATCOMC__) && ARCH_BITS == 32
3429# define IPRT_ASM_AMD64_X86_WATCOM_32_INSTANTIATE
3430# undef IPRT_INCLUDED_asm_amd64_x86_watcom_32_h
3431# include "asm-amd64-x86-watcom-32.h"
3432#endif
3433
3434
3435/** @} */
3436#endif /* !IPRT_INCLUDED_asm_amd64_x86_h */
3437