VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h @ 59240

Last change on this file since 59240 was 58749, checked in by vboxsync, 9 years ago

iprt/asm*.h: Watcom & RT_MANGLER, include #pragma aux before function prototypes so we can #undef the mangled symbol.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 73.3 KB
1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2015 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#include <iprt/assert.h>
31#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
32# error "Not on AMD64 or x86"
33#endif
34
35#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
36# include <intrin.h>
37 /* Emit the intrinsics at all optimization levels. */
38# pragma intrinsic(_ReadWriteBarrier)
39# pragma intrinsic(__cpuid)
40# pragma intrinsic(_enable)
41# pragma intrinsic(_disable)
42# pragma intrinsic(__rdtsc)
43# pragma intrinsic(__readmsr)
44# pragma intrinsic(__writemsr)
45# pragma intrinsic(__outbyte)
46# pragma intrinsic(__outbytestring)
47# pragma intrinsic(__outword)
48# pragma intrinsic(__outwordstring)
49# pragma intrinsic(__outdword)
50# pragma intrinsic(__outdwordstring)
51# pragma intrinsic(__inbyte)
52# pragma intrinsic(__inbytestring)
53# pragma intrinsic(__inword)
54# pragma intrinsic(__inwordstring)
55# pragma intrinsic(__indword)
56# pragma intrinsic(__indwordstring)
57# pragma intrinsic(__invlpg)
58# pragma intrinsic(__wbinvd)
59# pragma intrinsic(__readcr0)
60# pragma intrinsic(__readcr2)
61# pragma intrinsic(__readcr3)
62# pragma intrinsic(__readcr4)
63# pragma intrinsic(__writecr0)
64# pragma intrinsic(__writecr3)
65# pragma intrinsic(__writecr4)
66# pragma intrinsic(__readdr)
67# pragma intrinsic(__writedr)
68# ifdef RT_ARCH_AMD64
69# pragma intrinsic(__readcr8)
70# pragma intrinsic(__writecr8)
71# endif
72# if RT_INLINE_ASM_USES_INTRIN >= 15
73# pragma intrinsic(__readeflags)
74# pragma intrinsic(__writeeflags)
75# pragma intrinsic(__rdtscp)
76# endif
77#endif
78
79
80/*
81 * Include #pragma aux definitions for Watcom C/C++.
82 */
83#if defined(__WATCOMC__) && ARCH_BITS == 16
84# include "asm-amd64-x86-watcom-16.h"
85#elif defined(__WATCOMC__) && ARCH_BITS == 32
86# include "asm-amd64-x86-watcom-32.h"
87#endif
88
89
90/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
91 * @ingroup grp_rt_asm
92 * @{
93 */
94
95/** @todo find a more proper place for these structures? */
96
97#pragma pack(1)
98/** IDTR */
99typedef struct RTIDTR
100{
101 /** Size of the IDT. */
102 uint16_t cbIdt;
103 /** Address of the IDT. */
104#if ARCH_BITS != 64
105 uint32_t pIdt;
106#else
107 uint64_t pIdt;
108#endif
109} RTIDTR, *PRTIDTR;
110#pragma pack()
111
112#pragma pack(1)
113/** @internal */
114typedef struct RTIDTRALIGNEDINT
115{
116 /** Alignment padding. */
117 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];
118 /** The IDTR structure. */
119 RTIDTR Idtr;
120} RTIDTRALIGNEDINT;
121#pragma pack()
122
123/** Wrapped RTIDTR for preventing misalignment exceptions. */
124typedef union RTIDTRALIGNED
125{
126 /** Try to make sure this structure has optimal alignment. */
127 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
128 /** Aligned structure. */
129 RTIDTRALIGNEDINT s;
130} RTIDTRALIGNED;
131AssertCompileSize(RTIDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
132/** Pointer to an RTIDTR alignment wrapper. */
133typedef RTIDTRALIGNED *PRIDTRALIGNED;
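/* Layout note (illustrative): with ARCH_BITS == 64 the three padding words put
   Idtr at offset 6 of the 16 byte union, so the 64-bit pIdt member lands on an
   8 byte boundary; with ARCH_BITS == 32 one padding word puts the 32-bit pIdt
   at offset 4.  A usage sketch:

       RTIDTRALIGNED TmpIdtr;
       ASMGetIDTR(&TmpIdtr.s.Idtr);    // base field is naturally aligned
 */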
134
135
136#pragma pack(1)
137/** GDTR */
138typedef struct RTGDTR
139{
140 /** Size of the GDT. */
141 uint16_t cbGdt;
142 /** Address of the GDT. */
143#if ARCH_BITS != 64
144 uint32_t pGdt;
145#else
146 uint64_t pGdt;
147#endif
148} RTGDTR, *PRTGDTR;
149#pragma pack()
150
151#pragma pack(1)
152/** @internal */
153typedef struct RTGDTRALIGNEDINT
154{
155 /** Alignment padding. */
156 uint16_t au16Padding[ARCH_BITS == 64 ? 3 : 1];
157 /** The GDTR structure. */
158 RTGDTR Gdtr;
159} RTGDTRALIGNEDINT;
160#pragma pack()
161
162/** Wrapped RTGDTR for preventing misalignment exceptions. */
163typedef union RTGDTRALIGNED
164{
165 /** Try to make sure this structure has optimal alignment. */
166 uint64_t auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
167 /** Aligned structure. */
168 RTGDTRALIGNEDINT s;
169} RTGDTRALIGNED;
170AssertCompileSize(RTGDTRALIGNED, ((ARCH_BITS == 64) + 1) * 8);
171/** Pointer to an RTGDTR alignment wrapper. */
172typedef RTGDTRALIGNED *PRGDTRALIGNED;
173
174
175/**
176 * Gets the content of the IDTR CPU register.
177 * @param pIdtr Where to store the IDTR contents.
178 */
179#if RT_INLINE_ASM_EXTERNAL
180DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
181#else
182DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
183{
184# if RT_INLINE_ASM_GNU_STYLE
185 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
186# else
187 __asm
188 {
189# ifdef RT_ARCH_AMD64
190 mov rax, [pIdtr]
191 sidt [rax]
192# else
193 mov eax, [pIdtr]
194 sidt [eax]
195# endif
196 }
197# endif
198}
199#endif
200
201
202/**
203 * Gets the content of the IDTR.LIMIT CPU register.
204 * @returns IDTR limit.
205 */
206#if RT_INLINE_ASM_EXTERNAL
207DECLASM(uint16_t) ASMGetIdtrLimit(void);
208#else
209DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
210{
211 RTIDTRALIGNED TmpIdtr;
212# if RT_INLINE_ASM_GNU_STYLE
213 __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));
214# else
215 __asm
216 {
217 sidt [TmpIdtr.s.Idtr]
218 }
219# endif
220 return TmpIdtr.s.Idtr.cbIdt;
221}
222#endif
223
224
225/**
226 * Sets the content of the IDTR CPU register.
227 * @param pIdtr Where to load the IDTR contents from.
228 */
229#if RT_INLINE_ASM_EXTERNAL
230DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
231#else
232DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
233{
234# if RT_INLINE_ASM_GNU_STYLE
235 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
236# else
237 __asm
238 {
239# ifdef RT_ARCH_AMD64
240 mov rax, [pIdtr]
241 lidt [rax]
242# else
243 mov eax, [pIdtr]
244 lidt [eax]
245# endif
246 }
247# endif
248}
249#endif
250
251
252/**
253 * Gets the content of the GDTR CPU register.
254 * @param pGdtr Where to store the GDTR contents.
255 */
256#if RT_INLINE_ASM_EXTERNAL
257DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
258#else
259DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
260{
261# if RT_INLINE_ASM_GNU_STYLE
262 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
263# else
264 __asm
265 {
266# ifdef RT_ARCH_AMD64
267 mov rax, [pGdtr]
268 sgdt [rax]
269# else
270 mov eax, [pGdtr]
271 sgdt [eax]
272# endif
273 }
274# endif
275}
276#endif
277
278
279/**
280 * Sets the content of the GDTR CPU register.
281 * @param pGdtr Where to load the GDTR contents from.
282 */
283#if RT_INLINE_ASM_EXTERNAL
284DECLASM(void) ASMSetGDTR(const RTGDTR *pGdtr);
285#else
286DECLINLINE(void) ASMSetGDTR(const RTGDTR *pGdtr)
287{
288# if RT_INLINE_ASM_GNU_STYLE
289 __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
290# else
291 __asm
292 {
293# ifdef RT_ARCH_AMD64
294 mov rax, [pGdtr]
295 lgdt [rax]
296# else
297 mov eax, [pGdtr]
298 lgdt [eax]
299# endif
300 }
301# endif
302}
303#endif
304
305
306
307/**
308 * Get the CS register.
309 * @returns CS.
310 */
311#if RT_INLINE_ASM_EXTERNAL
312DECLASM(RTSEL) ASMGetCS(void);
313#else
314DECLINLINE(RTSEL) ASMGetCS(void)
315{
316 RTSEL SelCS;
317# if RT_INLINE_ASM_GNU_STYLE
318 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
319# else
320 __asm
321 {
322 mov ax, cs
323 mov [SelCS], ax
324 }
325# endif
326 return SelCS;
327}
328#endif
329
330
331/**
332 * Get the DS register.
333 * @returns DS.
334 */
335#if RT_INLINE_ASM_EXTERNAL
336DECLASM(RTSEL) ASMGetDS(void);
337#else
338DECLINLINE(RTSEL) ASMGetDS(void)
339{
340 RTSEL SelDS;
341# if RT_INLINE_ASM_GNU_STYLE
342 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
343# else
344 __asm
345 {
346 mov ax, ds
347 mov [SelDS], ax
348 }
349# endif
350 return SelDS;
351}
352#endif
353
354
355/**
356 * Get the ES register.
357 * @returns ES.
358 */
359#if RT_INLINE_ASM_EXTERNAL
360DECLASM(RTSEL) ASMGetES(void);
361#else
362DECLINLINE(RTSEL) ASMGetES(void)
363{
364 RTSEL SelES;
365# if RT_INLINE_ASM_GNU_STYLE
366 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
367# else
368 __asm
369 {
370 mov ax, es
371 mov [SelES], ax
372 }
373# endif
374 return SelES;
375}
376#endif
377
378
379/**
380 * Get the FS register.
381 * @returns FS.
382 */
383#if RT_INLINE_ASM_EXTERNAL
384DECLASM(RTSEL) ASMGetFS(void);
385#else
386DECLINLINE(RTSEL) ASMGetFS(void)
387{
388 RTSEL SelFS;
389# if RT_INLINE_ASM_GNU_STYLE
390 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
391# else
392 __asm
393 {
394 mov ax, fs
395 mov [SelFS], ax
396 }
397# endif
398 return SelFS;
399}
400#endif
401
402
403/**
404 * Get the GS register.
405 * @returns GS.
406 */
407#if RT_INLINE_ASM_EXTERNAL
408DECLASM(RTSEL) ASMGetGS(void);
409#else
410DECLINLINE(RTSEL) ASMGetGS(void)
411{
412 RTSEL SelGS;
413# if RT_INLINE_ASM_GNU_STYLE
414 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
415# else
416 __asm
417 {
418 mov ax, gs
419 mov [SelGS], ax
420 }
421# endif
422 return SelGS;
423}
424#endif
425
426
427/**
428 * Get the SS register.
429 * @returns SS.
430 */
431#if RT_INLINE_ASM_EXTERNAL
432DECLASM(RTSEL) ASMGetSS(void);
433#else
434DECLINLINE(RTSEL) ASMGetSS(void)
435{
436 RTSEL SelSS;
437# if RT_INLINE_ASM_GNU_STYLE
438 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
439# else
440 __asm
441 {
442 mov ax, ss
443 mov [SelSS], ax
444 }
445# endif
446 return SelSS;
447}
448#endif
449
450
451/**
452 * Get the TR register.
453 * @returns TR.
454 */
455#if RT_INLINE_ASM_EXTERNAL
456DECLASM(RTSEL) ASMGetTR(void);
457#else
458DECLINLINE(RTSEL) ASMGetTR(void)
459{
460 RTSEL SelTR;
461# if RT_INLINE_ASM_GNU_STYLE
462 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
463# else
464 __asm
465 {
466 str ax
467 mov [SelTR], ax
468 }
469# endif
470 return SelTR;
471}
472#endif
473
474
475/**
476 * Get the LDTR register.
477 * @returns LDTR.
478 */
479#if RT_INLINE_ASM_EXTERNAL
480DECLASM(RTSEL) ASMGetLDTR(void);
481#else
482DECLINLINE(RTSEL) ASMGetLDTR(void)
483{
484 RTSEL SelLDTR;
485# if RT_INLINE_ASM_GNU_STYLE
486 __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
487# else
488 __asm
489 {
490 sldt ax
491 mov [SelLDTR], ax
492 }
493# endif
494 return SelLDTR;
495}
496#endif
497
498
499/**
500 * Get the access rights for the segment selector.
501 *
502 * @returns The access rights on success or UINT32_MAX on failure.
503 * @param uSel The selector value.
504 *
505 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
506 * always have bits 0:7 as 0 (on both Intel & AMD).
507 */
508#if RT_INLINE_ASM_EXTERNAL
509DECLASM(uint32_t) ASMGetSegAttr(uint32_t uSel);
510#else
511DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
512{
513 uint32_t uAttr;
514 /* LAR only accesses 16 bits of the source operand, but eax is required
515 as the destination operand to get the full 32-bit access rights. */
516# if RT_INLINE_ASM_GNU_STYLE
517 __asm__ __volatile__("lar %1, %%eax\n\t"
518 "jz done%=\n\t"
519 "movl $0xffffffff, %%eax\n\t"
520 "done%=:\n\t"
521 "movl %%eax, %0\n\t"
522 : "=r" (uAttr)
523 : "r" (uSel)
524 : "cc", "%eax");
525# else
526 __asm
527 {
528 lar eax, [uSel]
529 jz done
530 mov eax, 0ffffffffh
531 done:
532 mov [uAttr], eax
533 }
534# endif
535 return uAttr;
536}
537#endif
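/* Usage sketch (illustrative): since valid access rights always have bits 0:7
   clear, UINT32_MAX is unambiguous as a failure indicator.  The DPL, for
   example, sits at bits 13:14 of the returned value:

       uint32_t uAttr = ASMGetSegAttr(uSel);
       uint32_t uDpl  = uAttr != UINT32_MAX ? (uAttr >> 13) & 3 : UINT32_MAX;
 */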
538
539
540/**
541 * Get the [RE]FLAGS register.
542 * @returns [RE]FLAGS.
543 */
544#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
545DECLASM(RTCCUINTREG) ASMGetFlags(void);
546#else
547DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
548{
549 RTCCUINTREG uFlags;
550# if RT_INLINE_ASM_GNU_STYLE
551# ifdef RT_ARCH_AMD64
552 __asm__ __volatile__("pushfq\n\t"
553 "popq %0\n\t"
554 : "=r" (uFlags));
555# else
556 __asm__ __volatile__("pushfl\n\t"
557 "popl %0\n\t"
558 : "=r" (uFlags));
559# endif
560# elif RT_INLINE_ASM_USES_INTRIN >= 15
561 uFlags = __readeflags();
562# else
563 __asm
564 {
565# ifdef RT_ARCH_AMD64
566 pushfq
567 pop [uFlags]
568# else
569 pushfd
570 pop [uFlags]
571# endif
572 }
573# endif
574 return uFlags;
575}
576#endif
577
578
579/**
580 * Set the [RE]FLAGS register.
581 * @param uFlags The new [RE]FLAGS value.
582 */
583#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
584DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
585#else
586DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
587{
588# if RT_INLINE_ASM_GNU_STYLE
589# ifdef RT_ARCH_AMD64
590 __asm__ __volatile__("pushq %0\n\t"
591 "popfq\n\t"
592 : : "g" (uFlags));
593# else
594 __asm__ __volatile__("pushl %0\n\t"
595 "popfl\n\t"
596 : : "g" (uFlags));
597# endif
598# elif RT_INLINE_ASM_USES_INTRIN >= 15
599 __writeeflags(uFlags);
600# else
601 __asm
602 {
603# ifdef RT_ARCH_AMD64
604 push [uFlags]
605 popfq
606# else
607 push [uFlags]
608 popfd
609# endif
610 }
611# endif
612}
613#endif
614
615
616/**
617 * Modifies the [RE]FLAGS register.
618 * @returns Original value.
619 * @param fAndEfl Flags to keep (applied first).
620 * @param fOrEfl Flags to be set.
621 */
622#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
623DECLASM(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl);
624#else
625DECLINLINE(RTCCUINTREG) ASMChangeFlags(RTCCUINTREG fAndEfl, RTCCUINTREG fOrEfl)
626{
627 RTCCUINTREG fOldEfl;
628# if RT_INLINE_ASM_GNU_STYLE
629# ifdef RT_ARCH_AMD64
630 __asm__ __volatile__("pushfq\n\t"
631 "movq (%%rsp), %0\n\t"
632 "andq %0, %1\n\t"
633 "orq %3, %1\n\t"
634 "mov %1, (%%rsp)\n\t"
635 "popfq\n\t"
636 : "=&r" (fOldEfl),
637 "=r" (fAndEfl)
638 : "1" (fAndEfl),
639 "rn" (fOrEfl) );
640# else
641 __asm__ __volatile__("pushfl\n\t"
642 "movl (%%esp), %0\n\t"
643 "andl %1, (%%esp)\n\t"
644 "orl %2, (%%esp)\n\t"
645 "popfl\n\t"
646 : "=&r" (fOldEfl)
647 : "rn" (fAndEfl),
648 "rn" (fOrEfl) );
649# endif
650# elif RT_INLINE_ASM_USES_INTRIN >= 15
651 fOldEfl = __readeflags();
652 __writeeflags((fOldEfl & fAndEfl) | fOrEfl);
653# else
654 __asm
655 {
656# ifdef RT_ARCH_AMD64
657 mov rdx, [fAndEfl]
658 mov rcx, [fOrEfl]
659 pushfq
660 mov rax, [rsp]
661 and rdx, rax
662 or rdx, rcx
663 mov [rsp], rdx
664 popfq
665 mov [fOldEfl], rax
666# else
667 mov edx, [fAndEfl]
668 mov ecx, [fOrEfl]
669 pushfd
670 mov eax, [esp]
671 and edx, eax
672 or edx, ecx
673 mov [esp], edx
674 popfd
675 mov [fOldEfl], eax
676# endif
677 }
678# endif
679 return fOldEfl;
680}
681#endif
682
683
684/**
685 * Modifies the [RE]FLAGS register by ORing in one or more flags.
686 * @returns Original value.
687 * @param fOrEfl The flags to be set (ORed in).
688 */
689#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
690DECLASM(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl);
691#else
692DECLINLINE(RTCCUINTREG) ASMAddFlags(RTCCUINTREG fOrEfl)
693{
694 RTCCUINTREG fOldEfl;
695# if RT_INLINE_ASM_GNU_STYLE
696# ifdef RT_ARCH_AMD64
697 __asm__ __volatile__("pushfq\n\t"
698 "movq (%%rsp), %0\n\t"
699 "orq %1, (%%rsp)\n\t"
700 "popfq\n\t"
701 : "=&r" (fOldEfl)
702 : "rn" (fOrEfl) );
703# else
704 __asm__ __volatile__("pushfl\n\t"
705 "movl (%%esp), %0\n\t"
706 "orl %1, (%%esp)\n\t"
707 "popfl\n\t"
708 : "=&r" (fOldEfl)
709 : "rn" (fOrEfl) );
710# endif
711# elif RT_INLINE_ASM_USES_INTRIN >= 15
712 fOldEfl = __readeflags();
713 __writeeflags(fOldEfl | fOrEfl);
714# else
715 __asm
716 {
717# ifdef RT_ARCH_AMD64
718 mov rcx, [fOrEfl]
719 pushfq
720 mov rdx, [rsp]
721 or [rsp], rcx
722 popfq
723 mov [fOldEfl], rdx
724# else
725 mov ecx, [fOrEfl]
726 pushfd
727 mov edx, [esp]
728 or [esp], ecx
729 popfd
730 mov [fOldEfl], edx
731# endif
732 }
733# endif
734 return fOldEfl;
735}
736#endif
737
738
739/**
740 * Modifies the [RE]FLAGS register by AND'ing out one or more flags.
741 * @returns Original value.
742 * @param fAndEfl The flags to keep.
743 */
744#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
745DECLASM(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl);
746#else
747DECLINLINE(RTCCUINTREG) ASMClearFlags(RTCCUINTREG fAndEfl)
748{
749 RTCCUINTREG fOldEfl;
750# if RT_INLINE_ASM_GNU_STYLE
751# ifdef RT_ARCH_AMD64
752 __asm__ __volatile__("pushfq\n\t"
753 "movq (%%rsp), %0\n\t"
754 "andq %1, (%%rsp)\n\t"
755 "popfq\n\t"
756 : "=&r" (fOldEfl)
757 : "rn" (fAndEfl) );
758# else
759 __asm__ __volatile__("pushfl\n\t"
760 "movl (%%esp), %0\n\t"
761 "andl %1, (%%esp)\n\t"
762 "popfl\n\t"
763 : "=&r" (fOldEfl)
764 : "rn" (fAndEfl) );
765# endif
766# elif RT_INLINE_ASM_USES_INTRIN >= 15
767 fOldEfl = __readeflags();
768 __writeeflags(fOldEfl & fAndEfl);
769# else
770 __asm
771 {
772# ifdef RT_ARCH_AMD64
773 mov rcx, [fAndEfl]
774 pushfq
775 mov rdx, [rsp]
776 and [rsp], rcx
777 popfq
778 mov [fOldEfl], rdx
779# else
780 mov ecx, [fAndEfl]
781 pushfd
782 mov edx, [esp]
783 and [esp], ecx
784 popfd
785 mov [fOldEfl], edx
786# endif
787 }
788# endif
789 return fOldEfl;
790}
791#endif
792
793
794/**
795 * Gets the content of the CPU timestamp counter register.
796 *
797 * @returns TSC.
798 */
799#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
800DECLASM(uint64_t) ASMReadTSC(void);
801#else
802DECLINLINE(uint64_t) ASMReadTSC(void)
803{
804 RTUINT64U u;
805# if RT_INLINE_ASM_GNU_STYLE
806 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
807# else
808# if RT_INLINE_ASM_USES_INTRIN
809 u.u = __rdtsc();
810# else
811 __asm
812 {
813 rdtsc
814 mov [u.s.Lo], eax
815 mov [u.s.Hi], edx
816 }
817# endif
818# endif
819 return u.u;
820}
821#endif
822
823
824/**
825 * Gets the content of the CPU timestamp counter register and the
826 * assoicated AUX value.
827 *
828 * @returns TSC.
829 * @param puAux Where to store the AUX value.
830 */
831#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
832DECLASM(uint64_t) ASMReadTscWithAux(uint32_t *puAux);
833#else
834DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t *puAux)
835{
836 RTUINT64U u;
837# if RT_INLINE_ASM_GNU_STYLE
838 /* rdtscp is not supported by ancient linux build VM of course :-( */
839 /*__asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux)); */
840 __asm__ __volatile__(".byte 0x0f,0x01,0xf9\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));
841# else
842# if RT_INLINE_ASM_USES_INTRIN >= 15
843 u.u = __rdtscp(puAux);
844# else
845 __asm
846 {
847 rdtscp
848 mov [u.s.Lo], eax
849 mov [u.s.Hi], edx
850 mov eax, [puAux]
851 mov [eax], ecx
852 }
853# endif
854# endif
855 return u.u;
856}
857#endif
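/* The AUX value is whatever the OS loaded into IA32_TSC_AUX (MSR 0xc0000103),
   typically a CPU and/or NUMA node identifier, which makes rdtscp useful for
   detecting migration between two TSC reads. */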
858
859
860/**
861 * Performs the cpuid instruction returning all registers.
862 *
863 * @param uOperator CPUID operation (eax).
864 * @param pvEAX Where to store eax.
865 * @param pvEBX Where to store ebx.
866 * @param pvECX Where to store ecx.
867 * @param pvEDX Where to store edx.
868 * @remark We're using void pointers to ease the use of special bitfield structures and such.
869 */
870#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
871DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
872#else
873DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
874{
875# if RT_INLINE_ASM_GNU_STYLE
876# ifdef RT_ARCH_AMD64
877 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
878 __asm__ __volatile__ ("cpuid\n\t"
879 : "=a" (uRAX),
880 "=b" (uRBX),
881 "=c" (uRCX),
882 "=d" (uRDX)
883 : "0" (uOperator), "2" (0));
884 *(uint32_t *)pvEAX = (uint32_t)uRAX;
885 *(uint32_t *)pvEBX = (uint32_t)uRBX;
886 *(uint32_t *)pvECX = (uint32_t)uRCX;
887 *(uint32_t *)pvEDX = (uint32_t)uRDX;
888# else
889 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
890 "cpuid\n\t"
891 "xchgl %%ebx, %1\n\t"
892 : "=a" (*(uint32_t *)pvEAX),
893 "=r" (*(uint32_t *)pvEBX),
894 "=c" (*(uint32_t *)pvECX),
895 "=d" (*(uint32_t *)pvEDX)
896 : "0" (uOperator), "2" (0));
897# endif
898
899# elif RT_INLINE_ASM_USES_INTRIN
900 int aInfo[4];
901 __cpuid(aInfo, uOperator);
902 *(uint32_t *)pvEAX = aInfo[0];
903 *(uint32_t *)pvEBX = aInfo[1];
904 *(uint32_t *)pvECX = aInfo[2];
905 *(uint32_t *)pvEDX = aInfo[3];
906
907# else
908 uint32_t uEAX;
909 uint32_t uEBX;
910 uint32_t uECX;
911 uint32_t uEDX;
912 __asm
913 {
914 push ebx
915 mov eax, [uOperator]
916 cpuid
917 mov [uEAX], eax
918 mov [uEBX], ebx
919 mov [uECX], ecx
920 mov [uEDX], edx
921 pop ebx
922 }
923 *(uint32_t *)pvEAX = uEAX;
924 *(uint32_t *)pvEBX = uEBX;
925 *(uint32_t *)pvECX = uECX;
926 *(uint32_t *)pvEDX = uEDX;
927# endif
928}
929#endif
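/* Usage sketch (illustrative): leaf 0 returns the vendor string in the order
   EBX, EDX, ECX ("GenuineIntel", "AuthenticAMD", ...):

       uint32_t uEAX, uEBX, uECX, uEDX;
       char     szVendor[13];
       ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
       memcpy(&szVendor[0], &uEBX, 4);
       memcpy(&szVendor[4], &uEDX, 4);
       memcpy(&szVendor[8], &uECX, 4);
       szVendor[12] = '\0';                    // uEAX holds the max std leaf
 */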
930
931
932/**
933 * Performs the CPUID instruction with EAX and ECX input returning ALL output
934 * registers.
935 *
936 * @param uOperator CPUID operation (eax).
937 * @param uIdxECX The ECX index (sub-leaf).
938 * @param pvEAX Where to store eax.
939 * @param pvEBX Where to store ebx.
940 * @param pvECX Where to store ecx.
941 * @param pvEDX Where to store edx.
942 * @remark We're using void pointers to ease the use of special bitfield structures and such.
943 */
944#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
945DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
946#else
947DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
948{
949# if RT_INLINE_ASM_GNU_STYLE
950# ifdef RT_ARCH_AMD64
951 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
952 __asm__ ("cpuid\n\t"
953 : "=a" (uRAX),
954 "=b" (uRBX),
955 "=c" (uRCX),
956 "=d" (uRDX)
957 : "0" (uOperator),
958 "2" (uIdxECX));
959 *(uint32_t *)pvEAX = (uint32_t)uRAX;
960 *(uint32_t *)pvEBX = (uint32_t)uRBX;
961 *(uint32_t *)pvECX = (uint32_t)uRCX;
962 *(uint32_t *)pvEDX = (uint32_t)uRDX;
963# else
964 __asm__ ("xchgl %%ebx, %1\n\t"
965 "cpuid\n\t"
966 "xchgl %%ebx, %1\n\t"
967 : "=a" (*(uint32_t *)pvEAX),
968 "=r" (*(uint32_t *)pvEBX),
969 "=c" (*(uint32_t *)pvECX),
970 "=d" (*(uint32_t *)pvEDX)
971 : "0" (uOperator),
972 "2" (uIdxECX));
973# endif
974
975# elif RT_INLINE_ASM_USES_INTRIN
976 int aInfo[4];
977 __cpuidex(aInfo, uOperator, uIdxECX);
978 *(uint32_t *)pvEAX = aInfo[0];
979 *(uint32_t *)pvEBX = aInfo[1];
980 *(uint32_t *)pvECX = aInfo[2];
981 *(uint32_t *)pvEDX = aInfo[3];
982
983# else
984 uint32_t uEAX;
985 uint32_t uEBX;
986 uint32_t uECX;
987 uint32_t uEDX;
988 __asm
989 {
990 push ebx
991 mov eax, [uOperator]
992 mov ecx, [uIdxECX]
993 cpuid
994 mov [uEAX], eax
995 mov [uEBX], ebx
996 mov [uECX], ecx
997 mov [uEDX], edx
998 pop ebx
999 }
1000 *(uint32_t *)pvEAX = uEAX;
1001 *(uint32_t *)pvEBX = uEBX;
1002 *(uint32_t *)pvECX = uECX;
1003 *(uint32_t *)pvEDX = uEDX;
1004# endif
1005}
1006#endif
1007
1008
1009/**
1010 * CPUID variant that initializes all 4 registers before the CPUID instruction.
1011 *
1012 * @returns The EAX result value.
1013 * @param uOperator CPUID operation (eax).
1014 * @param uInitEBX The value to assign EBX prior to the CPUID instruction.
1015 * @param uInitECX The value to assign ECX prior to the CPUID instruction.
1016 * @param uInitEDX The value to assign EDX prior to the CPUID instruction.
1017 * @param pvEAX Where to store eax. Optional.
1018 * @param pvEBX Where to store ebx. Optional.
1019 * @param pvECX Where to store ecx. Optional.
1020 * @param pvEDX Where to store edx. Optional.
1021 */
1022DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
1023 void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
1024
1025
1026/**
1027 * Performs the cpuid instruction returning ecx and edx.
1028 *
1029 * @param uOperator CPUID operation (eax).
1030 * @param pvECX Where to store ecx.
1031 * @param pvEDX Where to store edx.
1032 * @remark We're using void pointers to ease the use of special bitfield structures and such.
1033 */
1034#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1035DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
1036#else
1037DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
1038{
1039 uint32_t uEBX;
1040 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
1041}
1042#endif
1043
1044
1045/**
1046 * Performs the cpuid instruction returning eax.
1047 *
1048 * @param uOperator CPUID operation (eax).
1049 * @returns EAX after cpuid operation.
1050 */
1051#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1052DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
1053#else
1054DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
1055{
1056 RTCCUINTREG xAX;
1057# if RT_INLINE_ASM_GNU_STYLE
1058# ifdef RT_ARCH_AMD64
1059 __asm__ ("cpuid"
1060 : "=a" (xAX)
1061 : "0" (uOperator)
1062 : "rbx", "rcx", "rdx");
1063# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1064 __asm__ ("push %%ebx\n\t"
1065 "cpuid\n\t"
1066 "pop %%ebx\n\t"
1067 : "=a" (xAX)
1068 : "0" (uOperator)
1069 : "ecx", "edx");
1070# else
1071 __asm__ ("cpuid"
1072 : "=a" (xAX)
1073 : "0" (uOperator)
1074 : "edx", "ecx", "ebx");
1075# endif
1076
1077# elif RT_INLINE_ASM_USES_INTRIN
1078 int aInfo[4];
1079 __cpuid(aInfo, uOperator);
1080 xAX = aInfo[0];
1081
1082# else
1083 __asm
1084 {
1085 push ebx
1086 mov eax, [uOperator]
1087 cpuid
1088 mov [xAX], eax
1089 pop ebx
1090 }
1091# endif
1092 return (uint32_t)xAX;
1093}
1094#endif
1095
1096
1097/**
1098 * Performs the cpuid instruction returning ebx.
1099 *
1100 * @param uOperator CPUID operation (eax).
1101 * @returns EBX after cpuid operation.
1102 */
1103#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1104DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
1105#else
1106DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
1107{
1108 RTCCUINTREG xBX;
1109# if RT_INLINE_ASM_GNU_STYLE
1110# ifdef RT_ARCH_AMD64
1111 RTCCUINTREG uSpill;
1112 __asm__ ("cpuid"
1113 : "=a" (uSpill),
1114 "=b" (xBX)
1115 : "0" (uOperator)
1116 : "rdx", "rcx");
1117# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1118 __asm__ ("push %%ebx\n\t"
1119 "cpuid\n\t"
1120 "mov %%ebx, %%edx\n\t"
1121 "pop %%ebx\n\t"
1122 : "=a" (uOperator),
1123 "=d" (xBX)
1124 : "0" (uOperator)
1125 : "ecx");
1126# else
1127 __asm__ ("cpuid"
1128 : "=a" (uOperator),
1129 "=b" (xBX)
1130 : "0" (uOperator)
1131 : "edx", "ecx");
1132# endif
1133
1134# elif RT_INLINE_ASM_USES_INTRIN
1135 int aInfo[4];
1136 __cpuid(aInfo, uOperator);
1137 xBX = aInfo[1];
1138
1139# else
1140 __asm
1141 {
1142 push ebx
1143 mov eax, [uOperator]
1144 cpuid
1145 mov [xBX], ebx
1146 pop ebx
1147 }
1148# endif
1149 return (uint32_t)xBX;
1150}
1151#endif
1152
1153
1154/**
1155 * Performs the cpuid instruction returning ecx.
1156 *
1157 * @param uOperator CPUID operation (eax).
1158 * @returns ECX after cpuid operation.
1159 */
1160#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1161DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
1162#else
1163DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
1164{
1165 RTCCUINTREG xCX;
1166# if RT_INLINE_ASM_GNU_STYLE
1167# ifdef RT_ARCH_AMD64
1168 RTCCUINTREG uSpill;
1169 __asm__ ("cpuid"
1170 : "=a" (uSpill),
1171 "=c" (xCX)
1172 : "0" (uOperator)
1173 : "rbx", "rdx");
1174# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1175 __asm__ ("push %%ebx\n\t"
1176 "cpuid\n\t"
1177 "pop %%ebx\n\t"
1178 : "=a" (uOperator),
1179 "=c" (xCX)
1180 : "0" (uOperator)
1181 : "edx");
1182# else
1183 __asm__ ("cpuid"
1184 : "=a" (uOperator),
1185 "=c" (xCX)
1186 : "0" (uOperator)
1187 : "ebx", "edx");
1188
1189# endif
1190
1191# elif RT_INLINE_ASM_USES_INTRIN
1192 int aInfo[4];
1193 __cpuid(aInfo, uOperator);
1194 xCX = aInfo[2];
1195
1196# else
1197 __asm
1198 {
1199 push ebx
1200 mov eax, [uOperator]
1201 cpuid
1202 mov [xCX], ecx
1203 pop ebx
1204 }
1205# endif
1206 return (uint32_t)xCX;
1207}
1208#endif
1209
1210
1211/**
1212 * Performs the cpuid instruction returning edx.
1213 *
1214 * @param uOperator CPUID operation (eax).
1215 * @returns EDX after cpuid operation.
1216 */
1217#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1218DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
1219#else
1220DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
1221{
1222 RTCCUINTREG xDX;
1223# if RT_INLINE_ASM_GNU_STYLE
1224# ifdef RT_ARCH_AMD64
1225 RTCCUINTREG uSpill;
1226 __asm__ ("cpuid"
1227 : "=a" (uSpill),
1228 "=d" (xDX)
1229 : "0" (uOperator)
1230 : "rbx", "rcx");
1231# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1232 __asm__ ("push %%ebx\n\t"
1233 "cpuid\n\t"
1234 "pop %%ebx\n\t"
1235 : "=a" (uOperator),
1236 "=d" (xDX)
1237 : "0" (uOperator)
1238 : "ecx");
1239# else
1240 __asm__ ("cpuid"
1241 : "=a" (uOperator),
1242 "=d" (xDX)
1243 : "0" (uOperator)
1244 : "ebx", "ecx");
1245# endif
1246
1247# elif RT_INLINE_ASM_USES_INTRIN
1248 int aInfo[4];
1249 __cpuid(aInfo, uOperator);
1250 xDX = aInfo[3];
1251
1252# else
1253 __asm
1254 {
1255 push ebx
1256 mov eax, [uOperator]
1257 cpuid
1258 mov [xDX], edx
1259 pop ebx
1260 }
1261# endif
1262 return (uint32_t)xDX;
1263}
1264#endif
1265
1266
1267/**
1268 * Checks if the current CPU supports CPUID.
1269 *
1270 * @returns true if CPUID is supported.
1271 */
1272DECLINLINE(bool) ASMHasCpuId(void)
1273{
1274#ifdef RT_ARCH_AMD64
1275 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
1276#else /* !RT_ARCH_AMD64 */
1277 bool fRet = false;
1278# if RT_INLINE_ASM_GNU_STYLE
1279 uint32_t u1;
1280 uint32_t u2;
1281 __asm__ ("pushf\n\t"
1282 "pop %1\n\t"
1283 "mov %1, %2\n\t"
1284 "xorl $0x200000, %1\n\t"
1285 "push %1\n\t"
1286 "popf\n\t"
1287 "pushf\n\t"
1288 "pop %1\n\t"
1289 "cmpl %1, %2\n\t"
1290 "setne %0\n\t"
1291 "push %2\n\t"
1292 "popf\n\t"
1293 : "=m" (fRet), "=r" (u1), "=r" (u2));
1294# else
1295 __asm
1296 {
1297 pushfd
1298 pop eax
1299 mov ebx, eax
1300 xor eax, 0200000h
1301 push eax
1302 popfd
1303 pushfd
1304 pop eax
1305 cmp eax, ebx
1306 setne fRet
1307 push ebx
1308 popfd
1309 }
1310# endif
1311 return fRet;
1312#endif /* !RT_ARCH_AMD64 */
1313}
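/* The test above toggles bit 21 (EFLAGS.ID, 0x200000) and checks whether the
   change sticks; CPUs without CPUID support hardwire that bit, so the compare
   comes back equal and fRet stays false. */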
1314
1315
1316/**
1317 * Gets the APIC ID of the current CPU.
1318 *
1319 * @returns the APIC ID.
1320 */
1321#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1322DECLASM(uint8_t) ASMGetApicId(void);
1323#else
1324DECLINLINE(uint8_t) ASMGetApicId(void)
1325{
1326 RTCCUINTREG xBX;
1327# if RT_INLINE_ASM_GNU_STYLE
1328# ifdef RT_ARCH_AMD64
1329 RTCCUINTREG uSpill;
1330 __asm__ __volatile__ ("cpuid"
1331 : "=a" (uSpill),
1332 "=b" (xBX)
1333 : "0" (1)
1334 : "rcx", "rdx");
1335# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1336 RTCCUINTREG uSpill;
1337 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
1338 "cpuid\n\t"
1339 "xchgl %%ebx,%1\n\t"
1340 : "=a" (uSpill),
1341 "=rm" (xBX)
1342 : "0" (1)
1343 : "ecx", "edx");
1344# else
1345 RTCCUINTREG uSpill;
1346 __asm__ __volatile__ ("cpuid"
1347 : "=a" (uSpill),
1348 "=b" (xBX)
1349 : "0" (1)
1350 : "ecx", "edx");
1351# endif
1352
1353# elif RT_INLINE_ASM_USES_INTRIN
1354 int aInfo[4];
1355 __cpuid(aInfo, 1);
1356 xBX = aInfo[1];
1357
1358# else
1359 __asm
1360 {
1361 push ebx
1362 mov eax, 1
1363 cpuid
1364 mov [xBX], ebx
1365 pop ebx
1366 }
1367# endif
1368 return (uint8_t)(xBX >> 24);
1369}
1370#endif
1371
1372
1373/**
1374 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
1375 *
1376 * @returns true/false.
1377 * @param uEBX EBX return from ASMCpuId(0)
1378 * @param uECX ECX return from ASMCpuId(0)
1379 * @param uEDX EDX return from ASMCpuId(0)
1380 */
1381DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1382{
1383 return uEBX == UINT32_C(0x756e6547)
1384 && uECX == UINT32_C(0x6c65746e)
1385 && uEDX == UINT32_C(0x49656e69);
1386}
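/* The constants are "GenuineIntel" read as little-endian dwords:
   EBX = 0x756e6547 = "Genu", EDX = 0x49656e69 = "ineI", ECX = 0x6c65746e = "ntel". */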
1387
1388
1389/**
1390 * Tests if this is a genuine Intel CPU.
1391 *
1392 * @returns true/false.
1393 * @remarks ASSUMES that cpuid is supported by the CPU.
1394 */
1395DECLINLINE(bool) ASMIsIntelCpu(void)
1396{
1397 uint32_t uEAX, uEBX, uECX, uEDX;
1398 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1399 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
1400}
1401
1402
1403/**
1404 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
1405 *
1406 * @returns true/false.
1407 * @param uEBX EBX return from ASMCpuId(0)
1408 * @param uECX ECX return from ASMCpuId(0)
1409 * @param uEDX EDX return from ASMCpuId(0)
1410 */
1411DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1412{
1413 return uEBX == UINT32_C(0x68747541)
1414 && uECX == UINT32_C(0x444d4163)
1415 && uEDX == UINT32_C(0x69746e65);
1416}
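/* The constants are "AuthenticAMD" read as little-endian dwords:
   EBX = 0x68747541 = "Auth", EDX = 0x69746e65 = "enti", ECX = 0x444d4163 = "cAMD". */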
1417
1418
1419/**
1420 * Tests if this is an authentic AMD CPU.
1421 *
1422 * @returns true/false.
1423 * @remarks ASSUMES that cpuid is supported by the CPU.
1424 */
1425DECLINLINE(bool) ASMIsAmdCpu(void)
1426{
1427 uint32_t uEAX, uEBX, uECX, uEDX;
1428 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1429 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1430}
1431
1432
1433/**
1434 * Tests if it is a "CentaurHauls" VIA CPU based on the ASMCpuId(0) output.
1435 *
1436 * @returns true/false.
1437 * @param uEBX EBX return from ASMCpuId(0).
1438 * @param uECX ECX return from ASMCpuId(0).
1439 * @param uEDX EDX return from ASMCpuId(0).
1440 */
1441DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1442{
1443 return uEBX == UINT32_C(0x746e6543)
1444 && uECX == UINT32_C(0x736c7561)
1445 && uEDX == UINT32_C(0x48727561);
1446}
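/* The constants are "CentaurHauls" read as little-endian dwords:
   EBX = 0x746e6543 = "Cent", EDX = 0x48727561 = "aurH", ECX = 0x736c7561 = "auls". */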
1447
1448
1449/**
1450 * Tests if this is a "CentaurHauls" VIA CPU.
1451 *
1452 * @returns true/false.
1453 * @remarks ASSUMES that cpuid is supported by the CPU.
1454 */
1455DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1456{
1457 uint32_t uEAX, uEBX, uECX, uEDX;
1458 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1459 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1460}
1461
1462
1463/**
1464 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1465 *
1467 * @returns true/false.
1468 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1469 *
1470 * @note This only succeeds if there are at least two leaves in the range.
1471 * @remarks The upper range limit is just some half reasonable value we've
1472 * picked out of thin air.
1473 */
1474DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1475{
1476 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1477}
1478
1479
1480/**
1481 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1482 *
1485 * @returns true/false.
1486 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1487 *
1488 * @note This only succeeds if there are at least two leaves in the range.
1489 * @remarks The upper range limit is just some half reasonable value we've
1490 * picked out of thin air.
1491 */
1492DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1493{
1494 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1495}
1496
1497
1498/**
1499 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1500 *
1501 * @returns Family.
1502 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1503 */
1504DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1505{
1506 return ((uEAX >> 8) & 0xf) == 0xf
1507 ? ((uEAX >> 20) & 0x7f) + 0xf
1508 : ((uEAX >> 8) & 0xf);
1509}
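/* Worked example (illustrative): for an AMD family 10h signature such as
   uEAX = 0x00100f22, the base family field ((uEAX >> 8) & 0xf) reads 0xf, so
   the extended family is added in: ((uEAX >> 20) & 0x7f) + 0xf = 0x1 + 0xf = 0x10. */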
1510
1511
1512/**
1513 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1514 *
1515 * @returns Model.
1516 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1517 */
1518DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1519{
1520 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1521 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1522 : ((uEAX >> 4) & 0xf);
1523}
1524
1525
1526/**
1527 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1528 *
1529 * @returns Model.
1530 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1531 */
1532DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1533{
1534 return ((uEAX >> 8) & 0xf) == 0xf
1535 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1536 : ((uEAX >> 4) & 0xf);
1537}
1538
1539
1540/**
1541 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1542 *
1543 * @returns Model.
1544 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1545 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1545 * @param fIntel Whether it's an Intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1546 */
1547DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1548{
1549 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1550 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1551 : ((uEAX >> 4) & 0xf);
1552}
1553
1554
1555/**
1556 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1557 *
1558 * @returns Stepping.
1559 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1560 */
1561DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1562{
1563 return uEAX & 0xf;
1564}
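/* Worked example (illustrative): an Ivy Bridge signature of uEAX = 0x000306a9
   decodes as family 6, model ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
   = 0xa | 0x30 = 0x3a (the extended model applies since fIntel && family 6),
   and stepping 9. */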
1565
1566
1567/**
1568 * Get cr0.
1569 * @returns cr0.
1570 */
1571#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1572DECLASM(RTCCUINTXREG) ASMGetCR0(void);
1573#else
1574DECLINLINE(RTCCUINTXREG) ASMGetCR0(void)
1575{
1576 RTCCUINTXREG uCR0;
1577# if RT_INLINE_ASM_USES_INTRIN
1578 uCR0 = __readcr0();
1579
1580# elif RT_INLINE_ASM_GNU_STYLE
1581# ifdef RT_ARCH_AMD64
1582 __asm__ __volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0));
1583# else
1584 __asm__ __volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0));
1585# endif
1586# else
1587 __asm
1588 {
1589# ifdef RT_ARCH_AMD64
1590 mov rax, cr0
1591 mov [uCR0], rax
1592# else
1593 mov eax, cr0
1594 mov [uCR0], eax
1595# endif
1596 }
1597# endif
1598 return uCR0;
1599}
1600#endif
1601
1602
1603/**
1604 * Sets the CR0 register.
1605 * @param uCR0 The new CR0 value.
1606 */
1607#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1608DECLASM(void) ASMSetCR0(RTCCUINTXREG uCR0);
1609#else
1610DECLINLINE(void) ASMSetCR0(RTCCUINTXREG uCR0)
1611{
1612# if RT_INLINE_ASM_USES_INTRIN
1613 __writecr0(uCR0);
1614
1615# elif RT_INLINE_ASM_GNU_STYLE
1616# ifdef RT_ARCH_AMD64
1617 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1618# else
1619 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1620# endif
1621# else
1622 __asm
1623 {
1624# ifdef RT_ARCH_AMD64
1625 mov rax, [uCR0]
1626 mov cr0, rax
1627# else
1628 mov eax, [uCR0]
1629 mov cr0, eax
1630# endif
1631 }
1632# endif
1633}
1634#endif
1635
1636
1637/**
1638 * Get cr2.
1639 * @returns cr2.
1640 */
1641#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1642DECLASM(RTCCUINTXREG) ASMGetCR2(void);
1643#else
1644DECLINLINE(RTCCUINTXREG) ASMGetCR2(void)
1645{
1646 RTCCUINTXREG uCR2;
1647# if RT_INLINE_ASM_USES_INTRIN
1648 uCR2 = __readcr2();
1649
1650# elif RT_INLINE_ASM_GNU_STYLE
1651# ifdef RT_ARCH_AMD64
1652 __asm__ __volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2));
1653# else
1654 __asm__ __volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2));
1655# endif
1656# else
1657 __asm
1658 {
1659# ifdef RT_ARCH_AMD64
1660 mov rax, cr2
1661 mov [uCR2], rax
1662# else
1663 mov eax, cr2
1664 mov [uCR2], eax
1665# endif
1666 }
1667# endif
1668 return uCR2;
1669}
1670#endif
1671
1672
1673/**
1674 * Sets the CR2 register.
1675 * @param uCR2 The new CR2 value.
1676 */
1677#if RT_INLINE_ASM_EXTERNAL
1678DECLASM(void) ASMSetCR2(RTCCUINTXREG uCR2);
1679#else
1680DECLINLINE(void) ASMSetCR2(RTCCUINTXREG uCR2)
1681{
1682# if RT_INLINE_ASM_GNU_STYLE
1683# ifdef RT_ARCH_AMD64
1684 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1685# else
1686 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1687# endif
1688# else
1689 __asm
1690 {
1691# ifdef RT_ARCH_AMD64
1692 mov rax, [uCR2]
1693 mov cr2, rax
1694# else
1695 mov eax, [uCR2]
1696 mov cr2, eax
1697# endif
1698 }
1699# endif
1700}
1701#endif
1702
1703
1704/**
1705 * Get cr3.
1706 * @returns cr3.
1707 */
1708#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1709DECLASM(RTCCUINTXREG) ASMGetCR3(void);
1710#else
1711DECLINLINE(RTCCUINTXREG) ASMGetCR3(void)
1712{
1713 RTCCUINTXREG uCR3;
1714# if RT_INLINE_ASM_USES_INTRIN
1715 uCR3 = __readcr3();
1716
1717# elif RT_INLINE_ASM_GNU_STYLE
1718# ifdef RT_ARCH_AMD64
1719 __asm__ __volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3));
1720# else
1721 __asm__ __volatile__("movl %%cr3, %0\t\n" : "=r" (uCR3));
1722# endif
1723# else
1724 __asm
1725 {
1726# ifdef RT_ARCH_AMD64
1727 mov rax, cr3
1728 mov [uCR3], rax
1729# else
1730 mov eax, cr3
1731 mov [uCR3], eax
1732# endif
1733 }
1734# endif
1735 return uCR3;
1736}
1737#endif
1738
1739
1740/**
1741 * Sets the CR3 register.
1742 *
1743 * @param uCR3 New CR3 value.
1744 */
1745#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1746DECLASM(void) ASMSetCR3(RTCCUINTXREG uCR3);
1747#else
1748DECLINLINE(void) ASMSetCR3(RTCCUINTXREG uCR3)
1749{
1750# if RT_INLINE_ASM_USES_INTRIN
1751 __writecr3(uCR3);
1752
1753# elif RT_INLINE_ASM_GNU_STYLE
1754# ifdef RT_ARCH_AMD64
1755 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1756# else
1757 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1758# endif
1759# else
1760 __asm
1761 {
1762# ifdef RT_ARCH_AMD64
1763 mov rax, [uCR3]
1764 mov cr3, rax
1765# else
1766 mov eax, [uCR3]
1767 mov cr3, eax
1768# endif
1769 }
1770# endif
1771}
1772#endif
1773
1774
1775/**
1776 * Reloads the CR3 register.
1777 */
1778#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1779DECLASM(void) ASMReloadCR3(void);
1780#else
1781DECLINLINE(void) ASMReloadCR3(void)
1782{
1783# if RT_INLINE_ASM_USES_INTRIN
1784 __writecr3(__readcr3());
1785
1786# elif RT_INLINE_ASM_GNU_STYLE
1787 RTCCUINTXREG u;
1788# ifdef RT_ARCH_AMD64
1789 __asm__ __volatile__("movq %%cr3, %0\n\t"
1790 "movq %0, %%cr3\n\t"
1791 : "=r" (u));
1792# else
1793 __asm__ __volatile__("movl %%cr3, %0\n\t"
1794 "movl %0, %%cr3\n\t"
1795 : "=r" (u));
1796# endif
1797# else
1798 __asm
1799 {
1800# ifdef RT_ARCH_AMD64
1801 mov rax, cr3
1802 mov cr3, rax
1803# else
1804 mov eax, cr3
1805 mov cr3, eax
1806# endif
1807 }
1808# endif
1809}
1810#endif
1811
1812
1813/**
1814 * Get cr4.
1815 * @returns cr4.
1816 */
1817#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1818DECLASM(RTCCUINTXREG) ASMGetCR4(void);
1819#else
1820DECLINLINE(RTCCUINTXREG) ASMGetCR4(void)
1821{
1822 RTCCUINTXREG uCR4;
1823# if RT_INLINE_ASM_USES_INTRIN
1824 uCR4 = __readcr4();
1825
1826# elif RT_INLINE_ASM_GNU_STYLE
1827# ifdef RT_ARCH_AMD64
1828 __asm__ __volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4));
1829# else
1830 __asm__ __volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4));
1831# endif
1832# else
1833 __asm
1834 {
1835# ifdef RT_ARCH_AMD64
1836 mov rax, cr4
1837 mov [uCR4], rax
1838# else
1839 push eax /* just in case */
1840 /*mov eax, cr4*/
1841 _emit 0x0f
1842 _emit 0x20
1843 _emit 0xe0
1844 mov [uCR4], eax
1845 pop eax
1846# endif
1847 }
1848# endif
1849 return uCR4;
1850}
1851#endif
1852
1853
1854/**
1855 * Sets the CR4 register.
1856 *
1857 * @param uCR4 New CR4 value.
1858 */
1859#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1860DECLASM(void) ASMSetCR4(RTCCUINTXREG uCR4);
1861#else
1862DECLINLINE(void) ASMSetCR4(RTCCUINTXREG uCR4)
1863{
1864# if RT_INLINE_ASM_USES_INTRIN
1865 __writecr4(uCR4);
1866
1867# elif RT_INLINE_ASM_GNU_STYLE
1868# ifdef RT_ARCH_AMD64
1869 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1870# else
1871 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1872# endif
1873# else
1874 __asm
1875 {
1876# ifdef RT_ARCH_AMD64
1877 mov rax, [uCR4]
1878 mov cr4, rax
1879# else
1880 mov eax, [uCR4]
1881 _emit 0x0F
1882 _emit 0x22
1883 _emit 0xE0 /* mov cr4, eax */
1884# endif
1885 }
1886# endif
1887}
1888#endif
1889
1890
1891/**
1892 * Get cr8.
1893 * @returns cr8.
1894 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1895 */
1896#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1897DECLASM(RTCCUINTXREG) ASMGetCR8(void);
1898#else
1899DECLINLINE(RTCCUINTXREG) ASMGetCR8(void)
1900{
1901# ifdef RT_ARCH_AMD64
1902 RTCCUINTXREG uCR8;
1903# if RT_INLINE_ASM_USES_INTRIN
1904 uCR8 = __readcr8();
1905
1906# elif RT_INLINE_ASM_GNU_STYLE
1907 __asm__ __volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8));
1908# else
1909 __asm
1910 {
1911 mov rax, cr8
1912 mov [uCR8], rax
1913 }
1914# endif
1915 return uCR8;
1916# else /* !RT_ARCH_AMD64 */
1917 return 0;
1918# endif /* !RT_ARCH_AMD64 */
1919}
1920#endif
1921
1922
1923/**
1924 * Get XCR0 (eXtended feature Control Register 0).
1925 * @returns xcr0.
1926 */
1927DECLASM(uint64_t) ASMGetXcr0(void);
1928
1929/**
1930 * Sets the XCR0 register.
1931 * @param uXcr0 The new XCR0 value.
1932 */
1933DECLASM(void) ASMSetXcr0(uint64_t uXcr0);
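/* Implementation note: XCR0 is accessed with the xgetbv/xsetbv instructions
   (ECX = 0); xsetbv requires CPL 0 with CR4.OSXSAVE set. */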
1934
1935struct X86XSAVEAREA;
1936/**
1937 * Save extended CPU state.
1938 * @param pXStateArea Where to save the state.
1939 * @param fComponents Which state components to save.
1940 */
1941DECLASM(void) ASMXSave(struct X86XSAVEAREA *pXStateArea, uint64_t fComponents);
1942
1943/**
1944 * Loads extended CPU state.
1945 * @param pXStateArea Where to load the state from.
1946 * @param fComponents Which state components to load.
1947 */
1948DECLASM(void) ASMXRstor(struct X86XSAVEAREA const *pXStateArea, uint64_t fComponents);
1949
1950
1951/**
1952 * Enables interrupts (EFLAGS.IF).
1953 */
1954#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1955DECLASM(void) ASMIntEnable(void);
1956#else
1957DECLINLINE(void) ASMIntEnable(void)
1958{
1959# if RT_INLINE_ASM_GNU_STYLE
1960 __asm("sti\n");
1961# elif RT_INLINE_ASM_USES_INTRIN
1962 _enable();
1963# else
1964 __asm sti
1965# endif
1966}
1967#endif
1968
1969
1970/**
1971 * Disables interrupts (!EFLAGS.IF).
1972 */
1973#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1974DECLASM(void) ASMIntDisable(void);
1975#else
1976DECLINLINE(void) ASMIntDisable(void)
1977{
1978# if RT_INLINE_ASM_GNU_STYLE
1979 __asm("cli\n");
1980# elif RT_INLINE_ASM_USES_INTRIN
1981 _disable();
1982# else
1983 __asm cli
1984# endif
1985}
1986#endif
1987
1988
1989/**
1990 * Disables interrupts and returns previous xFLAGS.
1991 */
1992#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1993DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1994#else
1995DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1996{
1997 RTCCUINTREG xFlags;
1998# if RT_INLINE_ASM_GNU_STYLE
1999# ifdef RT_ARCH_AMD64
2000 __asm__ __volatile__("pushfq\n\t"
2001 "cli\n\t"
2002 "popq %0\n\t"
2003 : "=r" (xFlags));
2004# else
2005 __asm__ __volatile__("pushfl\n\t"
2006 "cli\n\t"
2007 "popl %0\n\t"
2008 : "=r" (xFlags));
2009# endif
2010# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
2011 xFlags = ASMGetFlags();
2012 _disable();
2013# else
2014 __asm {
2015 pushfd
2016 cli
2017 pop [xFlags]
2018 }
2019# endif
2020 return xFlags;
2021}
2022#endif
2023
2024
2025/**
2026 * Are interrupts enabled?
2027 *
2028 * @returns true / false.
2029 */
2030DECLINLINE(bool) ASMIntAreEnabled(void)
2031{
2032 RTCCUINTREG uFlags = ASMGetFlags();
2033 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
2034}
2035
2036
2037/**
2038 * Halts the CPU until interrupted.
2039 */
2040#if RT_INLINE_ASM_EXTERNAL
2041DECLASM(void) ASMHalt(void);
2042#else
2043DECLINLINE(void) ASMHalt(void)
2044{
2045# if RT_INLINE_ASM_GNU_STYLE
2046 __asm__ __volatile__("hlt\n\t");
2047# else
2048 __asm {
2049 hlt
2050 }
2051# endif
2052}
2053#endif
2054
2055
2056/**
2057 * Reads a machine specific register.
2058 *
2059 * @returns Register content.
2060 * @param uRegister Register to read.
2061 */
2062#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2063DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
2064#else
2065DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
2066{
2067 RTUINT64U u;
2068# if RT_INLINE_ASM_GNU_STYLE
2069 __asm__ __volatile__("rdmsr\n\t"
2070 : "=a" (u.s.Lo),
2071 "=d" (u.s.Hi)
2072 : "c" (uRegister));
2073
2074# elif RT_INLINE_ASM_USES_INTRIN
2075 u.u = __readmsr(uRegister);
2076
2077# else
2078 __asm
2079 {
2080 mov ecx, [uRegister]
2081 rdmsr
2082 mov [u.s.Lo], eax
2083 mov [u.s.Hi], edx
2084 }
2085# endif
2086
2087 return u.u;
2088}
2089#endif
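/* Usage sketch (illustrative): reading the local APIC base MSR, IA32_APIC_BASE
   (index 0x1b).  Like all rdmsr/wrmsr use, this requires CPL 0:

       uint64_t uApicBase = ASMRdMsr(0x1b);
       bool     fEnabled  = RT_BOOL(uApicBase & RT_BIT_64(11)); // APIC global enable
 */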
2090
2091
2092/**
2093 * Writes a machine specific register.
2094 *
2096 * @param uRegister Register to write to.
2097 * @param u64Val Value to write.
2098 */
2099#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2100DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
2101#else
2102DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
2103{
2104 RTUINT64U u;
2105
2106 u.u = u64Val;
2107# if RT_INLINE_ASM_GNU_STYLE
2108 __asm__ __volatile__("wrmsr\n\t"
2109 ::"a" (u.s.Lo),
2110 "d" (u.s.Hi),
2111 "c" (uRegister));
2112
2113# elif RT_INLINE_ASM_USES_INTRIN
2114 __writemsr(uRegister, u.u);
2115
2116# else
2117 __asm
2118 {
2119 mov ecx, [uRegister]
2120 mov edx, [u.s.Hi]
2121 mov eax, [u.s.Lo]
2122 wrmsr
2123 }
2124# endif
2125}
2126#endif
2127
2128
2129/**
2130 * Reads a machine specific register, extended version (for AMD).
2131 *
2132 * @returns Register content.
2133 * @param uRegister Register to read.
2134 * @param uXDI RDI/EDI value.
2135 */
2136#if RT_INLINE_ASM_EXTERNAL
2137DECLASM(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI);
2138#else
2139DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI)
2140{
2141 RTUINT64U u;
2142# if RT_INLINE_ASM_GNU_STYLE
2143 __asm__ __volatile__("rdmsr\n\t"
2144 : "=a" (u.s.Lo),
2145 "=d" (u.s.Hi)
2146 : "c" (uRegister),
2147 "D" (uXDI));
2148
2149# else
2150 __asm
2151 {
2152 mov ecx, [uRegister]
2153 xchg edi, [uXDI]
2154 rdmsr
2155 mov [u.s.Lo], eax
2156 mov [u.s.Hi], edx
2157 xchg edi, [uXDI]
2158 }
2159# endif
2160
2161 return u.u;
2162}
2163#endif
2164
2165
2166/**
2167 * Writes a machine specific register, extended version (for AMD).
2168 *
2170 * @param uRegister Register to write to.
2171 * @param uXDI RDI/EDI value.
2172 * @param u64Val Value to write.
2173 */
2174#if RT_INLINE_ASM_EXTERNAL
2175DECLASM(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val);
2176#else
2177DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTXREG uXDI, uint64_t u64Val)
2178{
2179 RTUINT64U u;
2180
2181 u.u = u64Val;
2182# if RT_INLINE_ASM_GNU_STYLE
2183 __asm__ __volatile__("wrmsr\n\t"
2184 ::"a" (u.s.Lo),
2185 "d" (u.s.Hi),
2186 "c" (uRegister),
2187 "D" (uXDI));
2188
2189# else
2190 __asm
2191 {
2192 mov ecx, [uRegister]
2193 xchg edi, [uXDI]
2194 mov edx, [u.s.Hi]
2195 mov eax, [u.s.Lo]
2196 wrmsr
2197 xchg edi, [uXDI]
2198 }
2199# endif
2200}
2201#endif
2202
2203
2204
2205/**
2206 * Reads low part of a machine specific register.
2207 *
2208 * @returns Register content.
2209 * @param uRegister Register to read.
2210 */
2211#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2212DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
2213#else
2214DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
2215{
2216 uint32_t u32;
2217# if RT_INLINE_ASM_GNU_STYLE
2218 __asm__ __volatile__("rdmsr\n\t"
2219 : "=a" (u32)
2220 : "c" (uRegister)
2221 : "edx");
2222
2223# elif RT_INLINE_ASM_USES_INTRIN
2224 u32 = (uint32_t)__readmsr(uRegister);
2225
2226# else
2227 __asm
2228 {
2229 mov ecx, [uRegister]
2230 rdmsr
2231 mov [u32], eax
2232 }
2233# endif
2234
2235 return u32;
2236}
2237#endif
2238
2239
2240/**
2241 * Reads high part of a machine specific register.
2242 *
2243 * @returns Register content.
2244 * @param uRegister Register to read.
2245 */
2246#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2247DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
2248#else
2249DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
2250{
2251 uint32_t u32;
2252# if RT_INLINE_ASM_GNU_STYLE
2253 __asm__ __volatile__("rdmsr\n\t"
2254 : "=d" (u32)
2255 : "c" (uRegister)
2256 : "eax");
2257
2258# elif RT_INLINE_ASM_USES_INTRIN
2259 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
2260
2261# else
2262 __asm
2263 {
2264 mov ecx, [uRegister]
2265 rdmsr
2266 mov [u32], edx
2267 }
2268# endif
2269
2270 return u32;
2271}
2272#endif
2273
2274
2275/**
2276 * Gets dr0.
2277 *
2278 * @returns dr0.
2279 */
2280#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2281DECLASM(RTCCUINTXREG) ASMGetDR0(void);
2282#else
2283DECLINLINE(RTCCUINTXREG) ASMGetDR0(void)
2284{
2285 RTCCUINTXREG uDR0;
2286# if RT_INLINE_ASM_USES_INTRIN
2287 uDR0 = __readdr(0);
2288# elif RT_INLINE_ASM_GNU_STYLE
2289# ifdef RT_ARCH_AMD64
2290 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
2291# else
2292 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
2293# endif
2294# else
2295 __asm
2296 {
2297# ifdef RT_ARCH_AMD64
2298 mov rax, dr0
2299 mov [uDR0], rax
2300# else
2301 mov eax, dr0
2302 mov [uDR0], eax
2303# endif
2304 }
2305# endif
2306 return uDR0;
2307}
2308#endif
2309
2310
2311/**
2312 * Gets dr1.
2313 *
2314 * @returns dr1.
2315 */
2316#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2317DECLASM(RTCCUINTXREG) ASMGetDR1(void);
2318#else
2319DECLINLINE(RTCCUINTXREG) ASMGetDR1(void)
2320{
2321 RTCCUINTXREG uDR1;
2322# if RT_INLINE_ASM_USES_INTRIN
2323 uDR1 = __readdr(1);
2324# elif RT_INLINE_ASM_GNU_STYLE
2325# ifdef RT_ARCH_AMD64
2326 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
2327# else
2328 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
2329# endif
2330# else
2331 __asm
2332 {
2333# ifdef RT_ARCH_AMD64
2334 mov rax, dr1
2335 mov [uDR1], rax
2336# else
2337 mov eax, dr1
2338 mov [uDR1], eax
2339# endif
2340 }
2341# endif
2342 return uDR1;
2343}
2344#endif
2345
2346
2347/**
2348 * Gets dr2.
2349 *
2350 * @returns dr2.
2351 */
2352#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2353DECLASM(RTCCUINTXREG) ASMGetDR2(void);
2354#else
2355DECLINLINE(RTCCUINTXREG) ASMGetDR2(void)
2356{
2357 RTCCUINTXREG uDR2;
2358# if RT_INLINE_ASM_USES_INTRIN
2359 uDR2 = __readdr(2);
2360# elif RT_INLINE_ASM_GNU_STYLE
2361# ifdef RT_ARCH_AMD64
2362 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
2363# else
2364 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
2365# endif
2366# else
2367 __asm
2368 {
2369# ifdef RT_ARCH_AMD64
2370 mov rax, dr2
2371 mov [uDR2], rax
2372# else
2373 mov eax, dr2
2374 mov [uDR2], eax
2375# endif
2376 }
2377# endif
2378 return uDR2;
2379}
2380#endif
2381
2382
2383/**
2384 * Gets dr3.
2385 *
2386 * @returns dr3.
2387 */
2388#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2389DECLASM(RTCCUINTXREG) ASMGetDR3(void);
2390#else
2391DECLINLINE(RTCCUINTXREG) ASMGetDR3(void)
2392{
2393 RTCCUINTXREG uDR3;
2394# if RT_INLINE_ASM_USES_INTRIN
2395 uDR3 = __readdr(3);
2396# elif RT_INLINE_ASM_GNU_STYLE
2397# ifdef RT_ARCH_AMD64
2398 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
2399# else
2400 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
2401# endif
2402# else
2403 __asm
2404 {
2405# ifdef RT_ARCH_AMD64
2406 mov rax, dr3
2407 mov [uDR3], rax
2408# else
2409 mov eax, dr3
2410 mov [uDR3], eax
2411# endif
2412 }
2413# endif
2414 return uDR3;
2415}
2416#endif
2417
2418
2419/**
2420 * Gets dr6.
2421 *
2422 * @returns dr6.
2423 */
2424#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2425DECLASM(RTCCUINTXREG) ASMGetDR6(void);
2426#else
2427DECLINLINE(RTCCUINTXREG) ASMGetDR6(void)
2428{
2429 RTCCUINTXREG uDR6;
2430# if RT_INLINE_ASM_USES_INTRIN
2431 uDR6 = __readdr(6);
2432# elif RT_INLINE_ASM_GNU_STYLE
2433# ifdef RT_ARCH_AMD64
2434 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
2435# else
2436 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
2437# endif
2438# else
2439 __asm
2440 {
2441# ifdef RT_ARCH_AMD64
2442 mov rax, dr6
2443 mov [uDR6], rax
2444# else
2445 mov eax, dr6
2446 mov [uDR6], eax
2447# endif
2448 }
2449# endif
2450 return uDR6;
2451}
2452#endif
2453
2454
2455/**
2456 * Reads and clears DR6.
2457 *
2458 * @returns DR6.
2459 */
2460#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2461DECLASM(RTCCUINTXREG) ASMGetAndClearDR6(void);
2462#else
2463DECLINLINE(RTCCUINTXREG) ASMGetAndClearDR6(void)
2464{
2465 RTCCUINTXREG uDR6;
2466# if RT_INLINE_ASM_USES_INTRIN
2467 uDR6 = __readdr(6);
2468 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
2469# elif RT_INLINE_ASM_GNU_STYLE
2470 RTCCUINTXREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
2471# ifdef RT_ARCH_AMD64
2472 __asm__ __volatile__("movq %%dr6, %0\n\t"
2473 "movq %1, %%dr6\n\t"
2474 : "=r" (uDR6)
2475 : "r" (uNewValue));
2476# else
2477 __asm__ __volatile__("movl %%dr6, %0\n\t"
2478 "movl %1, %%dr6\n\t"
2479 : "=r" (uDR6)
2480 : "r" (uNewValue));
2481# endif
2482# else
2483 __asm
2484 {
2485# ifdef RT_ARCH_AMD64
2486 mov rax, dr6
2487 mov [uDR6], rax
2489 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's, everything else (incl. 63-32) is zero. */
2490 mov dr6, rcx
2491# else
2492 mov eax, dr6
2493 mov [uDR6], eax
2494 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's, everything else is zero. */
2495 mov dr6, ecx
2496# endif
2497 }
2498# endif
2499 return uDR6;
2500}
2501#endif
2502
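/*
 * A minimal usage sketch (illustrative only, not part of the header): decoding
 * DR6 in a #DB handler.  The bit positions assumed here, B0..B3 in bits 0..3
 * and BS in bit 14, follow the Intel/AMD manuals; the handler name is
 * hypothetical.
 */
#if 0 /* example, not compiled */
static void ExampleDebugExceptionHandler(void)
{
    RTCCUINTXREG const uDR6 = ASMGetAndClearDR6(); /* reads the status bits and re-arms DR6 to 0xffff0ff0 */
    if (uDR6 & RT_BIT_32(0))
    {
        /* hardware breakpoint 0 (DR0) triggered */
    }
    if (uDR6 & RT_BIT_32(14))
    {
        /* single-step trap (BS) */
    }
}
#endif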
2503
2504/**
2505 * Gets dr7.
2506 *
2507 * @returns dr7.
2508 */
2509#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2510DECLASM(RTCCUINTXREG) ASMGetDR7(void);
2511#else
2512DECLINLINE(RTCCUINTXREG) ASMGetDR7(void)
2513{
2514 RTCCUINTXREG uDR7;
2515# if RT_INLINE_ASM_USES_INTRIN
2516 uDR7 = __readdr(7);
2517# elif RT_INLINE_ASM_GNU_STYLE
2518# ifdef RT_ARCH_AMD64
2519 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
2520# else
2521 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
2522# endif
2523# else
2524 __asm
2525 {
2526# ifdef RT_ARCH_AMD64
2527 mov rax, dr7
2528 mov [uDR7], rax
2529# else
2530 mov eax, dr7
2531 mov [uDR7], eax
2532# endif
2533 }
2534# endif
2535 return uDR7;
2536}
2537#endif
2538
2539
2540/**
2541 * Sets dr0.
2542 *
2543 * @param uDRVal Debug register value to write.
2544 */
2545#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2546DECLASM(void) ASMSetDR0(RTCCUINTXREG uDRVal);
2547#else
2548DECLINLINE(void) ASMSetDR0(RTCCUINTXREG uDRVal)
2549{
2550# if RT_INLINE_ASM_USES_INTRIN
2551 __writedr(0, uDRVal);
2552# elif RT_INLINE_ASM_GNU_STYLE
2553# ifdef RT_ARCH_AMD64
2554 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2555# else
2556 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2557# endif
2558# else
2559 __asm
2560 {
2561# ifdef RT_ARCH_AMD64
2562 mov rax, [uDRVal]
2563 mov dr0, rax
2564# else
2565 mov eax, [uDRVal]
2566 mov dr0, eax
2567# endif
2568 }
2569# endif
2570}
2571#endif
2572
2573
2574/**
2575 * Sets dr1.
2576 *
2577 * @param uDRVal Debug register value to write.
2578 */
2579#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2580DECLASM(void) ASMSetDR1(RTCCUINTXREG uDRVal);
2581#else
2582DECLINLINE(void) ASMSetDR1(RTCCUINTXREG uDRVal)
2583{
2584# if RT_INLINE_ASM_USES_INTRIN
2585 __writedr(1, uDRVal);
2586# elif RT_INLINE_ASM_GNU_STYLE
2587# ifdef RT_ARCH_AMD64
2588 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2589# else
2590 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2591# endif
2592# else
2593 __asm
2594 {
2595# ifdef RT_ARCH_AMD64
2596 mov rax, [uDRVal]
2597 mov dr1, rax
2598# else
2599 mov eax, [uDRVal]
2600 mov dr1, eax
2601# endif
2602 }
2603# endif
2604}
2605#endif
2606
2607
2608/**
2609 * Sets dr2.
2610 *
2611 * @param uDRVal Debug register value to write.
2612 */
2613#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2614DECLASM(void) ASMSetDR2(RTCCUINTXREG uDRVal);
2615#else
2616DECLINLINE(void) ASMSetDR2(RTCCUINTXREG uDRVal)
2617{
2618# if RT_INLINE_ASM_USES_INTRIN
2619 __writedr(2, uDRVal);
2620# elif RT_INLINE_ASM_GNU_STYLE
2621# ifdef RT_ARCH_AMD64
2622 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2623# else
2624 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2625# endif
2626# else
2627 __asm
2628 {
2629# ifdef RT_ARCH_AMD64
2630 mov rax, [uDRVal]
2631 mov dr2, rax
2632# else
2633 mov eax, [uDRVal]
2634 mov dr2, eax
2635# endif
2636 }
2637# endif
2638}
2639#endif
2640
2641
2642/**
2643 * Sets dr3.
2644 *
2645 * @param uDRVal Debug register value to write.
2646 */
2647#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2648DECLASM(void) ASMSetDR3(RTCCUINTXREG uDRVal);
2649#else
2650DECLINLINE(void) ASMSetDR3(RTCCUINTXREG uDRVal)
2651{
2652# if RT_INLINE_ASM_USES_INTRIN
2653 __writedr(3, uDRVal);
2654# elif RT_INLINE_ASM_GNU_STYLE
2655# ifdef RT_ARCH_AMD64
2656 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2657# else
2658 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2659# endif
2660# else
2661 __asm
2662 {
2663# ifdef RT_ARCH_AMD64
2664 mov rax, [uDRVal]
2665 mov dr3, rax
2666# else
2667 mov eax, [uDRVal]
2668 mov dr3, eax
2669# endif
2670 }
2671# endif
2672}
2673#endif
2674
2675
2676/**
2677 * Sets dr6.
2678 *
2679 * @param uDRVal Debug register value to write.
2680 */
2681#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2682DECLASM(void) ASMSetDR6(RTCCUINTXREG uDRVal);
2683#else
2684DECLINLINE(void) ASMSetDR6(RTCCUINTXREG uDRVal)
2685{
2686# if RT_INLINE_ASM_USES_INTRIN
2687 __writedr(6, uDRVal);
2688# elif RT_INLINE_ASM_GNU_STYLE
2689# ifdef RT_ARCH_AMD64
2690 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2691# else
2692 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2693# endif
2694# else
2695 __asm
2696 {
2697# ifdef RT_ARCH_AMD64
2698 mov rax, [uDRVal]
2699 mov dr6, rax
2700# else
2701 mov eax, [uDRVal]
2702 mov dr6, eax
2703# endif
2704 }
2705# endif
2706}
2707#endif
2708
2709
2710/**
2711 * Sets dr7.
2712 *
2713 * @param uDRVal Debug register value to write.
2714 */
2715#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2716DECLASM(void) ASMSetDR7(RTCCUINTXREG uDRVal);
2717#else
2718DECLINLINE(void) ASMSetDR7(RTCCUINTXREG uDRVal)
2719{
2720# if RT_INLINE_ASM_USES_INTRIN
2721 __writedr(7, uDRVal);
2722# elif RT_INLINE_ASM_GNU_STYLE
2723# ifdef RT_ARCH_AMD64
2724 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2725# else
2726 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2727# endif
2728# else
2729 __asm
2730 {
2731# ifdef RT_ARCH_AMD64
2732 mov rax, [uDRVal]
2733 mov dr7, rax
2734# else
2735 mov eax, [uDRVal]
2736 mov dr7, eax
2737# endif
2738 }
2739# endif
2740}
2741#endif
2742
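/*
 * A usage sketch (illustrative only): arming a 1-byte execute breakpoint via
 * DR0/DR7.  The DR7 layout assumed here follows the x86 manuals: L0 is bit 0,
 * and R/W0 / LEN0 (bits 16-19) must be zero for an execute breakpoint; a real
 * caller would clear those bits explicitly instead of relying on them being
 * zero already.
 */
#if 0 /* example, not compiled */
static void ExampleArmExecBreakpoint(RTCCUINTXREG uCodeAddr)
{
    ASMSetDR0(uCodeAddr);                  /* linear address to trap on */
    ASMSetDR7(ASMGetDR7() | RT_BIT_32(0)); /* L0: locally enable breakpoint 0 */
}
#endif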
2743
2744/**
2745 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2746 *
2747 * @param Port I/O port to write to.
2748 * @param u8 8-bit integer to write.
2749 */
2750#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2751DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2752#else
2753DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2754{
2755# if RT_INLINE_ASM_GNU_STYLE
2756 __asm__ __volatile__("outb %b1, %w0\n\t"
2757 :: "Nd" (Port),
2758 "a" (u8));
2759
2760# elif RT_INLINE_ASM_USES_INTRIN
2761 __outbyte(Port, u8);
2762
2763# else
2764 __asm
2765 {
2766 mov dx, [Port]
2767 mov al, [u8]
2768 out dx, al
2769 }
2770# endif
2771}
2772#endif
2773
2774
2775/**
2776 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2777 *
2778 * @returns 8-bit integer.
2779 * @param Port I/O port to read from.
2780 */
2781#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2782DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2783#else
2784DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2785{
2786 uint8_t u8;
2787# if RT_INLINE_ASM_GNU_STYLE
2788 __asm__ __volatile__("inb %w1, %b0\n\t"
2789 : "=a" (u8)
2790 : "Nd" (Port));
2791
2792# elif RT_INLINE_ASM_USES_INTRIN
2793 u8 = __inbyte(Port);
2794
2795# else
2796 __asm
2797 {
2798 mov dx, [Port]
2799 in al, dx
2800 mov [u8], al
2801 }
2802# endif
2803 return u8;
2804}
2805#endif
2806
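/*
 * A usage sketch (illustrative only): two classic PC/AT uses of byte-sized
 * port I/O.  The port numbers (0x80 POST/diagnostic, 0x64 keyboard-controller
 * status) are conventional platform assignments, not defined by this header.
 */
#if 0 /* example, not compiled */
static uint8_t ExampleProbeKeyboardStatus(void)
{
    ASMOutU8(0x80, 0x55); /* write a POST code; traditionally also used as a tiny I/O delay */
    return ASMInU8(0x64); /* read the 8042 keyboard controller status register */
}
#endif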
2807
2808/**
2809 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2810 *
2811 * @param Port I/O port to write to.
2812 * @param u16 16-bit integer to write.
2813 */
2814#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2815DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2816#else
2817DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2818{
2819# if RT_INLINE_ASM_GNU_STYLE
2820 __asm__ __volatile__("outw %w1, %w0\n\t"
2821 :: "Nd" (Port),
2822 "a" (u16));
2823
2824# elif RT_INLINE_ASM_USES_INTRIN
2825 __outword(Port, u16);
2826
2827# else
2828 __asm
2829 {
2830 mov dx, [Port]
2831 mov ax, [u16]
2832 out dx, ax
2833 }
2834# endif
2835}
2836#endif
2837
2838
2839/**
2840 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2841 *
2842 * @returns 16-bit integer.
2843 * @param Port I/O port to read from.
2844 */
2845#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2846DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2847#else
2848DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2849{
2850 uint16_t u16;
2851# if RT_INLINE_ASM_GNU_STYLE
2852 __asm__ __volatile__("inw %w1, %w0\n\t"
2853 : "=a" (u16)
2854 : "Nd" (Port));
2855
2856# elif RT_INLINE_ASM_USES_INTRIN
2857 u16 = __inword(Port);
2858
2859# else
2860 __asm
2861 {
2862 mov dx, [Port]
2863 in ax, dx
2864 mov [u16], ax
2865 }
2866# endif
2867 return u16;
2868}
2869#endif
2870
2871
2872/**
2873 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2874 *
2875 * @param Port I/O port to write to.
2876 * @param u32 32-bit integer to write.
2877 */
2878#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2879DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2880#else
2881DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2882{
2883# if RT_INLINE_ASM_GNU_STYLE
2884 __asm__ __volatile__("outl %1, %w0\n\t"
2885 :: "Nd" (Port),
2886 "a" (u32));
2887
2888# elif RT_INLINE_ASM_USES_INTRIN
2889 __outdword(Port, u32);
2890
2891# else
2892 __asm
2893 {
2894 mov dx, [Port]
2895 mov eax, [u32]
2896 out dx, eax
2897 }
2898# endif
2899}
2900#endif
2901
2902
2903/**
2904 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2905 *
2906 * @returns 32-bit integer.
2907 * @param Port I/O port to read from.
2908 */
2909#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2910DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2911#else
2912DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2913{
2914 uint32_t u32;
2915# if RT_INLINE_ASM_GNU_STYLE
2916 __asm__ __volatile__("inl %w1, %0\n\t"
2917 : "=a" (u32)
2918 : "Nd" (Port));
2919
2920# elif RT_INLINE_ASM_USES_INTRIN
2921 u32 = __indword(Port);
2922
2923# else
2924 __asm
2925 {
2926 mov dx, [Port]
2927 in eax, dx
2928 mov [u32], eax
2929 }
2930# endif
2931 return u32;
2932}
2933#endif
2934
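/*
 * A usage sketch (illustrative only): reading a 32-bit PCI configuration
 * register through the legacy 0xCF8/0xCFC mechanism.  The address format
 * (enable bit 31, bus in bits 16-23, device in bits 11-15, function in
 * bits 8-10, dword-aligned register offset in bits 2-7) follows the PCI
 * local bus specification.
 */
#if 0 /* example, not compiled */
static uint32_t ExamplePciConfigRead32(uint8_t uBus, uint8_t uDevice, uint8_t uFunction, uint8_t offReg)
{
    uint32_t const uAddr = UINT32_C(0x80000000)
                         | ((uint32_t)uBus             << 16)
                         | ((uint32_t)(uDevice & 0x1f) << 11)
                         | ((uint32_t)(uFunction & 7)  <<  8)
                         | (offReg & 0xfc);
    ASMOutU32(0xcf8, uAddr); /* select bus/device/function/register */
    return ASMInU32(0xcfc);  /* read through the data window */
}
#endif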
2935
2936/**
2937 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2938 *
2939 * @param Port I/O port to write to.
2940 * @param pau8 Pointer to the string buffer.
2941 * @param c The number of items to write.
2942 */
2943#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2944DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2945#else
2946DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2947{
2948# if RT_INLINE_ASM_GNU_STYLE
2949 __asm__ __volatile__("rep; outsb\n\t"
2950 : "+S" (pau8),
2951 "+c" (c)
2952 : "d" (Port));
2953
2954# elif RT_INLINE_ASM_USES_INTRIN
2955 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2956
2957# else
2958 __asm
2959 {
2960 mov dx, [Port]
2961 mov ecx, [c]
2962 mov eax, [pau8]
2963 xchg esi, eax
2964 rep outsb
2965 xchg esi, eax
2966 }
2967# endif
2968}
2969#endif
2970
2971
2972/**
2973 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2974 *
2975 * @param Port I/O port to read from.
2976 * @param pau8 Pointer to the string buffer (output).
2977 * @param c The number of items to read.
2978 */
2979#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2980DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2981#else
2982DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2983{
2984# if RT_INLINE_ASM_GNU_STYLE
2985 __asm__ __volatile__("rep; insb\n\t"
2986 : "+D" (pau8),
2987 "+c" (c)
2988 : "d" (Port));
2989
2990# elif RT_INLINE_ASM_USES_INTRIN
2991 __inbytestring(Port, pau8, (unsigned long)c);
2992
2993# else
2994 __asm
2995 {
2996 mov dx, [Port]
2997 mov ecx, [c]
2998 mov eax, [pau8]
2999 xchg edi, eax
3000 rep insb
3001 xchg edi, eax
3002 }
3003# endif
3004}
3005#endif
3006
3007
3008/**
3009 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
3010 *
3011 * @param Port I/O port to write to.
3012 * @param pau16 Pointer to the string buffer.
3013 * @param c The number of items to write.
3014 */
3015#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3016DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
3017#else
3018DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
3019{
3020# if RT_INLINE_ASM_GNU_STYLE
3021 __asm__ __volatile__("rep; outsw\n\t"
3022 : "+S" (pau16),
3023 "+c" (c)
3024 : "d" (Port));
3025
3026# elif RT_INLINE_ASM_USES_INTRIN
3027 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
3028
3029# else
3030 __asm
3031 {
3032 mov dx, [Port]
3033 mov ecx, [c]
3034 mov eax, [pau16]
3035 xchg esi, eax
3036 rep outsw
3037 xchg esi, eax
3038 }
3039# endif
3040}
3041#endif
3042
3043
3044/**
3045 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
3046 *
3047 * @param Port I/O port to read from.
3048 * @param pau16 Pointer to the string buffer (output).
3049 * @param c The number of items to read.
3050 */
3051#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3052DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
3053#else
3054DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
3055{
3056# if RT_INLINE_ASM_GNU_STYLE
3057 __asm__ __volatile__("rep; insw\n\t"
3058 : "+D" (pau16),
3059 "+c" (c)
3060 : "d" (Port));
3061
3062# elif RT_INLINE_ASM_USES_INTRIN
3063 __inwordstring(Port, pau16, (unsigned long)c);
3064
3065# else
3066 __asm
3067 {
3068 mov dx, [Port]
3069 mov ecx, [c]
3070 mov eax, [pau16]
3071 xchg edi, eax
3072 rep insw
3073 xchg edi, eax
3074 }
3075# endif
3076}
3077#endif
3078
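/*
 * A usage sketch (illustrative only): draining one 512-byte ATA PIO data
 * block as 256 words.  Port 0x1F0 is the conventional primary-channel data
 * port, and the device is assumed to have asserted DRQ already.
 */
#if 0 /* example, not compiled */
static void ExampleReadAtaSector(uint16_t *pau16Sector)
{
    ASMInStrU16(0x1f0, pau16Sector, 256); /* 256 x 16 bits = 512 bytes */
}
#endif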
3079
3080/**
3081 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
3082 *
3083 * @param Port I/O port to write to.
3084 * @param pau32 Pointer to the string buffer.
3085 * @param c The number of items to write.
3086 */
3087#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3088DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
3089#else
3090DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
3091{
3092# if RT_INLINE_ASM_GNU_STYLE
3093 __asm__ __volatile__("rep; outsl\n\t"
3094 : "+S" (pau32),
3095 "+c" (c)
3096 : "d" (Port));
3097
3098# elif RT_INLINE_ASM_USES_INTRIN
3099 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
3100
3101# else
3102 __asm
3103 {
3104 mov dx, [Port]
3105 mov ecx, [c]
3106 mov eax, [pau32]
3107 xchg esi, eax
3108 rep outsd
3109 xchg esi, eax
3110 }
3111# endif
3112}
3113#endif
3114
3115
3116/**
3117 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
3118 *
3119 * @param Port I/O port to read from.
3120 * @param pau32 Pointer to the string buffer (output).
3121 * @param c The number of items to read.
3122 */
3123#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3124DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
3125#else
3126DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
3127{
3128# if RT_INLINE_ASM_GNU_STYLE
3129 __asm__ __volatile__("rep; insl\n\t"
3130 : "+D" (pau32),
3131 "+c" (c)
3132 : "d" (Port));
3133
3134# elif RT_INLINE_ASM_USES_INTRIN
3135 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
3136
3137# else
3138 __asm
3139 {
3140 mov dx, [Port]
3141 mov ecx, [c]
3142 mov eax, [pau32]
3143 xchg edi, eax
3144 rep insd
3145 xchg edi, eax
3146 }
3147# endif
3148}
3149#endif
3150
3151
3152/**
3153 * Invalidates the TLB entry for a page (INVLPG).
3154 *
3155 * @param pv Address of the page to invalidate.
3156 */
3157#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3158DECLASM(void) ASMInvalidatePage(void *pv);
3159#else
3160DECLINLINE(void) ASMInvalidatePage(void *pv)
3161{
3162# if RT_INLINE_ASM_USES_INTRIN
3163 __invlpg(pv);
3164
3165# elif RT_INLINE_ASM_GNU_STYLE
3166 __asm__ __volatile__("invlpg %0\n\t"
3167 : : "m" (*(uint8_t *)pv));
3168# else
3169 __asm
3170 {
3171# ifdef RT_ARCH_AMD64
3172 mov rax, [pv]
3173 invlpg [rax]
3174# else
3175 mov eax, [pv]
3176 invlpg [eax]
3177# endif
3178 }
3179# endif
3180}
3181#endif
3182
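/*
 * A usage sketch (illustrative only): after rewriting a page table entry the
 * stale TLB entry must be flushed before the new mapping can be relied upon.
 * The PTE pointer and value here are hypothetical.
 */
#if 0 /* example, not compiled */
static void ExampleRemapPage(uint64_t *pPte, uint64_t uNewPte, void *pvPage)
{
    *pPte = uNewPte;           /* update the PTE in memory */
    ASMInvalidatePage(pvPage); /* drop the TLB entry for that linear address */
}
#endif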
3183
3184/**
3185 * Writes back the internal caches and invalidates them (WBINVD).
3186 */
3187#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
3188DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
3189#else
3190DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
3191{
3192# if RT_INLINE_ASM_USES_INTRIN
3193 __wbinvd();
3194
3195# elif RT_INLINE_ASM_GNU_STYLE
3196 __asm__ __volatile__("wbinvd");
3197# else
3198 __asm
3199 {
3200 wbinvd
3201 }
3202# endif
3203}
3204#endif
3205
3206
3207/**
3208 * Invalidates internal and (perhaps) external caches without first
3209 * flushing dirty cache lines (INVD), so modified data is lost. Use with extreme care.
3210 */
3211#if RT_INLINE_ASM_EXTERNAL
3212DECLASM(void) ASMInvalidateInternalCaches(void);
3213#else
3214DECLINLINE(void) ASMInvalidateInternalCaches(void)
3215{
3216# if RT_INLINE_ASM_GNU_STYLE
3217 __asm__ __volatile__("invd");
3218# else
3219 __asm
3220 {
3221 invd
3222 }
3223# endif
3224}
3225#endif
3226
3227
3228/**
3229 * Memory load/store fence, waits for any pending writes and reads to complete.
3230 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
3231 */
3232DECLINLINE(void) ASMMemoryFenceSSE2(void)
3233{
3234#if RT_INLINE_ASM_GNU_STYLE
3235 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
3236#elif RT_INLINE_ASM_USES_INTRIN
3237 _mm_mfence();
3238#else
3239 __asm
3240 {
3241 _emit 0x0f
3242 _emit 0xae
3243 _emit 0xf0
3244 }
3245#endif
3246}
3247
3248
3249/**
3250 * Memory store fence, waits for any writes to complete.
3251 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
3252 */
3253DECLINLINE(void) ASMWriteFenceSSE(void)
3254{
3255#if RT_INLINE_ASM_GNU_STYLE
3256 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
3257#elif RT_INLINE_ASM_USES_INTRIN
3258 _mm_sfence();
3259#else
3260 __asm
3261 {
3262 _emit 0x0f
3263 _emit 0xae
3264 _emit 0xf8
3265 }
3266#endif
3267}
3268
3269
3270/**
3271 * Memory load fence, waits for any pending reads to complete.
3272 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
3273 */
3274DECLINLINE(void) ASMReadFenceSSE2(void)
3275{
3276#if RT_INLINE_ASM_GNU_STYLE
3277 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
3278#elif RT_INLINE_ASM_USES_INTRIN
3279 _mm_lfence();
3280#else
3281 __asm
3282 {
3283 _emit 0x0f
3284 _emit 0xae
3285 _emit 0xe8
3286 }
3287#endif
3288}
3289
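/*
 * A usage sketch (illustrative only): publishing a payload to another CPU and
 * fencing before setting the ready flag.  Note that ordinary x86 stores are
 * already store-ordered; the SSE fence matters mainly when the payload was
 * written with weakly-ordered (non-temporal) stores, which the plain C
 * assignments below are not.
 */
#if 0 /* example, not compiled */
static void ExamplePublish(uint32_t volatile *pu32Payload, uint32_t volatile *pfReady)
{
    *pu32Payload = 42;  /* the data */
    ASMWriteFenceSSE(); /* ensure the payload store is globally visible first */
    *pfReady = 1;       /* the consumer polls this flag */
}
#endif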
3290#if !defined(_MSC_VER) || !defined(RT_ARCH_AMD64)
3291
3292/**
3293 * Clears the AC bit in the EFLAGS register (CLAC).
3294 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
3295 * Must be executed in ring-0 (R0).
3296 */
3297DECLINLINE(void) ASMClearAC(void)
3298{
3299#if RT_INLINE_ASM_GNU_STYLE
3300 __asm__ __volatile__ (".byte 0x0f,0x01,0xca\n\t");
3301#else
3302 __asm
3303 {
3304 _emit 0x0f
3305 _emit 0x01
3306 _emit 0xca
3307 }
3308#endif
3309}
3310
3311
3312/**
3313 * Sets the AC bit in the EFLAGS register (STAC).
3314 * Requires the X86_CPUID_STEXT_FEATURE_EBX_SMAP CPUID bit set.
3315 * Must be executed in ring-0 (R0).
3316 */
3317DECLINLINE(void) ASMSetAC(void)
3318{
3319#if RT_INLINE_ASM_GNU_STYLE
3320 __asm__ __volatile__ (".byte 0x0f,0x01,0xcb\n\t");
3321#else
3322 __asm
3323 {
3324 _emit 0x0f
3325 _emit 0x01
3326 _emit 0xcb
3327 }
3328#endif
3329}
3330
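/*
 * A usage sketch (illustrative only): a ring-0 SMAP bracket around an access
 * to user-mode memory.  Checking that SMAP is actually supported and enabled,
 * and validating the user pointer, are left out for brevity.
 */
#if 0 /* example, not compiled */
static uint8_t ExamplePeekUserByte(uint8_t const *pbUser)
{
    uint8_t b;
    ASMSetAC();   /* STAC: temporarily permit supervisor access to user pages */
    b = *pbUser;
    ASMClearAC(); /* CLAC: re-arm SMAP */
    return b;
}
#endif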
3331#endif /* !defined(_MSC_VER) || !defined(RT_ARCH_AMD64) */
3332
3333/** @} */
3334#endif
3335