source: vbox/trunk/include/iprt/asm-amd64-x86.h@54259
/** @file
 * IPRT - AMD64 and x86 Specific Assembly Functions.
 */

/*
 * Copyright (C) 2006-2013 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */

#ifndef ___iprt_asm_amd64_x86_h
#define ___iprt_asm_amd64_x86_h

#include <iprt/types.h>
#include <iprt/assert.h>
#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
# error "Not on AMD64 or x86"
#endif

#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
# include <intrin.h>
  /* Emit the intrinsics at all optimization levels. */
# pragma intrinsic(_ReadWriteBarrier)
# pragma intrinsic(__cpuid)
# pragma intrinsic(_enable)
# pragma intrinsic(_disable)
# pragma intrinsic(__rdtsc)
# pragma intrinsic(__readmsr)
# pragma intrinsic(__writemsr)
# pragma intrinsic(__outbyte)
# pragma intrinsic(__outbytestring)
# pragma intrinsic(__outword)
# pragma intrinsic(__outwordstring)
# pragma intrinsic(__outdword)
# pragma intrinsic(__outdwordstring)
# pragma intrinsic(__inbyte)
# pragma intrinsic(__inbytestring)
# pragma intrinsic(__inword)
# pragma intrinsic(__inwordstring)
# pragma intrinsic(__indword)
# pragma intrinsic(__indwordstring)
# pragma intrinsic(__invlpg)
# pragma intrinsic(__wbinvd)
# pragma intrinsic(__readcr0)
# pragma intrinsic(__readcr2)
# pragma intrinsic(__readcr3)
# pragma intrinsic(__readcr4)
# pragma intrinsic(__writecr0)
# pragma intrinsic(__writecr3)
# pragma intrinsic(__writecr4)
# pragma intrinsic(__readdr)
# pragma intrinsic(__writedr)
# ifdef RT_ARCH_AMD64
#  pragma intrinsic(__readcr8)
#  pragma intrinsic(__writecr8)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= 15
#  pragma intrinsic(__readeflags)
#  pragma intrinsic(__writeeflags)
#  pragma intrinsic(__rdtscp)
# endif
#endif



/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
 * @ingroup grp_rt_asm
 * @{
 */

/** @todo find a more proper place for these structures? */

#pragma pack(1)
/** IDTR */
typedef struct RTIDTR
{
    /** Size of the IDT. */
    uint16_t    cbIdt;
    /** Address of the IDT. */
    uintptr_t   pIdt;
} RTIDTR, *PRTIDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTIDTRALIGNEDINT
{
    /** Alignment padding. */
    uint8_t     au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The IDTR structure. */
    RTIDTR      Idtr;
} RTIDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTIDTR for preventing misalignment exceptions. */
typedef union RTIDTRALIGNED
{
    /** Try make sure this structure has optimal alignment. */
    uint64_t            auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTIDTRALIGNEDINT    s;
} RTIDTRALIGNED;
AssertCompileSize(RTIDTRALIGNED, ARCH_BITS * 2 / 8);
/** Pointer to an RTIDTR alignment wrapper. */
typedef RTIDTRALIGNED *PRIDTRALIGNED;


#pragma pack(1)
/** GDTR */
typedef struct RTGDTR
{
    /** Size of the GDT. */
    uint16_t    cbGdt;
    /** Address of the GDT. */
    uintptr_t   pGdt;
} RTGDTR, *PRTGDTR;
#pragma pack()

#pragma pack(1)
/** @internal */
typedef struct RTGDTRALIGNEDINT
{
    /** Alignment padding. */
    uint8_t     au16Padding[ARCH_BITS == 64 ? 3 : 1];
    /** The GDTR structure. */
    RTGDTR      Gdtr;
} RTGDTRALIGNEDINT;
#pragma pack()

/** Wrapped RTGDTR for preventing misalignment exceptions. */
typedef union RTGDTRALIGNED
{
    /** Try make sure this structure has optimal alignment. */
    uint64_t            auAlignmentHack[ARCH_BITS == 64 ? 2 : 1];
    /** Aligned structure. */
    RTGDTRALIGNEDINT    s;
} RTGDTRALIGNED;
AssertCompileSize(RTGDTRALIGNED, ARCH_BITS * 2 / 8);
/** Pointer to an RTGDTR alignment wrapper. */
typedef RTGDTRALIGNED *PRGDTRALIGNED;


/**
 * Gets the content of the IDTR CPU register.
 * @param   pIdtr   Where to store the IDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
#else
DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        sidt    [rax]
#  else
        mov     eax, [pIdtr]
        sidt    [eax]
#  endif
    }
# endif
}
#endif
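
/* Usage sketch (illustrative, not part of the original header): capturing the
 * IDT base and limit.  Ring-0 context is assumed; the RTIDTRALIGNED wrapper
 * defined above keeps the SIDT store away from misaligned addresses.
 * @code
 *      RTIDTRALIGNED TmpIdtr;
 *      ASMGetIDTR(&TmpIdtr.s.Idtr);
 *      uint16_t  const cbIdt = TmpIdtr.s.Idtr.cbIdt;   // the limit
 *      uintptr_t const uBase = TmpIdtr.s.Idtr.pIdt;    // linear base address
 * @endcode
 */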


/**
 * Gets the content of the IDTR.LIMIT CPU register.
 * @returns IDTR limit.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(uint16_t) ASMGetIdtrLimit(void);
#else
DECLINLINE(uint16_t) ASMGetIdtrLimit(void)
{
    RTIDTRALIGNED TmpIdtr;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (TmpIdtr.s.Idtr));
# else
    __asm
    {
        sidt    [TmpIdtr.s.Idtr]
    }
# endif
    return TmpIdtr.s.Idtr.cbIdt;
}
#endif


/**
 * Sets the content of the IDTR CPU register.
 * @param   pIdtr   Where to load the IDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
#else
DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        lidt    [rax]
#  else
        mov     eax, [pIdtr]
        lidt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Gets the content of the GDTR CPU register.
 * @param   pGdtr   Where to store the GDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
#else
DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        sgdt    [rax]
#  else
        mov     eax, [pGdtr]
        sgdt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Sets the content of the GDTR CPU register.
 * @param   pGdtr   Where to load the GDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMSetGDTR(const RTGDTR *pGdtr);
#else
DECLINLINE(void) ASMSetGDTR(const RTGDTR *pGdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        lgdt    [rax]
#  else
        mov     eax, [pGdtr]
        lgdt    [eax]
#  endif
    }
# endif
}
#endif



/**
 * Get the cs register.
 * @returns cs.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetCS(void);
#else
DECLINLINE(RTSEL) ASMGetCS(void)
{
    RTSEL SelCS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
# else
    __asm
    {
        mov     ax, cs
        mov     [SelCS], ax
    }
# endif
    return SelCS;
}
#endif


/**
 * Get the DS register.
 * @returns DS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetDS(void);
#else
DECLINLINE(RTSEL) ASMGetDS(void)
{
    RTSEL SelDS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
# else
    __asm
    {
        mov     ax, ds
        mov     [SelDS], ax
    }
# endif
    return SelDS;
}
#endif


/**
 * Get the ES register.
 * @returns ES.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetES(void);
#else
DECLINLINE(RTSEL) ASMGetES(void)
{
    RTSEL SelES;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
# else
    __asm
    {
        mov     ax, es
        mov     [SelES], ax
    }
# endif
    return SelES;
}
#endif


/**
 * Get the FS register.
 * @returns FS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetFS(void);
#else
DECLINLINE(RTSEL) ASMGetFS(void)
{
    RTSEL SelFS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
# else
    __asm
    {
        mov     ax, fs
        mov     [SelFS], ax
    }
# endif
    return SelFS;
}
#endif


/**
 * Get the GS register.
 * @returns GS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetGS(void);
#else
DECLINLINE(RTSEL) ASMGetGS(void)
{
    RTSEL SelGS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
# else
    __asm
    {
        mov     ax, gs
        mov     [SelGS], ax
    }
# endif
    return SelGS;
}
#endif


/**
 * Get the SS register.
 * @returns SS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetSS(void);
#else
DECLINLINE(RTSEL) ASMGetSS(void)
{
    RTSEL SelSS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
# else
    __asm
    {
        mov     ax, ss
        mov     [SelSS], ax
    }
# endif
    return SelSS;
}
#endif


/**
 * Get the TR register.
 * @returns TR.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetTR(void);
#else
DECLINLINE(RTSEL) ASMGetTR(void)
{
    RTSEL SelTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
# else
    __asm
    {
        str     ax
        mov     [SelTR], ax
    }
# endif
    return SelTR;
}
#endif


/**
 * Get the LDTR register.
 * @returns LDTR.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetLDTR(void);
#else
DECLINLINE(RTSEL) ASMGetLDTR(void)
{
    RTSEL SelLDTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
# else
    __asm
    {
        sldt    ax
        mov     [SelLDTR], ax
    }
# endif
    return SelLDTR;
}
#endif


/**
 * Get the access rights for the segment selector.
 *
 * @returns The access rights on success or UINT32_MAX on failure.
 * @param   uSel    The selector value.
 *
 * @remarks Using UINT32_MAX for failure is chosen because valid access rights
 *          always have bits 0:7 as 0 (on both Intel & AMD).
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(uint32_t) ASMGetSegAttr(uint32_t uSel);
#else
DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
{
    uint32_t uAttr;
    /* LAR only accesses the low 16 bits of the source operand, but a 32-bit
       destination register (eax) is required to get the full 32-bit access rights. */
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lar %1, %%eax\n\t"
                         "jz done%=\n\t"
                         "movl $0xffffffff, %%eax\n\t"
                         "done%=:\n\t"
                         "movl %%eax, %0\n\t"
                         : "=r" (uAttr)
                         : "r" (uSel)
                         : "cc", "%eax");
# else
    __asm
    {
        lar     eax, [uSel]
        jz      done
        mov     eax, 0ffffffffh
    done:
        mov     [uAttr], eax
    }
# endif
    return uAttr;
}
#endif
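
/* Usage sketch (illustrative, not part of the original header): testing the
 * present bit of the current code segment.  The attribute layout follows the
 * descriptor format returned by LAR: type in bits 8..11, S in bit 12, DPL in
 * bits 13..14, P in bit 15.
 * @code
 *      uint32_t const fAttr    = ASMGetSegAttr(ASMGetCS());
 *      bool     const fPresent = fAttr != UINT32_MAX && (fAttr & RT_BIT_32(15));
 * @endcode
 */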


/**
 * Get the [RE]FLAGS register.
 * @returns [RE]FLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
DECLASM(RTCCUINTREG) ASMGetFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
{
    RTCCUINTREG uFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "popq %0\n\t"
                         : "=r" (uFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "popl %0\n\t"
                         : "=r" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= 15
    uFlags = __readeflags();
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        pushfq
        pop     [uFlags]
#  else
        pushfd
        pop     [uFlags]
#  endif
    }
# endif
    return uFlags;
}
#endif


/**
 * Set the [RE]FLAGS register.
 * @param   uFlags  The new [RE]FLAGS value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
#else
DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushq %0\n\t"
                         "popfq\n\t"
                         : : "g" (uFlags));
#  else
    __asm__ __volatile__("pushl %0\n\t"
                         "popfl\n\t"
                         : : "g" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= 15
    __writeeflags(uFlags);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        push    [uFlags]
        popfq
#  else
        push    [uFlags]
        popfd
#  endif
    }
# endif
}
#endif


/**
 * Gets the content of the CPU timestamp counter register.
 *
 * @returns TSC.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint64_t) ASMReadTSC(void);
#else
DECLINLINE(uint64_t) ASMReadTSC(void)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
# else
#  if RT_INLINE_ASM_USES_INTRIN
    u.u = __rdtsc();
#  else
    __asm
    {
        rdtsc
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
#  endif
# endif
    return u.u;
}
#endif
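
/* Usage sketch (illustrative, not part of the original header): a crude cycle
 * count around some work.  This assumes the thread stays on one CPU and that
 * the TSC is invariant; RDTSC is not serializing, so very short intervals
 * will be noisy.
 * @code
 *      uint64_t const uTscStart = ASMReadTSC();
 *      // ... work to be measured ...
 *      uint64_t const cTicks    = ASMReadTSC() - uTscStart;
 * @endcode
 */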


/**
 * Gets the content of the CPU timestamp counter register and the
 * associated AUX value.
 *
 * @returns TSC.
 * @param   puAux   Where to store the AUX value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
DECLASM(uint64_t) ASMReadTscWithAux(uint32_t *puAux);
#else
DECLINLINE(uint64_t) ASMReadTscWithAux(uint32_t *puAux)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdtscp\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi), "=c" (*puAux));
# else
#  if RT_INLINE_ASM_USES_INTRIN >= 15
    u.u = __rdtscp(puAux);
#  else
    __asm
    {
        rdtscp
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        mov     eax, [puAux]
        mov     [eax], ecx
    }
#  endif
# endif
    return u.u;
}
#endif
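
/* Usage sketch (illustrative, not part of the original header): RDTSCP also
 * returns the current IA32_TSC_AUX value, which the OS typically seeds with a
 * CPU/NUMA identifier.  Availability must be checked first via CPUID leaf
 * 0x80000001, EDX bit 27.
 * @code
 *      uint32_t uAux;
 *      uint64_t const uTsc = ASMReadTscWithAux(&uAux);
 * @endcode
 */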


/**
 * Performs the cpuid instruction returning all registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ __volatile__ ("cpuid\n\t"
                          : "=a" (uRAX),
                            "=b" (uRBX),
                            "=c" (uRCX),
                            "=d" (uRDX)
                          : "0" (uOperator), "2" (0));
    *(uint32_t *)pvEAX = (uint32_t)uRAX;
    *(uint32_t *)pvEBX = (uint32_t)uRBX;
    *(uint32_t *)pvECX = (uint32_t)uRCX;
    *(uint32_t *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx, %1\n\t"
                          : "=a" (*(uint32_t *)pvEAX),
                            "=r" (*(uint32_t *)pvEBX),
                            "=c" (*(uint32_t *)pvECX),
                            "=d" (*(uint32_t *)pvEDX)
                          : "0" (uOperator), "2" (0));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    *(uint32_t *)pvEAX = aInfo[0];
    *(uint32_t *)pvEBX = aInfo[1];
    *(uint32_t *)pvECX = aInfo[2];
    *(uint32_t *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t *)pvEAX = uEAX;
    *(uint32_t *)pvEBX = uEBX;
    *(uint32_t *)pvECX = uECX;
    *(uint32_t *)pvEDX = uEDX;
# endif
}
#endif
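
/* Usage sketch (illustrative, not part of the original header): fetching the
 * 12-character CPU vendor string with leaf 0.  The characters come back in
 * EBX, EDX, ECX order.
 * @code
 *      char     szVendor[13];
 *      uint32_t uMaxLeaf;
 *      ASMCpuId(0, &uMaxLeaf, &szVendor[0], &szVendor[8], &szVendor[4]);
 *      szVendor[12] = '\0';    // e.g. "GenuineIntel" or "AuthenticAMD"
 * @endcode
 */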


/**
 * Performs the CPUID instruction with EAX and ECX input returning ALL output
 * registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   uIdxECX     ecx index
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ ("cpuid\n\t"
             : "=a" (uRAX),
               "=b" (uRBX),
               "=c" (uRCX),
               "=d" (uRDX)
             : "0" (uOperator),
               "2" (uIdxECX));
    *(uint32_t *)pvEAX = (uint32_t)uRAX;
    *(uint32_t *)pvEBX = (uint32_t)uRBX;
    *(uint32_t *)pvECX = (uint32_t)uRCX;
    *(uint32_t *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ ("xchgl %%ebx, %1\n\t"
             "cpuid\n\t"
             "xchgl %%ebx, %1\n\t"
             : "=a" (*(uint32_t *)pvEAX),
               "=r" (*(uint32_t *)pvEBX),
               "=c" (*(uint32_t *)pvECX),
               "=d" (*(uint32_t *)pvEDX)
             : "0" (uOperator),
               "2" (uIdxECX));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuidex(aInfo, uOperator, uIdxECX);
    *(uint32_t *)pvEAX = aInfo[0];
    *(uint32_t *)pvEBX = aInfo[1];
    *(uint32_t *)pvECX = aInfo[2];
    *(uint32_t *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        mov     ecx, [uIdxECX]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t *)pvEAX = uEAX;
    *(uint32_t *)pvEBX = uEBX;
    *(uint32_t *)pvECX = uECX;
    *(uint32_t *)pvEDX = uEDX;
# endif
}
#endif
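
/* Usage sketch (illustrative, not part of the original header): querying the
 * structured extended feature leaf, CPUID 7 with ECX=0.  Leaf 7 must be
 * within the range reported by leaf 0 (see ASMIsValidStdRange further down)
 * before the output can be trusted.
 * @code
 *      uint32_t uEAX, uEBX, uECX, uEDX;
 *      ASMCpuId_Idx_ECX(7, 0, &uEAX, &uEBX, &uECX, &uEDX);
 *      bool const fSmep = RT_BOOL(uEBX & RT_BIT_32(7));    // EBX bit 7 = SMEP
 * @endcode
 */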


/**
 * CPUID variant that initializes all 4 registers before the CPUID instruction.
 *
 * @returns The EAX result value.
 * @param   uOperator   CPUID operation (eax).
 * @param   uInitEBX    The value to assign EBX prior to the CPUID instruction.
 * @param   uInitECX    The value to assign ECX prior to the CPUID instruction.
 * @param   uInitEDX    The value to assign EDX prior to the CPUID instruction.
 * @param   pvEAX       Where to store eax. Optional.
 * @param   pvEBX       Where to store ebx. Optional.
 * @param   pvECX       Where to store ecx. Optional.
 * @param   pvEDX       Where to store edx. Optional.
 */
DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
                                 void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);


/**
 * Performs the cpuid instruction returning ecx and edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
{
    uint32_t uEBX;
    ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
}
#endif


/**
 * Performs the cpuid instruction returning eax.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EAX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
{
    RTCCUINTREG xAX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "rbx", "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (xAX)
             : "0" (uOperator)
             : "ecx", "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "edx", "ecx", "ebx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xAX = aInfo[0];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xAX], eax
        pop     ebx
    }
# endif
    return (uint32_t)xAX;
}
#endif


/**
 * Performs the cpuid instruction returning ebx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EBX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=b" (xBX)
             : "0" (uOperator)
             : "rdx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "mov %%ebx, %%edx\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xBX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=b" (xBX)
             : "0" (uOperator)
             : "edx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint32_t)xBX;
}
#endif


/**
 * Performs the cpuid instruction returning ecx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns ECX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
{
    RTCCUINTREG xCX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=c" (xCX)
             : "0" (uOperator)
             : "rbx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "ebx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xCX = aInfo[2];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xCX], ecx
        pop     ebx
    }
# endif
    return (uint32_t)xCX;
}
#endif


/**
 * Performs the cpuid instruction returning edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EDX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
{
    RTCCUINTREG xDX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=d" (xDX)
             : "0" (uOperator)
             : "rbx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ebx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xDX = aInfo[3];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
# endif
    return (uint32_t)xDX;
}
#endif


/**
 * Checks if the current CPU supports CPUID.
 *
 * @returns true if CPUID is supported.
 */
DECLINLINE(bool) ASMHasCpuId(void)
{
#ifdef RT_ARCH_AMD64
    return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
#else /* !RT_ARCH_AMD64 */
    bool fRet = false;
# if RT_INLINE_ASM_GNU_STYLE
    uint32_t u1;
    uint32_t u2;
    __asm__ ("pushf\n\t"
             "pop %1\n\t"
             "mov %1, %2\n\t"
             "xorl $0x200000, %1\n\t"
             "push %1\n\t"
             "popf\n\t"
             "pushf\n\t"
             "pop %1\n\t"
             "cmpl %1, %2\n\t"
             "setne %0\n\t"
             "push %2\n\t"
             "popf\n\t"
             : "=m" (fRet), "=r" (u1), "=r" (u2));
# else
    __asm
    {
        pushfd
        pop     eax
        mov     ebx, eax
        xor     eax, 0200000h
        push    eax
        popfd
        pushfd
        pop     eax
        cmp     eax, ebx
        setne   fRet
        push    ebx
        popfd
    }
# endif
    return fRet;
#endif /* !RT_ARCH_AMD64 */
}


/**
 * Gets the APIC ID of the current CPU.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint8_t) ASMGetApicId(void);
#else
DECLINLINE(uint8_t) ASMGetApicId(void)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("mov %%ebx,%1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%1\n\t"
                          : "=a" (uSpill),
                            "=rm" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  else
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, 1);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, 1
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint8_t)(xBX >> 24);
}
#endif


/**
 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0)
 * @param   uECX    ECX return from ASMCpuId(0)
 * @param   uEDX    EDX return from ASMCpuId(0)
 */
DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x756e6547)
        && uECX == UINT32_C(0x6c65746e)
        && uEDX == UINT32_C(0x49656e69);
}


/**
 * Tests if this is a genuine Intel CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsIntelCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
}


/**
 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0)
 * @param   uECX    ECX return from ASMCpuId(0)
 * @param   uEDX    EDX return from ASMCpuId(0)
 */
DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x68747541)
        && uECX == UINT32_C(0x444d4163)
        && uEDX == UINT32_C(0x69746e65);
}


/**
 * Tests if this is an authentic AMD CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsAmdCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
}


/**
 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0).
 * @param   uECX    ECX return from ASMCpuId(0).
 * @param   uEDX    EDX return from ASMCpuId(0).
 */
DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x746e6543)
        && uECX == UINT32_C(0x736c7561)
        && uEDX == UINT32_C(0x48727561);
}


/**
 * Tests if this is a centaur hauling VIA CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsViaCentaurCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
}


/**
 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
 *
 * @returns true/false.
 * @param   uEAX    The EAX value of CPUID leaf 0x00000000.
 *
 * @note    This only succeeds if there are at least two leaves in the range.
 * @remarks The upper range limit is just some half reasonable value we've
 *          picked out of thin air.
 */
DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
{
    return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
}


/**
 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
 *
 * @returns true/false.
 * @param   uEAX    The EAX value of CPUID leaf 0x80000000.
 *
 * @note    This only succeeds if there are at least two leaves in the range.
 * @remarks The upper range limit is just some half reasonable value we've
 *          picked out of thin air.
 */
DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
{
    return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
}


/**
 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Family.
 * @param   uEAX    EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf
         ? ((uEAX >> 20) & 0x7f) + 0xf
         : ((uEAX >> 8) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 * @param   fIntel  Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
 */
DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
{
    return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Stepping.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
{
    return uEAX & 0xf;
}
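
/* Usage sketch (illustrative, not part of the original header): decoding
 * CPUID leaf 1 with the helpers above.
 * @code
 *      uint32_t const uEAX      = ASMCpuId_EAX(1);
 *      uint32_t const uFamily   = ASMGetCpuFamily(uEAX);
 *      uint32_t const uModel    = ASMGetCpuModel(uEAX, ASMIsIntelCpu());
 *      uint32_t const uStepping = ASMGetCpuStepping(uEAX);
 * @endcode
 */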


/**
 * Get cr0.
 * @returns cr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR0(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
{
    RTCCUINTREG uCR0;
# if RT_INLINE_ASM_USES_INTRIN
    uCR0 = __readcr0();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr0, %0\t\n" : "=r" (uCR0));
#  else
    __asm__ __volatile__("movl %%cr0, %0\t\n" : "=r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr0
        mov     [uCR0], rax
#  else
        mov     eax, cr0
        mov     [uCR0], eax
#  endif
    }
# endif
    return uCR0;
}
#endif


/**
 * Sets the CR0 register.
 * @param   uCR0    The new CR0 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
#else
DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr0(uCR0);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
#  else
    __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR0]
        mov     cr0, rax
#  else
        mov     eax, [uCR0]
        mov     cr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr2.
 * @returns cr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR2(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
{
    RTCCUINTREG uCR2;
# if RT_INLINE_ASM_USES_INTRIN
    uCR2 = __readcr2();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr2, %0\t\n" : "=r" (uCR2));
#  else
    __asm__ __volatile__("movl %%cr2, %0\t\n" : "=r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr2
        mov     [uCR2], rax
#  else
        mov     eax, cr2
        mov     [uCR2], eax
#  endif
    }
# endif
    return uCR2;
}
#endif


/**
 * Sets the CR2 register.
 * @param   uCR2    The new CR2 value.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
#else
DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
#  else
    __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR2]
        mov     cr2, rax
#  else
        mov     eax, [uCR2]
        mov     cr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr3.
 * @returns cr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR3(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
{
    RTCCUINTREG uCR3;
# if RT_INLINE_ASM_USES_INTRIN
    uCR3 = __readcr3();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\t\n" : "=r" (uCR3));
#  else
    __asm__ __volatile__("movl %%cr3, %0\t\n" : "=r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     [uCR3], rax
#  else
        mov     eax, cr3
        mov     [uCR3], eax
#  endif
    }
# endif
    return uCR3;
}
#endif


/**
 * Sets the CR3 register.
 *
 * @param   uCR3    New CR3 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
#else
DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(uCR3);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
#  else
    __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR3]
        mov     cr3, rax
#  else
        mov     eax, [uCR3]
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Reloads the CR3 register.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMReloadCR3(void);
#else
DECLINLINE(void) ASMReloadCR3(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(__readcr3());

# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG u;
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t"
                         "movq %0, %%cr3\n\t"
                         : "=r" (u));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t"
                         "movl %0, %%cr3\n\t"
                         : "=r" (u));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     cr3, rax
#  else
        mov     eax, cr3
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr4.
 * @returns cr4.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR4(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
{
    RTCCUINTREG uCR4;
# if RT_INLINE_ASM_USES_INTRIN
    uCR4 = __readcr4();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr4, %0\t\n" : "=r" (uCR4));
#  else
    __asm__ __volatile__("movl %%cr4, %0\t\n" : "=r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr4
        mov     [uCR4], rax
#  else
        push    eax /* just in case */
        /*mov eax, cr4*/
        _emit   0x0f
        _emit   0x20
        _emit   0xe0
        mov     [uCR4], eax
        pop     eax
#  endif
    }
# endif
    return uCR4;
}
#endif


/**
 * Sets the CR4 register.
 *
 * @param   uCR4    New CR4 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
#else
DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr4(uCR4);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
#  else
    __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR4]
        mov     cr4, rax
#  else
        mov     eax, [uCR4]
        _emit   0x0F
        _emit   0x22
        _emit   0xE0 /* mov cr4, eax */
#  endif
    }
# endif
}
#endif


/**
 * Get cr8.
 * @returns cr8.
 * @remark  The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR8(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
{
# ifdef RT_ARCH_AMD64
    RTCCUINTREG uCR8;
#  if RT_INLINE_ASM_USES_INTRIN
    uCR8 = __readcr8();

#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movq %%cr8, %0\t\n" : "=r" (uCR8));
#  else
    __asm
    {
        mov     rax, cr8
        mov     [uCR8], rax
    }
#  endif
    return uCR8;
# else /* !RT_ARCH_AMD64 */
    return 0;
# endif /* !RT_ARCH_AMD64 */
}
#endif


/**
 * Enables interrupts (EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMIntEnable(void);
#else
DECLINLINE(void) ASMIntEnable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("sti\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _enable();
# else
    __asm sti
# endif
}
#endif


/**
 * Disables interrupts (!EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMIntDisable(void);
#else
DECLINLINE(void) ASMIntDisable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("cli\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _disable();
# else
    __asm cli
# endif
}
#endif


/**
 * Disables interrupts and returns previous xFLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
{
    RTCCUINTREG xFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "cli\n\t"
                         "popq %0\n\t"
                         : "=r" (xFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "cli\n\t"
                         "popl %0\n\t"
                         : "=r" (xFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
    xFlags = ASMGetFlags();
    _disable();
# else
    __asm {
        pushfd
        cli
        pop     [xFlags]
    }
# endif
    return xFlags;
}
#endif
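
/* Usage sketch (illustrative, not part of the original header): the usual
 * save/disable/restore pattern for a short ring-0 critical section.
 * @code
 *      RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
 *      // ... touch per-CPU state ...
 *      ASMSetFlags(fSavedFlags);
 * @endcode
 */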


/**
 * Are interrupts enabled?
 *
 * @returns true / false.
 */
DECLINLINE(bool) ASMIntAreEnabled(void)
{
    RTCCUINTREG uFlags = ASMGetFlags();
    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
}


/**
 * Halts the CPU until interrupted.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMHalt(void);
#else
DECLINLINE(void) ASMHalt(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("hlt\n\t");
# else
    __asm {
        hlt
    }
# endif
}
#endif


/**
 * Reads a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
#else
DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    u.u = __readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register.
 *
 * @param   uRegister   Register to write to.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    __writemsr(uRegister, u.u);

# else
    __asm
    {
        mov     ecx, [uRegister]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
    }
# endif
}
#endif
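
/* Usage sketch (illustrative, not part of the original header): reading
 * IA32_APIC_BASE (MSR 0x1b).  Ring-0 only; RDMSR/WRMSR raise #GP(0) for
 * registers the CPU does not implement, so the MSR must be known to exist.
 * @code
 *      uint64_t const uApicBase = ASMRdMsr(0x0000001b);
 *      bool     const fIsBsp    = RT_BOOL(uApicBase & RT_BIT_64(8));
 * @endcode
 */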


/**
 * Reads a machine specific register, extended version (for AMD).
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 * @param   uXDI        RDI/EDI value.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTREG uXDI);
#else
DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTREG uXDI)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
        xchg    edi, [uXDI]
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register, extended version (for AMD).
 *
 * @param   uRegister   Register to write to.
 * @param   uXDI        RDI/EDI value.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTREG uXDI, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTREG uXDI, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister),
                           "D" (uXDI));

# else
    __asm
    {
        mov     ecx, [uRegister]
        xchg    edi, [uXDI]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
        xchg    edi, [uXDI]
    }
# endif
}
#endif



/**
 * Reads low part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u32)
                         : "c" (uRegister)
                         : "edx");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)__readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], eax
    }
# endif

    return u32;
}
#endif


/**
 * Reads high part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=d" (u32)
                         : "c" (uRegister)
                         : "eax");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)(__readmsr(uRegister) >> 32);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], edx
    }
# endif

    return u32;
}
#endif


/**
 * Gets dr0.
 *
 * @returns dr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR0(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
{
    RTCCUINTREG uDR0;
# if RT_INLINE_ASM_USES_INTRIN
    uDR0 = __readdr(0);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
#  else
    __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr0
        mov     [uDR0], rax
#  else
        mov     eax, dr0
        mov     [uDR0], eax
#  endif
    }
# endif
    return uDR0;
}
#endif


/**
 * Gets dr1.
 *
 * @returns dr1.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR1(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
{
    RTCCUINTREG uDR1;
# if RT_INLINE_ASM_USES_INTRIN
    uDR1 = __readdr(1);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
#  else
    __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr1
        mov     [uDR1], rax
#  else
        mov     eax, dr1
        mov     [uDR1], eax
#  endif
    }
# endif
    return uDR1;
}
#endif


/**
 * Gets dr2.
 *
 * @returns dr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR2(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
{
    RTCCUINTREG uDR2;
# if RT_INLINE_ASM_USES_INTRIN
    uDR2 = __readdr(2);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
#  else
    __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr2
        mov     [uDR2], rax
#  else
        mov     eax, dr2
        mov     [uDR2], eax
#  endif
    }
# endif
    return uDR2;
}
#endif


/**
 * Gets dr3.
 *
 * @returns dr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR3(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
{
    RTCCUINTREG uDR3;
# if RT_INLINE_ASM_USES_INTRIN
    uDR3 = __readdr(3);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
#  else
    __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr3
        mov     [uDR3], rax
#  else
        mov     eax, dr3
        mov     [uDR3], eax
#  endif
    }
# endif
    return uDR3;
}
#endif


/**
 * Gets dr6.
 *
 * @returns dr6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR6(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
{
    RTCCUINTREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
#  else
        mov     eax, dr6
        mov     [uDR6], eax
#  endif
    }
# endif
    return uDR6;
}
#endif


/**
 * Reads and clears DR6.
 *
 * @returns DR6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
{
    RTCCUINTREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
    __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 15-12 and 63-32 are zero. */
# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 15-12 and 63-32 are zero. */
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t"
                         "movq %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t"
                         "movl %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
        mov     rcx, rax
        mov     ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 15-12 and 63-32 are zero. */
        mov     dr6, rcx
#  else
        mov     eax, dr6
        mov     [uDR6], eax
        mov     ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 15-12 are zero. */
        mov     dr6, ecx
#  endif
    }
# endif
    return uDR6;
}
#endif


/**
 * Gets dr7.
 *
 * @returns dr7.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR7(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
{
    RTCCUINTREG uDR7;
# if RT_INLINE_ASM_USES_INTRIN
    uDR7 = __readdr(7);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
#  else
    __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr7
        mov     [uDR7], rax
#  else
        mov     eax, dr7
        mov     [uDR7], eax
#  endif
    }
# endif
    return uDR7;
}
#endif


/**
 * Sets dr0.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(0, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr0, rax
#  else
        mov     eax, [uDRVal]
        mov     dr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr1.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(1, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr1, rax
#  else
        mov     eax, [uDRVal]
        mov     dr1, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr2.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(2, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr2, rax
#  else
        mov     eax, [uDRVal]
        mov     dr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr3.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(3, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr3, rax
#  else
        mov     eax, [uDRVal]
        mov     dr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr6.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(6, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr6, rax
#  else
        mov     eax, [uDRVal]
        mov     dr6, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr7.
 *
 * @param   uDRVal   Debug register value to write
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(7, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr7, rax
#  else
        mov     eax, [uDRVal]
        mov     dr7, eax
#  endif
    }
# endif
}
#endif
2517
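/* Example (illustrative sketch, not part of the IPRT API): arming a one-byte
 * execution breakpoint with the DR setters above. The raw constants encode
 * L0=1 (local enable for DR0), R/W0=00 and LEN0=00 (instruction breakpoint,
 * mandatory length encoding) plus the always-set bit 10 in DR7, and the DR6
 * init value; consult the Intel SDM before relying on these bit values.
 * @code
 *      static void ExampleArmExecBreakpoint(uintptr_t uCodeAddr)
 *      {
 *          ASMSetDR0(uCodeAddr);               // linear address to trap on
 *          ASMSetDR6(UINT32_C(0xffff0ff0));    // DR6 init value, clears stale status bits
 *          ASMSetDR7(UINT32_C(0x00000401));    // bit 10 + L0; R/W0=LEN0=0 => exec, 1 byte
 *      }
 * @endcode
 */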
2518
2519/**
2520 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2521 *
2522 * @param Port I/O port to write to.
2523 * @param u8 8-bit integer to write.
2524 */
2525#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2526DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2527#else
2528DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2529{
2530# if RT_INLINE_ASM_GNU_STYLE
2531 __asm__ __volatile__("outb %b1, %w0\n\t"
2532 :: "Nd" (Port),
2533 "a" (u8));
2534
2535# elif RT_INLINE_ASM_USES_INTRIN
2536 __outbyte(Port, u8);
2537
2538# else
2539 __asm
2540 {
2541 mov dx, [Port]
2542 mov al, [u8]
2543 out dx, al
2544 }
2545# endif
2546}
2547#endif
2548
2549
2550/**
2551 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2552 *
2553 * @returns 8-bit integer.
2554 * @param Port I/O port to read from.
2555 */
2556#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2557DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2558#else
2559DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2560{
2561 uint8_t u8;
2562# if RT_INLINE_ASM_GNU_STYLE
2563 __asm__ __volatile__("inb %w1, %b0\n\t"
2564 : "=a" (u8)
2565 : "Nd" (Port));
2566
2567# elif RT_INLINE_ASM_USES_INTRIN
2568 u8 = __inbyte(Port);
2569
2570# else
2571 __asm
2572 {
2573 mov dx, [Port]
2574 in al, dx
2575 mov [u8], al
2576 }
2577# endif
2578 return u8;
2579}
2580#endif
2581
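/* Example (illustrative sketch): reading a CMOS/RTC register through the
 * classic PC-AT index/data port pair 0x70/0x71 with the byte helpers above;
 * the port numbers and the NMI bit in the index are platform convention,
 * not IPRT definitions.
 * @code
 *      static uint8_t ExampleCmosRead(uint8_t idxReg)
 *      {
 *          ASMOutU8(0x70, idxReg & 0x7f);      // select the register, leave NMI enabled
 *          return ASMInU8(0x71);               // read its current value
 *      }
 * @endcode
 */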
2582
2583/**
2584 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2585 *
2586 * @param Port I/O port to write to.
2587 * @param u16 16-bit integer to write.
2588 */
2589#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2590DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2591#else
2592DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2593{
2594# if RT_INLINE_ASM_GNU_STYLE
2595 __asm__ __volatile__("outw %w1, %w0\n\t"
2596 :: "Nd" (Port),
2597 "a" (u16));
2598
2599# elif RT_INLINE_ASM_USES_INTRIN
2600 __outword(Port, u16);
2601
2602# else
2603 __asm
2604 {
2605 mov dx, [Port]
2606 mov ax, [u16]
2607 out dx, ax
2608 }
2609# endif
2610}
2611#endif
2612
2613
2614/**
2615 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2616 *
2617 * @returns 16-bit integer.
2618 * @param Port I/O port to read from.
2619 */
2620#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2621DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2622#else
2623DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2624{
2625 uint16_t u16;
2626# if RT_INLINE_ASM_GNU_STYLE
2627 __asm__ __volatile__("inw %w1, %w0\n\t"
2628 : "=a" (u16)
2629 : "Nd" (Port));
2630
2631# elif RT_INLINE_ASM_USES_INTRIN
2632 u16 = __inword(Port);
2633
2634# else
2635 __asm
2636 {
2637 mov dx, [Port]
2638 in ax, dx
2639 mov [u16], ax
2640 }
2641# endif
2642 return u16;
2643}
2644#endif
2645
2646
2647/**
2648 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2649 *
2650 * @param Port I/O port to write to.
2651 * @param u32 32-bit integer to write.
2652 */
2653#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2654DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2655#else
2656DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2657{
2658# if RT_INLINE_ASM_GNU_STYLE
2659 __asm__ __volatile__("outl %1, %w0\n\t"
2660 :: "Nd" (Port),
2661 "a" (u32));
2662
2663# elif RT_INLINE_ASM_USES_INTRIN
2664 __outdword(Port, u32);
2665
2666# else
2667 __asm
2668 {
2669 mov dx, [Port]
2670 mov eax, [u32]
2671 out dx, eax
2672 }
2673# endif
2674}
2675#endif
2676
2677
2678/**
2679 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2680 *
2681 * @returns 32-bit integer.
2682 * @param Port I/O port to read from.
2683 */
2684#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2685DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2686#else
2687DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2688{
2689 uint32_t u32;
2690# if RT_INLINE_ASM_GNU_STYLE
2691 __asm__ __volatile__("inl %w1, %0\n\t"
2692 : "=a" (u32)
2693 : "Nd" (Port));
2694
2695# elif RT_INLINE_ASM_USES_INTRIN
2696 u32 = __indword(Port);
2697
2698# else
2699 __asm
2700 {
2701 mov dx, [Port]
2702 in eax, dx
2703 mov [u32], eax
2704 }
2705# endif
2706 return u32;
2707}
2708#endif
2709
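/* Example (illustrative sketch): a 32-bit PCI configuration space read using
 * configuration mechanism #1 (address port 0xcf8, data port 0xcfc) on top of
 * ASMOutU32/ASMInU32. Real code must serialize access to the address port.
 * @code
 *      static uint32_t ExamplePciCfgRead32(uint8_t uBus, uint8_t uDev, uint8_t uFn, uint8_t offReg)
 *      {
 *          uint32_t uAddr = UINT32_C(0x80000000)              // config space enable bit
 *                         | ((uint32_t)uBus          << 16)
 *                         | ((uint32_t)(uDev & 0x1f) << 11)
 *                         | ((uint32_t)(uFn  & 0x07) <<  8)
 *                         | (offReg & 0xfc);                  // dword aligned register offset
 *          ASMOutU32(0xcf8, uAddr);
 *          return ASMInU32(0xcfc);
 *      }
 * @endcode
 */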
2710
2711/**
2712 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2713 *
2714 * @param Port I/O port to write to.
2715 * @param pau8 Pointer to the string buffer.
2716 * @param c The number of items to write.
2717 */
2718#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2719DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2720#else
2721DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2722{
2723# if RT_INLINE_ASM_GNU_STYLE
2724 __asm__ __volatile__("rep; outsb\n\t"
2725 : "+S" (pau8),
2726 "+c" (c)
2727 : "d" (Port));
2728
2729# elif RT_INLINE_ASM_USES_INTRIN
2730 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2731
2732# else
2733 __asm
2734 {
2735 mov dx, [Port]
2736 mov ecx, [c]
2737 mov eax, [pau8]
2738 xchg esi, eax
2739 rep outsb
2740 xchg esi, eax
2741 }
2742# endif
2743}
2744#endif
2745
2746
2747/**
2748 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2749 *
2750 * @param Port I/O port to read from.
2751 * @param pau8 Pointer to the string buffer (output).
2752 * @param c The number of items to read.
2753 */
2754#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2755DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2756#else
2757DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2758{
2759# if RT_INLINE_ASM_GNU_STYLE
2760 __asm__ __volatile__("rep; insb\n\t"
2761 : "+D" (pau8),
2762 "+c" (c)
2763 : "d" (Port));
2764
2765# elif RT_INLINE_ASM_USES_INTRIN
2766 __inbytestring(Port, pau8, (unsigned long)c);
2767
2768# else
2769 __asm
2770 {
2771 mov dx, [Port]
2772 mov ecx, [c]
2773 mov eax, [pau8]
2774 xchg edi, eax
2775 rep insb
2776 xchg edi, eax
2777 }
2778# endif
2779}
2780#endif
2781
2782
2783/**
2784 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2785 *
2786 * @param Port I/O port to write to.
2787 * @param pau16 Pointer to the string buffer.
2788 * @param c The number of items to write.
2789 */
2790#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2791DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2792#else
2793DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2794{
2795# if RT_INLINE_ASM_GNU_STYLE
2796 __asm__ __volatile__("rep; outsw\n\t"
2797 : "+S" (pau16),
2798 "+c" (c)
2799 : "d" (Port));
2800
2801# elif RT_INLINE_ASM_USES_INTRIN
2802 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2803
2804# else
2805 __asm
2806 {
2807 mov dx, [Port]
2808 mov ecx, [c]
2809 mov eax, [pau16]
2810 xchg esi, eax
2811 rep outsw
2812 xchg esi, eax
2813 }
2814# endif
2815}
2816#endif
2817
2818
2819/**
2820 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2821 *
2822 * @param Port I/O port to read from.
2823 * @param pau16 Pointer to the string buffer (output).
2824 * @param c The number of items to read.
2825 */
2826#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2827DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2828#else
2829DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2830{
2831# if RT_INLINE_ASM_GNU_STYLE
2832 __asm__ __volatile__("rep; insw\n\t"
2833 : "+D" (pau16),
2834 "+c" (c)
2835 : "d" (Port));
2836
2837# elif RT_INLINE_ASM_USES_INTRIN
2838 __inwordstring(Port, pau16, (unsigned long)c);
2839
2840# else
2841 __asm
2842 {
2843 mov dx, [Port]
2844 mov ecx, [c]
2845 mov eax, [pau16]
2846 xchg edi, eax
2847 rep insw
2848 xchg edi, eax
2849 }
2850# endif
2851}
2852#endif
2853
2854
2855/**
2856 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2857 *
2858 * @param Port I/O port to write to.
2859 * @param pau32 Pointer to the string buffer.
2860 * @param c The number of items to write.
2861 */
2862#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2863DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2864#else
2865DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2866{
2867# if RT_INLINE_ASM_GNU_STYLE
2868 __asm__ __volatile__("rep; outsl\n\t"
2869 : "+S" (pau32),
2870 "+c" (c)
2871 : "d" (Port));
2872
2873# elif RT_INLINE_ASM_USES_INTRIN
2874 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2875
2876# else
2877 __asm
2878 {
2879 mov dx, [Port]
2880 mov ecx, [c]
2881 mov eax, [pau32]
2882 xchg esi, eax
2883 rep outsd
2884 xchg esi, eax
2885 }
2886# endif
2887}
2888#endif
2889
2890
2891/**
2892 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2893 *
2894 * @param Port I/O port to read from.
2895 * @param pau32 Pointer to the string buffer (output).
2896 * @param c The number of items to read.
2897 */
2898#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2899DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2900#else
2901DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2902{
2903# if RT_INLINE_ASM_GNU_STYLE
2904 __asm__ __volatile__("rep; insl\n\t"
2905 : "+D" (pau32),
2906 "+c" (c)
2907 : "d" (Port));
2908
2909# elif RT_INLINE_ASM_USES_INTRIN
2910 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2911
2912# else
2913 __asm
2914 {
2915 mov dx, [Port]
2916 mov ecx, [c]
2917 mov eax, [pau32]
2918 xchg edi, eax
2919 rep insd
2920 xchg edi, eax
2921 }
2922# endif
2923}
2924#endif
2925
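/* Example (illustrative sketch): draining one 512-byte sector from the
 * primary ATA data port with the 16-bit string reader above; the port number
 * and sector size are ATA conventions rather than IPRT definitions.
 * @code
 *      static void ExampleAtaReadSector(uint16_t *pau16Sector)
 *      {
 *          ASMInStrU16(0x1f0, pau16Sector, 256);   // 256 words = 512 bytes
 *      }
 * @endcode
 */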
2926
2927/**
2928 * Invalidates the TLB entry for a page (INVLPG).
2929 *
2930 * @param pv Address of the page to invalidate.
2931 */
2932#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2933DECLASM(void) ASMInvalidatePage(void *pv);
2934#else
2935DECLINLINE(void) ASMInvalidatePage(void *pv)
2936{
2937# if RT_INLINE_ASM_USES_INTRIN
2938 __invlpg(pv);
2939
2940# elif RT_INLINE_ASM_GNU_STYLE
2941 __asm__ __volatile__("invlpg %0\n\t"
2942 : : "m" (*(uint8_t *)pv));
2943# else
2944 __asm
2945 {
2946# ifdef RT_ARCH_AMD64
2947 mov rax, [pv]
2948 invlpg [rax]
2949# else
2950 mov eax, [pv]
2951 invlpg [eax]
2952# endif
2953 }
2954# endif
2955}
2956#endif
2957
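/* Example (illustrative sketch): after rewriting a present PTE the stale TLB
 * entry for that page must be flushed before the new mapping is relied upon;
 * invalidating the single page is far cheaper than reloading CR3. The PTE
 * type and the update below are hypothetical.
 * @code
 *      pPte->u = uNewPteValue;       // hypothetical page table entry update
 *      ASMInvalidatePage(pvPage);    // drop the stale translation for this page only
 * @endcode
 */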
2958
2959/**
2960 * Writes back the internal caches and invalidates them (WBINVD).
2961 */
2962#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2963DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2964#else
2965DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2966{
2967# if RT_INLINE_ASM_USES_INTRIN
2968 __wbinvd();
2969
2970# elif RT_INLINE_ASM_GNU_STYLE
2971 __asm__ __volatile__("wbinvd");
2972# else
2973 __asm
2974 {
2975 wbinvd
2976 }
2977# endif
2978}
2979#endif
2980
2981
2982/**
2983 * Invalidates internal and (perhaps) external caches without first
2984 * flushing dirty cache lines (INVD). Use with extreme care.
2985 */
2986#if RT_INLINE_ASM_EXTERNAL
2987DECLASM(void) ASMInvalidateInternalCaches(void);
2988#else
2989DECLINLINE(void) ASMInvalidateInternalCaches(void)
2990{
2991# if RT_INLINE_ASM_GNU_STYLE
2992 __asm__ __volatile__("invd");
2993# else
2994 __asm
2995 {
2996 invd
2997 }
2998# endif
2999}
3000#endif
3001
3002
3003/**
3004 * Memory load/store fence; waits for any pending writes and reads to complete.
3005 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
3006 */
3007DECLINLINE(void) ASMMemoryFenceSSE2(void)
3008{
3009#if RT_INLINE_ASM_GNU_STYLE
3010    __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t"); /* mfence */
3011#elif RT_INLINE_ASM_USES_INTRIN
3012 _mm_mfence();
3013#else
3014 __asm
3015 {
3016 _emit 0x0f
3017 _emit 0xae
3018        _emit 0xf0 /* mfence */
3019 }
3020#endif
3021}
3022
3023
3024/**
3025 * Memory store fence; waits for any writes to complete.
3026 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit to be set.
3027 */
3028DECLINLINE(void) ASMWriteFenceSSE(void)
3029{
3030#if RT_INLINE_ASM_GNU_STYLE
3031    __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t"); /* sfence */
3032#elif RT_INLINE_ASM_USES_INTRIN
3033 _mm_sfence();
3034#else
3035 __asm
3036 {
3037 _emit 0x0f
3038 _emit 0xae
3039        _emit 0xf8 /* sfence */
3040 }
3041#endif
3042}
3043
3044
3045/**
3046 * Memory load fence; waits for any pending reads to complete.
3047 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
3048 */
3049DECLINLINE(void) ASMReadFenceSSE2(void)
3050{
3051#if RT_INLINE_ASM_GNU_STYLE
3052    __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t"); /* lfence */
3053#elif RT_INLINE_ASM_USES_INTRIN
3054 _mm_lfence();
3055#else
3056 __asm
3057 {
3058 _emit 0x0f
3059 _emit 0xae
3060        _emit 0xe8 /* lfence */
3061 }
3062#endif
3063}
3064
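/* Example (illustrative sketch): the SSE store fence above is what makes
 * non-temporal stores safe to publish; ordinary x86 stores are already
 * strongly ordered among themselves and need no fence in between.
 * @code
 *      #include <emmintrin.h>
 *      static void ExamplePublishNT(__m128i *pDst, __m128i Val, volatile uint32_t *pfReady)
 *      {
 *          _mm_stream_si128(pDst, Val);    // non-temporal store bypasses the cache
 *          ASMWriteFenceSSE();             // drain the write-combining buffers...
 *          *pfReady = 1;                   // ...before signalling that the data is ready
 *      }
 * @endcode
 */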
3065/** @} */
3066#endif
3067