VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@50424

Last change on this file since 50424 was 50424, checked in by vboxsync, 11 years ago

ASMSetIDTR and ASMSetGDTR.

1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2013 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
31# error "Not on AMD64 or x86"
32#endif
33
34#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
35# include <intrin.h>
36 /* Emit the intrinsics at all optimization levels. */
37# pragma intrinsic(_ReadWriteBarrier)
38# pragma intrinsic(__cpuid)
39# pragma intrinsic(_enable)
40# pragma intrinsic(_disable)
41# pragma intrinsic(__rdtsc)
42# pragma intrinsic(__readmsr)
43# pragma intrinsic(__writemsr)
44# pragma intrinsic(__outbyte)
45# pragma intrinsic(__outbytestring)
46# pragma intrinsic(__outword)
47# pragma intrinsic(__outwordstring)
48# pragma intrinsic(__outdword)
49# pragma intrinsic(__outdwordstring)
50# pragma intrinsic(__inbyte)
51# pragma intrinsic(__inbytestring)
52# pragma intrinsic(__inword)
53# pragma intrinsic(__inwordstring)
54# pragma intrinsic(__indword)
55# pragma intrinsic(__indwordstring)
56# pragma intrinsic(__invlpg)
57# pragma intrinsic(__wbinvd)
58# pragma intrinsic(__readcr0)
59# pragma intrinsic(__readcr2)
60# pragma intrinsic(__readcr3)
61# pragma intrinsic(__readcr4)
62# pragma intrinsic(__writecr0)
63# pragma intrinsic(__writecr3)
64# pragma intrinsic(__writecr4)
65# pragma intrinsic(__readdr)
66# pragma intrinsic(__writedr)
67# ifdef RT_ARCH_AMD64
68# pragma intrinsic(__readcr8)
69# pragma intrinsic(__writecr8)
70# endif
71# if RT_INLINE_ASM_USES_INTRIN >= 15
72# pragma intrinsic(__readeflags)
73# pragma intrinsic(__writeeflags)
74# endif
75#endif
76
77
78
79/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
80 * @ingroup grp_rt_asm
81 * @{
82 */
83
84/** @todo Find a better place for this structure? */
85#pragma pack(1)
86/** IDTR */
87typedef struct RTIDTR
88{
89 /** Size of the IDT. */
90 uint16_t cbIdt;
91 /** Address of the IDT. */
92 uintptr_t pIdt;
93} RTIDTR, *PRTIDTR;
94#pragma pack()
95
96#pragma pack(1)
97/** GDTR */
98typedef struct RTGDTR
99{
100 /** Size of the GDT. */
101 uint16_t cbGdt;
102 /** Address of the GDT. */
103 uintptr_t pGdt;
104} RTGDTR, *PRTGDTR;
105#pragma pack()
106
107
108/**
109 * Gets the content of the IDTR CPU register.
110 * @param pIdtr Where to store the IDTR contents.
111 */
112#if RT_INLINE_ASM_EXTERNAL
113DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
114#else
115DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
116{
117# if RT_INLINE_ASM_GNU_STYLE
118 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
119# else
120 __asm
121 {
122# ifdef RT_ARCH_AMD64
123 mov rax, [pIdtr]
124 sidt [rax]
125# else
126 mov eax, [pIdtr]
127 sidt [eax]
128# endif
129 }
130# endif
131}
132#endif
133
134
135/**
136 * Sets the content of the IDTR CPU register.
137 * @param pIdtr Where to load the IDTR contents from.
138 */
139#if RT_INLINE_ASM_EXTERNAL
140DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
141#else
142DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
143{
144# if RT_INLINE_ASM_GNU_STYLE
145 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
146# else
147 __asm
148 {
149# ifdef RT_ARCH_AMD64
150 mov rax, [pIdtr]
151 lidt [rax]
152# else
153 mov eax, [pIdtr]
154 lidt [eax]
155# endif
156 }
157# endif
158}
159#endif
160
161
162/**
163 * Gets the content of the GDTR CPU register.
164 * @param pGdtr Where to store the GDTR contents.
165 */
166#if RT_INLINE_ASM_EXTERNAL
167DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
168#else
169DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
170{
171# if RT_INLINE_ASM_GNU_STYLE
172 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
173# else
174 __asm
175 {
176# ifdef RT_ARCH_AMD64
177 mov rax, [pGdtr]
178 sgdt [rax]
179# else
180 mov eax, [pGdtr]
181 sgdt [eax]
182# endif
183 }
184# endif
185}
186#endif
187
188
189/**
190 * Sets the content of the GDTR CPU register.
191 * @param pGdtr Where to load the GDTR contents from.
192 */
193#if RT_INLINE_ASM_EXTERNAL
194DECLASM(void) ASMSetGDTR(const RTGDTR *pGdtr);
195#else
196DECLINLINE(void) ASMSetGDTR(const RTGDTR *pGdtr)
197{
198# if RT_INLINE_ASM_GNU_STYLE
199 __asm__ __volatile__("lgdt %0" : : "m" (*pGdtr));
200# else
201 __asm
202 {
203# ifdef RT_ARCH_AMD64
204 mov rax, [pGdtr]
205 lgdt [rax]
206# else
207 mov eax, [pGdtr]
208 lgdt [eax]
209# endif
210 }
211# endif
212}
213#endif
214
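/*
 * Usage sketch for the four descriptor-table accessors above: capture the
 * IDTR/GDTR, run with temporary tables, then restore.  A minimal ring-0
 * illustration with a hypothetical helper, hence the #if 0 guard.
 */
#if 0 /* usage sketch */
static void ExampleSaveRestoreDescriptorTables(void)
{
    RTIDTR Idtr;
    RTGDTR Gdtr;
    ASMGetIDTR(&Idtr);      /* sidt: capture the current IDT limit+base. */
    ASMGetGDTR(&Gdtr);      /* sgdt: capture the current GDT limit+base. */
    /* ... run with temporary tables ... */
    ASMSetIDTR(&Idtr);      /* lidt: restore. */
    ASMSetGDTR(&Gdtr);      /* lgdt: restore. */
}
#endif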
215
216
217/**
218 * Get the cs register.
219 * @returns cs.
220 */
221#if RT_INLINE_ASM_EXTERNAL
222DECLASM(RTSEL) ASMGetCS(void);
223#else
224DECLINLINE(RTSEL) ASMGetCS(void)
225{
226 RTSEL SelCS;
227# if RT_INLINE_ASM_GNU_STYLE
228 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
229# else
230 __asm
231 {
232 mov ax, cs
233 mov [SelCS], ax
234 }
235# endif
236 return SelCS;
237}
238#endif
239
240
241/**
242 * Get the DS register.
243 * @returns DS.
244 */
245#if RT_INLINE_ASM_EXTERNAL
246DECLASM(RTSEL) ASMGetDS(void);
247#else
248DECLINLINE(RTSEL) ASMGetDS(void)
249{
250 RTSEL SelDS;
251# if RT_INLINE_ASM_GNU_STYLE
252 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
253# else
254 __asm
255 {
256 mov ax, ds
257 mov [SelDS], ax
258 }
259# endif
260 return SelDS;
261}
262#endif
263
264
265/**
266 * Get the ES register.
267 * @returns ES.
268 */
269#if RT_INLINE_ASM_EXTERNAL
270DECLASM(RTSEL) ASMGetES(void);
271#else
272DECLINLINE(RTSEL) ASMGetES(void)
273{
274 RTSEL SelES;
275# if RT_INLINE_ASM_GNU_STYLE
276 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
277# else
278 __asm
279 {
280 mov ax, es
281 mov [SelES], ax
282 }
283# endif
284 return SelES;
285}
286#endif
287
288
289/**
290 * Get the FS register.
291 * @returns FS.
292 */
293#if RT_INLINE_ASM_EXTERNAL
294DECLASM(RTSEL) ASMGetFS(void);
295#else
296DECLINLINE(RTSEL) ASMGetFS(void)
297{
298 RTSEL SelFS;
299# if RT_INLINE_ASM_GNU_STYLE
300 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
301# else
302 __asm
303 {
304 mov ax, fs
305 mov [SelFS], ax
306 }
307# endif
308 return SelFS;
309}
310#endif
311
312
313/**
314 * Get the GS register.
315 * @returns GS.
316 */
317#if RT_INLINE_ASM_EXTERNAL
318DECLASM(RTSEL) ASMGetGS(void);
319#else
320DECLINLINE(RTSEL) ASMGetGS(void)
321{
322 RTSEL SelGS;
323# if RT_INLINE_ASM_GNU_STYLE
324 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
325# else
326 __asm
327 {
328 mov ax, gs
329 mov [SelGS], ax
330 }
331# endif
332 return SelGS;
333}
334#endif
335
336
337/**
338 * Get the SS register.
339 * @returns SS.
340 */
341#if RT_INLINE_ASM_EXTERNAL
342DECLASM(RTSEL) ASMGetSS(void);
343#else
344DECLINLINE(RTSEL) ASMGetSS(void)
345{
346 RTSEL SelSS;
347# if RT_INLINE_ASM_GNU_STYLE
348 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
349# else
350 __asm
351 {
352 mov ax, ss
353 mov [SelSS], ax
354 }
355# endif
356 return SelSS;
357}
358#endif
359
360
361/**
362 * Get the TR register.
363 * @returns TR.
364 */
365#if RT_INLINE_ASM_EXTERNAL
366DECLASM(RTSEL) ASMGetTR(void);
367#else
368DECLINLINE(RTSEL) ASMGetTR(void)
369{
370 RTSEL SelTR;
371# if RT_INLINE_ASM_GNU_STYLE
372 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
373# else
374 __asm
375 {
376 str ax
377 mov [SelTR], ax
378 }
379# endif
380 return SelTR;
381}
382#endif
383
384
385/**
386 * Get the LDTR register.
387 * @returns LDTR.
388 */
389#if RT_INLINE_ASM_EXTERNAL
390DECLASM(RTSEL) ASMGetLDTR(void);
391#else
392DECLINLINE(RTSEL) ASMGetLDTR(void)
393{
394 RTSEL SelLDTR;
395# if RT_INLINE_ASM_GNU_STYLE
396 __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
397# else
398 __asm
399 {
400 sldt ax
401 mov [SelLDTR], ax
402 }
403# endif
404 return SelLDTR;
405}
406#endif
407
408
409/**
410 * Get the access rights for the segment selector.
411 *
412 * @returns The access rights on success or ~0U on failure.
413 * @param uSel The selector value.
414 *
415 * @remarks Using ~0U for failure is chosen because valid access rights always
416 * have bits 0:7 as 0 (on both Intel & AMD).
417 */
418#if RT_INLINE_ASM_EXTERNAL
419DECLASM(uint32_t) ASMGetSegAttr(uint32_t uSel);
420#else
421DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
422{
423 uint32_t uAttr;
424 /* LAR only accesses the low 16 bits of the source operand, but eax is
425 required as the destination operand for getting the full 32-bit access rights. */
426# if RT_INLINE_ASM_GNU_STYLE
427 __asm__ __volatile__("lar %1, %%eax\n\t"
428 "jz done%=\n\t"
429 "movl $0xffffffff, %%eax\n\t"
430 "done%=:\n\t"
431 "movl %%eax, %0\n\t"
432 : "=r" (uAttr)
433 : "r" (uSel)
434 : "cc", "%eax");
435# else
436 __asm
437 {
438 lar eax, [uSel]
439 jz done
440 mov eax, 0ffffffffh
441 done:
442 mov [uAttr], eax
443 }
444# endif
445 return uAttr;
446}
447#endif
448
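/*
 * Usage sketch for ASMGetSegAttr(): check that a selector is present and
 * extract its DPL.  Bit positions follow the LAR result layout (the access
 * byte lands in bits 15:8, so P is bit 15 and DPL bits 14:13); the helper
 * name is hypothetical.
 */
#if 0 /* usage sketch */
static bool ExampleIsSelectorUsable(uint32_t uSel, uint32_t *puDpl)
{
    uint32_t const uAttr = ASMGetSegAttr(uSel);
    if (uAttr == ~0U)                       /* LAR failed: invalid selector. */
        return false;
    *puDpl = (uAttr >> 13) & 3;             /* DPL field. */
    return RT_BOOL(uAttr & RT_BIT_32(15));  /* P (present) bit. */
}
#endif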
449
450/**
451 * Get the [RE]FLAGS register.
452 * @returns [RE]FLAGS.
453 */
454#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
455DECLASM(RTCCUINTREG) ASMGetFlags(void);
456#else
457DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
458{
459 RTCCUINTREG uFlags;
460# if RT_INLINE_ASM_GNU_STYLE
461# ifdef RT_ARCH_AMD64
462 __asm__ __volatile__("pushfq\n\t"
463 "popq %0\n\t"
464 : "=r" (uFlags));
465# else
466 __asm__ __volatile__("pushfl\n\t"
467 "popl %0\n\t"
468 : "=r" (uFlags));
469# endif
470# elif RT_INLINE_ASM_USES_INTRIN >= 15
471 uFlags = __readeflags();
472# else
473 __asm
474 {
475# ifdef RT_ARCH_AMD64
476 pushfq
477 pop [uFlags]
478# else
479 pushfd
480 pop [uFlags]
481# endif
482 }
483# endif
484 return uFlags;
485}
486#endif
487
488
489/**
490 * Set the [RE]FLAGS register.
491 * @param uFlags The new [RE]FLAGS value.
492 */
493#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
494DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
495#else
496DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
497{
498# if RT_INLINE_ASM_GNU_STYLE
499# ifdef RT_ARCH_AMD64
500 __asm__ __volatile__("pushq %0\n\t"
501 "popfq\n\t"
502 : : "g" (uFlags));
503# else
504 __asm__ __volatile__("pushl %0\n\t"
505 "popfl\n\t"
506 : : "g" (uFlags));
507# endif
508# elif RT_INLINE_ASM_USES_INTRIN >= 15
509 __writeeflags(uFlags);
510# else
511 __asm
512 {
513# ifdef RT_ARCH_AMD64
514 push [uFlags]
515 popfq
516# else
517 push [uFlags]
518 popfd
519# endif
520 }
521# endif
522}
523#endif
524
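/*
 * Usage sketch for ASMGetFlags()/ASMSetFlags(): flip the AC bit (bit 18,
 * alignment check) and restore the original value afterwards.  Illustrative
 * only; whether AC has any effect also depends on CR0.AM and the CPL.
 */
#if 0 /* usage sketch */
static void ExampleToggleAlignmentCheck(void)
{
    RTCCUINTREG const fSaved = ASMGetFlags();
    ASMSetFlags(fSaved ^ RT_BIT_32(18));    /* X86_EFL_AC */
    /* ... code exercising alignment checking ... */
    ASMSetFlags(fSaved);                    /* restore the original flags */
}
#endif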
525
526/**
527 * Gets the content of the CPU timestamp counter register.
528 *
529 * @returns TSC.
530 */
531#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
532DECLASM(uint64_t) ASMReadTSC(void);
533#else
534DECLINLINE(uint64_t) ASMReadTSC(void)
535{
536 RTUINT64U u;
537# if RT_INLINE_ASM_GNU_STYLE
538 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
539# else
540# if RT_INLINE_ASM_USES_INTRIN
541 u.u = __rdtsc();
542# else
543 __asm
544 {
545 rdtsc
546 mov [u.s.Lo], eax
547 mov [u.s.Hi], edx
548 }
549# endif
550# endif
551 return u.u;
552}
553#endif
554
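/*
 * Usage sketch for ASMReadTSC(): rough cycle counting around a work item.
 * RDTSC is not a serializing instruction, so precise measurements would
 * need fencing around it; this hypothetical helper ignores that.
 */
#if 0 /* usage sketch */
static uint64_t ExampleTimeWorkInTscTicks(void (*pfnWork)(void))
{
    uint64_t const uStart = ASMReadTSC();
    pfnWork();
    return ASMReadTSC() - uStart;   /* unsigned math keeps the delta wrap-safe */
}
#endif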
555
556/**
557 * Performs the cpuid instruction returning all registers.
558 *
559 * @param uOperator CPUID operation (eax).
560 * @param pvEAX Where to store eax.
561 * @param pvEBX Where to store ebx.
562 * @param pvECX Where to store ecx.
563 * @param pvEDX Where to store edx.
564 * @remark We're using void pointers to ease the use of special bitfield structures and such.
565 */
566#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
567DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
568#else
569DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
570{
571# if RT_INLINE_ASM_GNU_STYLE
572# ifdef RT_ARCH_AMD64
573 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
574 __asm__ __volatile__ ("cpuid\n\t"
575 : "=a" (uRAX),
576 "=b" (uRBX),
577 "=c" (uRCX),
578 "=d" (uRDX)
579 : "0" (uOperator), "2" (0));
580 *(uint32_t *)pvEAX = (uint32_t)uRAX;
581 *(uint32_t *)pvEBX = (uint32_t)uRBX;
582 *(uint32_t *)pvECX = (uint32_t)uRCX;
583 *(uint32_t *)pvEDX = (uint32_t)uRDX;
584# else
585 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
586 "cpuid\n\t"
587 "xchgl %%ebx, %1\n\t"
588 : "=a" (*(uint32_t *)pvEAX),
589 "=r" (*(uint32_t *)pvEBX),
590 "=c" (*(uint32_t *)pvECX),
591 "=d" (*(uint32_t *)pvEDX)
592 : "0" (uOperator), "2" (0));
593# endif
594
595# elif RT_INLINE_ASM_USES_INTRIN
596 int aInfo[4];
597 __cpuid(aInfo, uOperator);
598 *(uint32_t *)pvEAX = aInfo[0];
599 *(uint32_t *)pvEBX = aInfo[1];
600 *(uint32_t *)pvECX = aInfo[2];
601 *(uint32_t *)pvEDX = aInfo[3];
602
603# else
604 uint32_t uEAX;
605 uint32_t uEBX;
606 uint32_t uECX;
607 uint32_t uEDX;
608 __asm
609 {
610 push ebx
611 mov eax, [uOperator]
612 cpuid
613 mov [uEAX], eax
614 mov [uEBX], ebx
615 mov [uECX], ecx
616 mov [uEDX], edx
617 pop ebx
618 }
619 *(uint32_t *)pvEAX = uEAX;
620 *(uint32_t *)pvEBX = uEBX;
621 *(uint32_t *)pvECX = uECX;
622 *(uint32_t *)pvEDX = uEDX;
623# endif
624}
625#endif
626
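/*
 * Usage sketch for ASMCpuId(): assemble the 12-character vendor string from
 * leaf 0, which returns it in EBX, EDX, ECX order.  Hypothetical helper;
 * assumes memcpy from <string.h> is available to the including code.
 */
#if 0 /* usage sketch */
# include <string.h>
static void ExampleGetVendorString(char szVendor[13])
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX); /* uEAX = highest standard leaf */
    memcpy(&szVendor[0], &uEBX, 4);          /* e.g. "Genu" */
    memcpy(&szVendor[4], &uEDX, 4);          /* e.g. "ineI" */
    memcpy(&szVendor[8], &uECX, 4);          /* e.g. "ntel" */
    szVendor[12] = '\0';
}
#endif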
627
628/**
629 * Performs the CPUID instruction with EAX and ECX input returning ALL output
630 * registers.
631 *
632 * @param uOperator CPUID operation (eax).
633 * @param uIdxECX ecx index
634 * @param pvEAX Where to store eax.
635 * @param pvEBX Where to store ebx.
636 * @param pvECX Where to store ecx.
637 * @param pvEDX Where to store edx.
638 * @remark We're using void pointers to ease the use of special bitfield structures and such.
639 */
640#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
641DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
642#else
643DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
644{
645# if RT_INLINE_ASM_GNU_STYLE
646# ifdef RT_ARCH_AMD64
647 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
648 __asm__ ("cpuid\n\t"
649 : "=a" (uRAX),
650 "=b" (uRBX),
651 "=c" (uRCX),
652 "=d" (uRDX)
653 : "0" (uOperator),
654 "2" (uIdxECX));
655 *(uint32_t *)pvEAX = (uint32_t)uRAX;
656 *(uint32_t *)pvEBX = (uint32_t)uRBX;
657 *(uint32_t *)pvECX = (uint32_t)uRCX;
658 *(uint32_t *)pvEDX = (uint32_t)uRDX;
659# else
660 __asm__ ("xchgl %%ebx, %1\n\t"
661 "cpuid\n\t"
662 "xchgl %%ebx, %1\n\t"
663 : "=a" (*(uint32_t *)pvEAX),
664 "=r" (*(uint32_t *)pvEBX),
665 "=c" (*(uint32_t *)pvECX),
666 "=d" (*(uint32_t *)pvEDX)
667 : "0" (uOperator),
668 "2" (uIdxECX));
669# endif
670
671# elif RT_INLINE_ASM_USES_INTRIN
672 int aInfo[4];
673 __cpuidex(aInfo, uOperator, uIdxECX);
674 *(uint32_t *)pvEAX = aInfo[0];
675 *(uint32_t *)pvEBX = aInfo[1];
676 *(uint32_t *)pvECX = aInfo[2];
677 *(uint32_t *)pvEDX = aInfo[3];
678
679# else
680 uint32_t uEAX;
681 uint32_t uEBX;
682 uint32_t uECX;
683 uint32_t uEDX;
684 __asm
685 {
686 push ebx
687 mov eax, [uOperator]
688 mov ecx, [uIdxECX]
689 cpuid
690 mov [uEAX], eax
691 mov [uEBX], ebx
692 mov [uECX], ecx
693 mov [uEDX], edx
694 pop ebx
695 }
696 *(uint32_t *)pvEAX = uEAX;
697 *(uint32_t *)pvEBX = uEBX;
698 *(uint32_t *)pvECX = uECX;
699 *(uint32_t *)pvEDX = uEDX;
700# endif
701}
702#endif
703
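/*
 * Usage sketch for ASMCpuId_Idx_ECX(): enumerate the sub-leaves of Intel's
 * deterministic cache parameters leaf (EAX=4), which remain valid until the
 * cache-type field in EAX[4:0] reads zero.  Hypothetical helper.
 */
#if 0 /* usage sketch */
static uint32_t ExampleCountCacheLevels(void)
{
    uint32_t iSubLeaf = 0;
    uint32_t uEAX, uEBX, uECX, uEDX;
    for (;;)
    {
        ASMCpuId_Idx_ECX(4, iSubLeaf, &uEAX, &uEBX, &uECX, &uEDX);
        if ((uEAX & 0x1f) == 0 /* no more caches */ || iSubLeaf >= 63)
            break;
        iSubLeaf++;
    }
    return iSubLeaf;
}
#endif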
704
705/**
706 * CPUID variant that initializes all 4 registers before the CPUID instruction.
707 *
708 * @returns The EAX result value.
709 * @param uOperator CPUID operation (eax).
710 * @param uInitEBX The value to assign EBX prior to the CPUID instruction.
711 * @param uInitECX The value to assign ECX prior to the CPUID instruction.
712 * @param uInitEDX The value to assign EDX prior to the CPUID instruction.
713 * @param pvEAX Where to store eax. Optional.
714 * @param pvEBX Where to store ebx. Optional.
715 * @param pvECX Where to store ecx. Optional.
716 * @param pvEDX Where to store edx. Optional.
717 */
718DECLASM(uint32_t) ASMCpuIdExSlow(uint32_t uOperator, uint32_t uInitEBX, uint32_t uInitECX, uint32_t uInitEDX,
719 void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
720
721
722/**
723 * Performs the cpuid instruction returning ecx and edx.
724 *
725 * @param uOperator CPUID operation (eax).
726 * @param pvECX Where to store ecx.
727 * @param pvEDX Where to store edx.
728 * @remark We're using void pointers to ease the use of special bitfield structures and such.
729 */
730#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
731DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
732#else
733DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
734{
735 uint32_t uEBX;
736 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
737}
738#endif
739
740
741/**
742 * Performs the cpuid instruction returning eax.
743 *
744 * @param uOperator CPUID operation (eax).
745 * @returns EAX after cpuid operation.
746 */
747#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
748DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
749#else
750DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
751{
752 RTCCUINTREG xAX;
753# if RT_INLINE_ASM_GNU_STYLE
754# ifdef RT_ARCH_AMD64
755 __asm__ ("cpuid"
756 : "=a" (xAX)
757 : "0" (uOperator)
758 : "rbx", "rcx", "rdx");
759# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
760 __asm__ ("push %%ebx\n\t"
761 "cpuid\n\t"
762 "pop %%ebx\n\t"
763 : "=a" (xAX)
764 : "0" (uOperator)
765 : "ecx", "edx");
766# else
767 __asm__ ("cpuid"
768 : "=a" (xAX)
769 : "0" (uOperator)
770 : "edx", "ecx", "ebx");
771# endif
772
773# elif RT_INLINE_ASM_USES_INTRIN
774 int aInfo[4];
775 __cpuid(aInfo, uOperator);
776 xAX = aInfo[0];
777
778# else
779 __asm
780 {
781 push ebx
782 mov eax, [uOperator]
783 cpuid
784 mov [xAX], eax
785 pop ebx
786 }
787# endif
788 return (uint32_t)xAX;
789}
790#endif
791
792
793/**
794 * Performs the cpuid instruction returning ebx.
795 *
796 * @param uOperator CPUID operation (eax).
797 * @returns EBX after cpuid operation.
798 */
799#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
800DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
801#else
802DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
803{
804 RTCCUINTREG xBX;
805# if RT_INLINE_ASM_GNU_STYLE
806# ifdef RT_ARCH_AMD64
807 RTCCUINTREG uSpill;
808 __asm__ ("cpuid"
809 : "=a" (uSpill),
810 "=b" (xBX)
811 : "0" (uOperator)
812 : "rdx", "rcx");
813# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
814 __asm__ ("push %%ebx\n\t"
815 "cpuid\n\t"
816 "mov %%ebx, %%edx\n\t"
817 "pop %%ebx\n\t"
818 : "=a" (uOperator),
819 "=d" (xBX)
820 : "0" (uOperator)
821 : "ecx");
822# else
823 __asm__ ("cpuid"
824 : "=a" (uOperator),
825 "=b" (xBX)
826 : "0" (uOperator)
827 : "edx", "ecx");
828# endif
829
830# elif RT_INLINE_ASM_USES_INTRIN
831 int aInfo[4];
832 __cpuid(aInfo, uOperator);
833 xBX = aInfo[1];
834
835# else
836 __asm
837 {
838 push ebx
839 mov eax, [uOperator]
840 cpuid
841 mov [xBX], ebx
842 pop ebx
843 }
844# endif
845 return (uint32_t)xBX;
846}
847#endif
848
849
850/**
851 * Performs the cpuid instruction returning ecx.
852 *
853 * @param uOperator CPUID operation (eax).
854 * @returns ECX after cpuid operation.
855 */
856#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
857DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
858#else
859DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
860{
861 RTCCUINTREG xCX;
862# if RT_INLINE_ASM_GNU_STYLE
863# ifdef RT_ARCH_AMD64
864 RTCCUINTREG uSpill;
865 __asm__ ("cpuid"
866 : "=a" (uSpill),
867 "=c" (xCX)
868 : "0" (uOperator)
869 : "rbx", "rdx");
870# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
871 __asm__ ("push %%ebx\n\t"
872 "cpuid\n\t"
873 "pop %%ebx\n\t"
874 : "=a" (uOperator),
875 "=c" (xCX)
876 : "0" (uOperator)
877 : "edx");
878# else
879 __asm__ ("cpuid"
880 : "=a" (uOperator),
881 "=c" (xCX)
882 : "0" (uOperator)
883 : "ebx", "edx");
884
885# endif
886
887# elif RT_INLINE_ASM_USES_INTRIN
888 int aInfo[4];
889 __cpuid(aInfo, uOperator);
890 xCX = aInfo[2];
891
892# else
893 __asm
894 {
895 push ebx
896 mov eax, [uOperator]
897 cpuid
898 mov [xCX], ecx
899 pop ebx
900 }
901# endif
902 return (uint32_t)xCX;
903}
904#endif
905
906
907/**
908 * Performs the cpuid instruction returning edx.
909 *
910 * @param uOperator CPUID operation (eax).
911 * @returns EDX after cpuid operation.
912 */
913#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
914DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
915#else
916DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
917{
918 RTCCUINTREG xDX;
919# if RT_INLINE_ASM_GNU_STYLE
920# ifdef RT_ARCH_AMD64
921 RTCCUINTREG uSpill;
922 __asm__ ("cpuid"
923 : "=a" (uSpill),
924 "=d" (xDX)
925 : "0" (uOperator)
926 : "rbx", "rcx");
927# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
928 __asm__ ("push %%ebx\n\t"
929 "cpuid\n\t"
930 "pop %%ebx\n\t"
931 : "=a" (uOperator),
932 "=d" (xDX)
933 : "0" (uOperator)
934 : "ecx");
935# else
936 __asm__ ("cpuid"
937 : "=a" (uOperator),
938 "=d" (xDX)
939 : "0" (uOperator)
940 : "ebx", "ecx");
941# endif
942
943# elif RT_INLINE_ASM_USES_INTRIN
944 int aInfo[4];
945 __cpuid(aInfo, uOperator);
946 xDX = aInfo[3];
947
948# else
949 __asm
950 {
951 push ebx
952 mov eax, [uOperator]
953 cpuid
954 mov [xDX], edx
955 pop ebx
956 }
957# endif
958 return (uint32_t)xDX;
959}
960#endif
961
962
963/**
964 * Checks if the current CPU supports CPUID.
965 *
966 * @returns true if CPUID is supported.
967 */
968DECLINLINE(bool) ASMHasCpuId(void)
969{
970#ifdef RT_ARCH_AMD64
971 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
972#else /* !RT_ARCH_AMD64 */
973 bool fRet = false;
974# if RT_INLINE_ASM_GNU_STYLE
975 uint32_t u1;
976 uint32_t u2;
977 __asm__ ("pushf\n\t"
978 "pop %1\n\t"
979 "mov %1, %2\n\t"
980 "xorl $0x200000, %1\n\t"
981 "push %1\n\t"
982 "popf\n\t"
983 "pushf\n\t"
984 "pop %1\n\t"
985 "cmpl %1, %2\n\t"
986 "setne %0\n\t"
987 "push %2\n\t"
988 "popf\n\t"
989 : "=m" (fRet), "=r" (u1), "=r" (u2));
990# else
991 __asm
992 {
993 pushfd
994 pop eax
995 mov ebx, eax
996 xor eax, 0200000h
997 push eax
998 popfd
999 pushfd
1000 pop eax
1001 cmp eax, ebx
1002 setne fRet
1003 push ebx
1004 popfd
1005 }
1006# endif
1007 return fRet;
1008#endif /* !RT_ARCH_AMD64 */
1009}
1010
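/*
 * Usage sketch for ASMHasCpuId(): guard CPUID use on ancient 32-bit CPUs
 * (the check is constant true on AMD64).  Hypothetical helper.
 */
#if 0 /* usage sketch */
static uint32_t ExampleGetMaxStdLeaf(void)
{
    if (!ASMHasCpuId())
        return 0;               /* pre-CPUID CPU (486 or older) */
    return ASMCpuId_EAX(0);     /* EAX of leaf 0 = highest standard leaf */
}
#endif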
1011
1012/**
1013 * Gets the APIC ID of the current CPU.
1014 *
1015 * @returns the APIC ID.
1016 */
1017#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1018DECLASM(uint8_t) ASMGetApicId(void);
1019#else
1020DECLINLINE(uint8_t) ASMGetApicId(void)
1021{
1022 RTCCUINTREG xBX;
1023# if RT_INLINE_ASM_GNU_STYLE
1024# ifdef RT_ARCH_AMD64
1025 RTCCUINTREG uSpill;
1026 __asm__ __volatile__ ("cpuid"
1027 : "=a" (uSpill),
1028 "=b" (xBX)
1029 : "0" (1)
1030 : "rcx", "rdx");
1031# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
1032 RTCCUINTREG uSpill;
1033 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
1034 "cpuid\n\t"
1035 "xchgl %%ebx,%1\n\t"
1036 : "=a" (uSpill),
1037 "=rm" (xBX)
1038 : "0" (1)
1039 : "ecx", "edx");
1040# else
1041 RTCCUINTREG uSpill;
1042 __asm__ __volatile__ ("cpuid"
1043 : "=a" (uSpill),
1044 "=b" (xBX)
1045 : "0" (1)
1046 : "ecx", "edx");
1047# endif
1048
1049# elif RT_INLINE_ASM_USES_INTRIN
1050 int aInfo[4];
1051 __cpuid(aInfo, 1);
1052 xBX = aInfo[1];
1053
1054# else
1055 __asm
1056 {
1057 push ebx
1058 mov eax, 1
1059 cpuid
1060 mov [xBX], ebx
1061 pop ebx
1062 }
1063# endif
1064 return (uint8_t)(xBX >> 24);
1065}
1066#endif
1067
1068
1069/**
1070 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
1071 *
1072 * @returns true/false.
1073 * @param uEBX EBX return from ASMCpuId(0)
1074 * @param uECX ECX return from ASMCpuId(0)
1075 * @param uEDX EDX return from ASMCpuId(0)
1076 */
1077DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1078{
1079 return uEBX == UINT32_C(0x756e6547)
1080 && uECX == UINT32_C(0x6c65746e)
1081 && uEDX == UINT32_C(0x49656e69);
1082}
1083
1084
1085/**
1086 * Tests if this is a genuine Intel CPU.
1087 *
1088 * @returns true/false.
1089 * @remarks ASSUMES that cpuid is supported by the CPU.
1090 */
1091DECLINLINE(bool) ASMIsIntelCpu(void)
1092{
1093 uint32_t uEAX, uEBX, uECX, uEDX;
1094 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1095 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
1096}
1097
1098
1099/**
1100 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
1101 *
1102 * @returns true/false.
1103 * @param uEBX EBX return from ASMCpuId(0)
1104 * @param uECX ECX return from ASMCpuId(0)
1105 * @param uEDX EDX return from ASMCpuId(0)
1106 */
1107DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1108{
1109 return uEBX == UINT32_C(0x68747541)
1110 && uECX == UINT32_C(0x444d4163)
1111 && uEDX == UINT32_C(0x69746e65);
1112}
1113
1114
1115/**
1116 * Tests if this is an authentic AMD CPU.
1117 *
1118 * @returns true/false.
1119 * @remarks ASSUMES that cpuid is supported by the CPU.
1120 */
1121DECLINLINE(bool) ASMIsAmdCpu(void)
1122{
1123 uint32_t uEAX, uEBX, uECX, uEDX;
1124 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1125 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1126}
1127
1128
1129/**
1130 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
1131 *
1132 * @returns true/false.
1133 * @param uEBX EBX return from ASMCpuId(0).
1134 * @param uECX ECX return from ASMCpuId(0).
1135 * @param uEDX EDX return from ASMCpuId(0).
1136 */
1137DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1138{
1139 return uEBX == UINT32_C(0x746e6543)
1140 && uECX == UINT32_C(0x736c7561)
1141 && uEDX == UINT32_C(0x48727561);
1142}
1143
1144
1145/**
1146 * Tests if this is a centaur hauling VIA CPU.
1147 *
1148 * @returns true/false.
1149 * @remarks ASSUMES that cpuid is supported by the CPU.
1150 */
1151DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1152{
1153 uint32_t uEAX, uEBX, uECX, uEDX;
1154 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1155 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1156}
1157
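/*
 * Usage sketch for the vendor predicates: one CPUID(0) call feeding the *Ex
 * variants avoids re-executing CPUID for each test.  Hypothetical helper.
 */
#if 0 /* usage sketch */
static const char *ExampleGetVendorName(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    if (ASMIsIntelCpuEx(uEBX, uECX, uEDX))
        return "GenuineIntel";
    if (ASMIsAmdCpuEx(uEBX, uECX, uEDX))
        return "AuthenticAMD";
    if (ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX))
        return "CentaurHauls";
    return "unknown";
}
#endif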
1158
1159/**
1160 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1161 *
1162 *
1163 * @returns true/false.
1164 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1165 *
1166 * @note This only succeeds if there are at least two leaves in the range.
1167 * @remarks The upper range limit is just some half reasonable value we've
1168 * picked out of thin air.
1169 */
1170DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1171{
1172 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1173}
1174
1175
1176/**
1177 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1178 *
1179 * This only succeeds if there are at least two leaves in the range.
1180 *
1181 * @returns true/false.
1182 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1183 *
1184 * @note This only succeeds if there are at least two leaves in the range.
1185 * @remarks The upper range limit is just some half reasonable value we've
1186 * picked out of thin air.
1187 */
1188DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1189{
1190 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1191}
1192
1193
1194/**
1195 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1196 *
1197 * @returns Family.
1198 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1199 */
1200DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1201{
1202 return ((uEAX >> 8) & 0xf) == 0xf
1203 ? ((uEAX >> 20) & 0x7f) + 0xf
1204 : ((uEAX >> 8) & 0xf);
1205}
1206
1207
1208/**
1209 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1210 *
1211 * @returns Model.
1212 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1213 */
1214DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1215{
1216 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1217 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1218 : ((uEAX >> 4) & 0xf);
1219}
1220
1221
1222/**
1223 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1224 *
1225 * @returns Model.
1226 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1227 */
1228DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1229{
1230 return ((uEAX >> 8) & 0xf) == 0xf
1231 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1232 : ((uEAX >> 4) & 0xf);
1233}
1234
1235
1236/**
1237 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1238 *
1239 * @returns Model.
1240 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1241 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1242 */
1243DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1244{
1245 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1246 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1247 : ((uEAX >> 4) & 0xf);
1248}
1249
1250
1251/**
1252 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1253 *
1254 * @returns Stepping.
1255 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1256 */
1257DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1258{
1259 return uEAX & 0xf;
1260}
1261
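/*
 * Usage sketch for the family/model/stepping helpers.  Worked example:
 * leaf 1 EAX = 0x000306A9 (an Ivy Bridge part) decodes to family 6,
 * model 0x3A (extended model 3, base model 0xA) and stepping 9.
 */
#if 0 /* usage sketch */
static void ExampleDecodeFms(uint32_t *puFamily, uint32_t *puModel, uint32_t *puStepping)
{
    uint32_t const uEAX = ASMCpuId_EAX(1);
    *puFamily   = ASMGetCpuFamily(uEAX);
    *puModel    = ASMGetCpuModel(uEAX, ASMIsIntelCpu());
    *puStepping = ASMGetCpuStepping(uEAX);
}
#endif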
1262
1263/**
1264 * Get cr0.
1265 * @returns cr0.
1266 */
1267#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1268DECLASM(RTCCUINTREG) ASMGetCR0(void);
1269#else
1270DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
1271{
1272 RTCCUINTREG uCR0;
1273# if RT_INLINE_ASM_USES_INTRIN
1274 uCR0 = __readcr0();
1275
1276# elif RT_INLINE_ASM_GNU_STYLE
1277# ifdef RT_ARCH_AMD64
1278 __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
1279# else
1280 __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
1281# endif
1282# else
1283 __asm
1284 {
1285# ifdef RT_ARCH_AMD64
1286 mov rax, cr0
1287 mov [uCR0], rax
1288# else
1289 mov eax, cr0
1290 mov [uCR0], eax
1291# endif
1292 }
1293# endif
1294 return uCR0;
1295}
1296#endif
1297
1298
1299/**
1300 * Sets the CR0 register.
1301 * @param uCR0 The new CR0 value.
1302 */
1303#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1304DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
1305#else
1306DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
1307{
1308# if RT_INLINE_ASM_USES_INTRIN
1309 __writecr0(uCR0);
1310
1311# elif RT_INLINE_ASM_GNU_STYLE
1312# ifdef RT_ARCH_AMD64
1313 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1314# else
1315 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1316# endif
1317# else
1318 __asm
1319 {
1320# ifdef RT_ARCH_AMD64
1321 mov rax, [uCR0]
1322 mov cr0, rax
1323# else
1324 mov eax, [uCR0]
1325 mov cr0, eax
1326# endif
1327 }
1328# endif
1329}
1330#endif
1331
1332
1333/**
1334 * Get cr2.
1335 * @returns cr2.
1336 */
1337#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1338DECLASM(RTCCUINTREG) ASMGetCR2(void);
1339#else
1340DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
1341{
1342 RTCCUINTREG uCR2;
1343# if RT_INLINE_ASM_USES_INTRIN
1344 uCR2 = __readcr2();
1345
1346# elif RT_INLINE_ASM_GNU_STYLE
1347# ifdef RT_ARCH_AMD64
1348 __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
1349# else
1350 __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
1351# endif
1352# else
1353 __asm
1354 {
1355# ifdef RT_ARCH_AMD64
1356 mov rax, cr2
1357 mov [uCR2], rax
1358# else
1359 mov eax, cr2
1360 mov [uCR2], eax
1361# endif
1362 }
1363# endif
1364 return uCR2;
1365}
1366#endif
1367
1368
1369/**
1370 * Sets the CR2 register.
1371 * @param uCR2 The new CR2 value.
1372 */
1373#if RT_INLINE_ASM_EXTERNAL
1374DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
1375#else
1376DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
1377{
1378# if RT_INLINE_ASM_GNU_STYLE
1379# ifdef RT_ARCH_AMD64
1380 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1381# else
1382 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1383# endif
1384# else
1385 __asm
1386 {
1387# ifdef RT_ARCH_AMD64
1388 mov rax, [uCR2]
1389 mov cr2, rax
1390# else
1391 mov eax, [uCR2]
1392 mov cr2, eax
1393# endif
1394 }
1395# endif
1396}
1397#endif
1398
1399
1400/**
1401 * Get cr3.
1402 * @returns cr3.
1403 */
1404#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1405DECLASM(RTCCUINTREG) ASMGetCR3(void);
1406#else
1407DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
1408{
1409 RTCCUINTREG uCR3;
1410# if RT_INLINE_ASM_USES_INTRIN
1411 uCR3 = __readcr3();
1412
1413# elif RT_INLINE_ASM_GNU_STYLE
1414# ifdef RT_ARCH_AMD64
1415 __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
1416# else
1417 __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
1418# endif
1419# else
1420 __asm
1421 {
1422# ifdef RT_ARCH_AMD64
1423 mov rax, cr3
1424 mov [uCR3], rax
1425# else
1426 mov eax, cr3
1427 mov [uCR3], eax
1428# endif
1429 }
1430# endif
1431 return uCR3;
1432}
1433#endif
1434
1435
1436/**
1437 * Sets the CR3 register.
1438 *
1439 * @param uCR3 New CR3 value.
1440 */
1441#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1442DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
1443#else
1444DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
1445{
1446# if RT_INLINE_ASM_USES_INTRIN
1447 __writecr3(uCR3);
1448
1449# elif RT_INLINE_ASM_GNU_STYLE
1450# ifdef RT_ARCH_AMD64
1451 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1452# else
1453 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1454# endif
1455# else
1456 __asm
1457 {
1458# ifdef RT_ARCH_AMD64
1459 mov rax, [uCR3]
1460 mov cr3, rax
1461# else
1462 mov eax, [uCR3]
1463 mov cr3, eax
1464# endif
1465 }
1466# endif
1467}
1468#endif
1469
1470
1471/**
1472 * Reloads the CR3 register.
1473 */
1474#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1475DECLASM(void) ASMReloadCR3(void);
1476#else
1477DECLINLINE(void) ASMReloadCR3(void)
1478{
1479# if RT_INLINE_ASM_USES_INTRIN
1480 __writecr3(__readcr3());
1481
1482# elif RT_INLINE_ASM_GNU_STYLE
1483 RTCCUINTREG u;
1484# ifdef RT_ARCH_AMD64
1485 __asm__ __volatile__("movq %%cr3, %0\n\t"
1486 "movq %0, %%cr3\n\t"
1487 : "=r" (u));
1488# else
1489 __asm__ __volatile__("movl %%cr3, %0\n\t"
1490 "movl %0, %%cr3\n\t"
1491 : "=r" (u));
1492# endif
1493# else
1494 __asm
1495 {
1496# ifdef RT_ARCH_AMD64
1497 mov rax, cr3
1498 mov cr3, rax
1499# else
1500 mov eax, cr3
1501 mov cr3, eax
1502# endif
1503 }
1504# endif
1505}
1506#endif
1507
1508
1509/**
1510 * Get cr4.
1511 * @returns cr4.
1512 */
1513#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1514DECLASM(RTCCUINTREG) ASMGetCR4(void);
1515#else
1516DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
1517{
1518 RTCCUINTREG uCR4;
1519# if RT_INLINE_ASM_USES_INTRIN
1520 uCR4 = __readcr4();
1521
1522# elif RT_INLINE_ASM_GNU_STYLE
1523# ifdef RT_ARCH_AMD64
1524 __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
1525# else
1526 __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
1527# endif
1528# else
1529 __asm
1530 {
1531# ifdef RT_ARCH_AMD64
1532 mov rax, cr4
1533 mov [uCR4], rax
1534# else
1535 push eax /* just in case */
1536 /*mov eax, cr4*/
1537 _emit 0x0f
1538 _emit 0x20
1539 _emit 0xe0
1540 mov [uCR4], eax
1541 pop eax
1542# endif
1543 }
1544# endif
1545 return uCR4;
1546}
1547#endif
1548
1549
1550/**
1551 * Sets the CR4 register.
1552 *
1553 * @param uCR4 New CR4 value.
1554 */
1555#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1556DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
1557#else
1558DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
1559{
1560# if RT_INLINE_ASM_USES_INTRIN
1561 __writecr4(uCR4);
1562
1563# elif RT_INLINE_ASM_GNU_STYLE
1564# ifdef RT_ARCH_AMD64
1565 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1566# else
1567 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1568# endif
1569# else
1570 __asm
1571 {
1572# ifdef RT_ARCH_AMD64
1573 mov rax, [uCR4]
1574 mov cr4, rax
1575# else
1576 mov eax, [uCR4]
1577 _emit 0x0F
1578 _emit 0x22
1579 _emit 0xE0 /* mov cr4, eax */
1580# endif
1581 }
1582# endif
1583}
1584#endif
1585
1586
1587/**
1588 * Get cr8.
1589 * @returns cr8.
1590 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1591 */
1592#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1593DECLASM(RTCCUINTREG) ASMGetCR8(void);
1594#else
1595DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
1596{
1597# ifdef RT_ARCH_AMD64
1598 RTCCUINTREG uCR8;
1599# if RT_INLINE_ASM_USES_INTRIN
1600 uCR8 = __readcr8();
1601
1602# elif RT_INLINE_ASM_GNU_STYLE
1603 __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
1604# else
1605 __asm
1606 {
1607 mov rax, cr8
1608 mov [uCR8], rax
1609 }
1610# endif
1611 return uCR8;
1612# else /* !RT_ARCH_AMD64 */
1613 return 0;
1614# endif /* !RT_ARCH_AMD64 */
1615}
1616#endif
1617
1618
1619/**
1620 * Enables interrupts (EFLAGS.IF).
1621 */
1622#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1623DECLASM(void) ASMIntEnable(void);
1624#else
1625DECLINLINE(void) ASMIntEnable(void)
1626{
1627# if RT_INLINE_ASM_GNU_STYLE
1628 __asm("sti\n");
1629# elif RT_INLINE_ASM_USES_INTRIN
1630 _enable();
1631# else
1632 __asm sti
1633# endif
1634}
1635#endif
1636
1637
1638/**
1639 * Disables interrupts (!EFLAGS.IF).
1640 */
1641#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1642DECLASM(void) ASMIntDisable(void);
1643#else
1644DECLINLINE(void) ASMIntDisable(void)
1645{
1646# if RT_INLINE_ASM_GNU_STYLE
1647 __asm("cli\n");
1648# elif RT_INLINE_ASM_USES_INTRIN
1649 _disable();
1650# else
1651 __asm cli
1652# endif
1653}
1654#endif
1655
1656
1657/**
1658 * Disables interrupts and returns previous xFLAGS.
1659 */
1660#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1661DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1662#else
1663DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1664{
1665 RTCCUINTREG xFlags;
1666# if RT_INLINE_ASM_GNU_STYLE
1667# ifdef RT_ARCH_AMD64
1668 __asm__ __volatile__("pushfq\n\t"
1669 "cli\n\t"
1670 "popq %0\n\t"
1671 : "=r" (xFlags));
1672# else
1673 __asm__ __volatile__("pushfl\n\t"
1674 "cli\n\t"
1675 "popl %0\n\t"
1676 : "=r" (xFlags));
1677# endif
1678# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
1679 xFlags = ASMGetFlags();
1680 _disable();
1681# else
1682 __asm {
1683 pushfd
1684 cli
1685 pop [xFlags]
1686 }
1687# endif
1688 return xFlags;
1689}
1690#endif
1691
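/*
 * Usage sketch: the save/disable/restore pattern ASMIntDisableFlags() and
 * ASMSetFlags() are designed for.  Ring-0 illustration only.
 */
#if 0 /* usage sketch */
static void ExampleBriefCriticalSection(void)
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags(); /* pushf; cli; pop */
    /* ... touch data shared with an interrupt handler ... */
    ASMSetFlags(fSavedFlags);   /* restores the previous IF state, whatever it was */
}
#endif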
1692
1693/**
1694 * Are interrupts enabled?
1695 *
1696 * @returns true / false.
1697 */
1698DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
1699{
1700 RTCCUINTREG uFlags = ASMGetFlags();
1701 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
1702}
1703
1704
1705/**
1706 * Halts the CPU until interrupted.
1707 */
1708#if RT_INLINE_ASM_EXTERNAL
1709DECLASM(void) ASMHalt(void);
1710#else
1711DECLINLINE(void) ASMHalt(void)
1712{
1713# if RT_INLINE_ASM_GNU_STYLE
1714 __asm__ __volatile__("hlt\n\t");
1715# else
1716 __asm {
1717 hlt
1718 }
1719# endif
1720}
1721#endif
1722
1723
1724/**
1725 * Reads a machine specific register.
1726 *
1727 * @returns Register content.
1728 * @param uRegister Register to read.
1729 */
1730#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1731DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
1732#else
1733DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
1734{
1735 RTUINT64U u;
1736# if RT_INLINE_ASM_GNU_STYLE
1737 __asm__ __volatile__("rdmsr\n\t"
1738 : "=a" (u.s.Lo),
1739 "=d" (u.s.Hi)
1740 : "c" (uRegister));
1741
1742# elif RT_INLINE_ASM_USES_INTRIN
1743 u.u = __readmsr(uRegister);
1744
1745# else
1746 __asm
1747 {
1748 mov ecx, [uRegister]
1749 rdmsr
1750 mov [u.s.Lo], eax
1751 mov [u.s.Hi], edx
1752 }
1753# endif
1754
1755 return u.u;
1756}
1757#endif
1758
1759
1760/**
1761 * Writes a machine specific register.
1762 *
1764 * @param uRegister Register to write to.
1765 * @param u64Val Value to write.
1766 */
1767#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1768DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
1769#else
1770DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
1771{
1772 RTUINT64U u;
1773
1774 u.u = u64Val;
1775# if RT_INLINE_ASM_GNU_STYLE
1776 __asm__ __volatile__("wrmsr\n\t"
1777 ::"a" (u.s.Lo),
1778 "d" (u.s.Hi),
1779 "c" (uRegister));
1780
1781# elif RT_INLINE_ASM_USES_INTRIN
1782 __writemsr(uRegister, u.u);
1783
1784# else
1785 __asm
1786 {
1787 mov ecx, [uRegister]
1788 mov edx, [u.s.Hi]
1789 mov eax, [u.s.Lo]
1790 wrmsr
1791 }
1792# endif
1793}
1794#endif
1795
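/*
 * Usage sketch for ASMRdMsr()/ASMWrMsr(): read-modify-write of an MSR.
 * 0x1b (IA32_APIC_BASE) and its bit 11 (APIC global enable) are
 * architectural on modern x86, but accessing an unsupported MSR raises #GP,
 * so real code must verify support first.  Ring-0 illustration only.
 */
#if 0 /* usage sketch */
static void ExampleSetApicGlobalEnable(void)
{
    uint64_t u64 = ASMRdMsr(0x1b);  /* IA32_APIC_BASE */
    u64 |= RT_BIT_64(11);           /* APIC global enable */
    ASMWrMsr(0x1b, u64);
}
#endif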
1796
1797/**
1798 * Reads a machine specific register, extended version (for AMD).
1799 *
1800 * @returns Register content.
1801 * @param uRegister Register to read.
1802 * @param uXDI RDI/EDI value.
1803 */
1804#if RT_INLINE_ASM_EXTERNAL
1805DECLASM(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTREG uXDI);
1806#else
1807DECLINLINE(uint64_t) ASMRdMsrEx(uint32_t uRegister, RTCCUINTREG uXDI)
1808{
1809 RTUINT64U u;
1810# if RT_INLINE_ASM_GNU_STYLE
1811 __asm__ __volatile__("rdmsr\n\t"
1812 : "=a" (u.s.Lo),
1813 "=d" (u.s.Hi)
1814 : "c" (uRegister),
1815 "D" (uXDI));
1816
1817# else
1818 __asm
1819 {
1820 mov ecx, [uRegister]
1821 xchg edi, [uXDI]
1822 rdmsr
1823 mov [u.s.Lo], eax
1824 mov [u.s.Hi], edx
1825 xchg edi, [uXDI]
1826 }
1827# endif
1828
1829 return u.u;
1830}
1831#endif
1832
1833
1834/**
1835 * Writes a machine specific register, extended version (for AMD).
1836 *
1838 * @param uRegister Register to write to.
1839 * @param uXDI RDI/EDI value.
1840 * @param u64Val Value to write.
1841 */
1842#if RT_INLINE_ASM_EXTERNAL
1843DECLASM(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTREG uXDI, uint64_t u64Val);
1844#else
1845DECLINLINE(void) ASMWrMsrEx(uint32_t uRegister, RTCCUINTREG uXDI, uint64_t u64Val)
1846{
1847 RTUINT64U u;
1848
1849 u.u = u64Val;
1850# if RT_INLINE_ASM_GNU_STYLE
1851 __asm__ __volatile__("wrmsr\n\t"
1852 ::"a" (u.s.Lo),
1853 "d" (u.s.Hi),
1854 "c" (uRegister),
1855 "D" (uXDI));
1856
1857# else
1858 __asm
1859 {
1860 mov ecx, [uRegister]
1861 xchg edi, [uXDI]
1862 mov edx, [u.s.Hi]
1863 mov eax, [u.s.Lo]
1864 wrmsr
1865 xchg edi, [uXDI]
1866 }
1867# endif
1868}
1869#endif
1870
1871
1872
1873/**
1874 * Reads low part of a machine specific register.
1875 *
1876 * @returns Register content.
1877 * @param uRegister Register to read.
1878 */
1879#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1880DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
1881#else
1882DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
1883{
1884 uint32_t u32;
1885# if RT_INLINE_ASM_GNU_STYLE
1886 __asm__ __volatile__("rdmsr\n\t"
1887 : "=a" (u32)
1888 : "c" (uRegister)
1889 : "edx");
1890
1891# elif RT_INLINE_ASM_USES_INTRIN
1892 u32 = (uint32_t)__readmsr(uRegister);
1893
1894#else
1895 __asm
1896 {
1897 mov ecx, [uRegister]
1898 rdmsr
1899 mov [u32], eax
1900 }
1901# endif
1902
1903 return u32;
1904}
1905#endif
1906
1907
1908/**
1909 * Reads high part of a machine specific register.
1910 *
1911 * @returns Register content.
1912 * @param uRegister Register to read.
1913 */
1914#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1915DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
1916#else
1917DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
1918{
1919 uint32_t u32;
1920# if RT_INLINE_ASM_GNU_STYLE
1921 __asm__ __volatile__("rdmsr\n\t"
1922 : "=d" (u32)
1923 : "c" (uRegister)
1924 : "eax");
1925
1926# elif RT_INLINE_ASM_USES_INTRIN
1927 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
1928
1929# else
1930 __asm
1931 {
1932 mov ecx, [uRegister]
1933 rdmsr
1934 mov [u32], edx
1935 }
1936# endif
1937
1938 return u32;
1939}
1940#endif
1941
1942
1943/**
1944 * Gets dr0.
1945 *
1946 * @returns dr0.
1947 */
1948#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1949DECLASM(RTCCUINTREG) ASMGetDR0(void);
1950#else
1951DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
1952{
1953 RTCCUINTREG uDR0;
1954# if RT_INLINE_ASM_USES_INTRIN
1955 uDR0 = __readdr(0);
1956# elif RT_INLINE_ASM_GNU_STYLE
1957# ifdef RT_ARCH_AMD64
1958 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
1959# else
1960 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
1961# endif
1962# else
1963 __asm
1964 {
1965# ifdef RT_ARCH_AMD64
1966 mov rax, dr0
1967 mov [uDR0], rax
1968# else
1969 mov eax, dr0
1970 mov [uDR0], eax
1971# endif
1972 }
1973# endif
1974 return uDR0;
1975}
1976#endif
1977
1978
1979/**
1980 * Gets dr1.
1981 *
1982 * @returns dr1.
1983 */
1984#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1985DECLASM(RTCCUINTREG) ASMGetDR1(void);
1986#else
1987DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
1988{
1989 RTCCUINTREG uDR1;
1990# if RT_INLINE_ASM_USES_INTRIN
1991 uDR1 = __readdr(1);
1992# elif RT_INLINE_ASM_GNU_STYLE
1993# ifdef RT_ARCH_AMD64
1994 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
1995# else
1996 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
1997# endif
1998# else
1999 __asm
2000 {
2001# ifdef RT_ARCH_AMD64
2002 mov rax, dr1
2003 mov [uDR1], rax
2004# else
2005 mov eax, dr1
2006 mov [uDR1], eax
2007# endif
2008 }
2009# endif
2010 return uDR1;
2011}
2012#endif
2013
2014
2015/**
2016 * Gets dr2.
2017 *
2018 * @returns dr2.
2019 */
2020#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2021DECLASM(RTCCUINTREG) ASMGetDR2(void);
2022#else
2023DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
2024{
2025 RTCCUINTREG uDR2;
2026# if RT_INLINE_ASM_USES_INTRIN
2027 uDR2 = __readdr(2);
2028# elif RT_INLINE_ASM_GNU_STYLE
2029# ifdef RT_ARCH_AMD64
2030 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
2031# else
2032 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
2033# endif
2034# else
2035 __asm
2036 {
2037# ifdef RT_ARCH_AMD64
2038 mov rax, dr2
2039 mov [uDR2], rax
2040# else
2041 mov eax, dr2
2042 mov [uDR2], eax
2043# endif
2044 }
2045# endif
2046 return uDR2;
2047}
2048#endif
2049
2050
2051/**
2052 * Gets dr3.
2053 *
2054 * @returns dr3.
2055 */
2056#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2057DECLASM(RTCCUINTREG) ASMGetDR3(void);
2058#else
2059DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
2060{
2061 RTCCUINTREG uDR3;
2062# if RT_INLINE_ASM_USES_INTRIN
2063 uDR3 = __readdr(3);
2064# elif RT_INLINE_ASM_GNU_STYLE
2065# ifdef RT_ARCH_AMD64
2066 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
2067# else
2068 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
2069# endif
2070# else
2071 __asm
2072 {
2073# ifdef RT_ARCH_AMD64
2074 mov rax, dr3
2075 mov [uDR3], rax
2076# else
2077 mov eax, dr3
2078 mov [uDR3], eax
2079# endif
2080 }
2081# endif
2082 return uDR3;
2083}
2084#endif
2085
2086
2087/**
2088 * Gets dr6.
2089 *
2090 * @returns dr6.
2091 */
2092#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2093DECLASM(RTCCUINTREG) ASMGetDR6(void);
2094#else
2095DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
2096{
2097 RTCCUINTREG uDR6;
2098# if RT_INLINE_ASM_USES_INTRIN
2099 uDR6 = __readdr(6);
2100# elif RT_INLINE_ASM_GNU_STYLE
2101# ifdef RT_ARCH_AMD64
2102 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
2103# else
2104 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
2105# endif
2106# else
2107 __asm
2108 {
2109# ifdef RT_ARCH_AMD64
2110 mov rax, dr6
2111 mov [uDR6], rax
2112# else
2113 mov eax, dr6
2114 mov [uDR6], eax
2115# endif
2116 }
2117# endif
2118 return uDR6;
2119}
2120#endif
2121
2122
2123/**
2124 * Reads and clears DR6.
2125 *
2126 * @returns DR6.
2127 */
2128#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2129DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
2130#else
2131DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
2132{
2133 RTCCUINTREG uDR6;
2134# if RT_INLINE_ASM_USES_INTRIN
2135 uDR6 = __readdr(6);
2136 __writedr(6, 0xffff0ff0U); /* 31-16 and 11-4 are 1's, 15-12, 3-0 and 63-32 are zero. */
2137# elif RT_INLINE_ASM_GNU_STYLE
2138 RTCCUINTREG uNewValue = 0xffff0ff0U; /* 31-16 and 11-4 are 1's, 15-12, 3-0 and 63-32 are zero. */
2139# ifdef RT_ARCH_AMD64
2140 __asm__ __volatile__("movq %%dr6, %0\n\t"
2141 "movq %1, %%dr6\n\t"
2142 : "=r" (uDR6)
2143 : "r" (uNewValue));
2144# else
2145 __asm__ __volatile__("movl %%dr6, %0\n\t"
2146 "movl %1, %%dr6\n\t"
2147 : "=r" (uDR6)
2148 : "r" (uNewValue));
2149# endif
2150# else
2151 __asm
2152 {
2153# ifdef RT_ARCH_AMD64
2154 mov rax, dr6
2155 mov [uDR6], rax
2156 mov rcx, rax
2157 mov ecx, 0ffff0ff0h; /* 31-16 and 11-4 are 1's, 15-12, 3-0 and 63-32 are zero. */
2158 mov dr6, rcx
2159# else
2160 mov eax, dr6
2161 mov [uDR6], eax
2162 mov ecx, 0ffff0ff0h; /* 31-16 and 11-4 are 1's, 15-12 and 3-0 are zero. */
2163 mov dr6, ecx
2164# endif
2165 }
2166# endif
2167 return uDR6;
2168}
2169#endif
2170
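/*
 * Usage sketch for ASMGetAndClearDR6(): a #DB handler samples and resets the
 * status register in one step, then inspects B0-B3.  Hypothetical helper.
 */
#if 0 /* usage sketch */
static unsigned ExampleWhichHwBreakpointHit(void)
{
    RTCCUINTREG const uDR6 = ASMGetAndClearDR6();
    unsigned iBp;
    for (iBp = 0; iBp < 4; iBp++)
        if (uDR6 & RT_BIT_32(iBp))  /* B0..B3 */
            return iBp;
    return ~0U;                     /* not a hardware breakpoint hit */
}
#endif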
2171
2172/**
2173 * Gets dr7.
2174 *
2175 * @returns dr7.
2176 */
2177#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2178DECLASM(RTCCUINTREG) ASMGetDR7(void);
2179#else
2180DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
2181{
2182 RTCCUINTREG uDR7;
2183# if RT_INLINE_ASM_USES_INTRIN
2184 uDR7 = __readdr(7);
2185# elif RT_INLINE_ASM_GNU_STYLE
2186# ifdef RT_ARCH_AMD64
2187 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
2188# else
2189 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
2190# endif
2191# else
2192 __asm
2193 {
2194# ifdef RT_ARCH_AMD64
2195 mov rax, dr7
2196 mov [uDR7], rax
2197# else
2198 mov eax, dr7
2199 mov [uDR7], eax
2200# endif
2201 }
2202# endif
2203 return uDR7;
2204}
2205#endif
2206
2207
2208/**
2209 * Sets dr0.
2210 *
2211 * @param uDRVal Debug register value to write
2212 */
2213#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2214DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
2215#else
2216DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
2217{
2218# if RT_INLINE_ASM_USES_INTRIN
2219 __writedr(0, uDRVal);
2220# elif RT_INLINE_ASM_GNU_STYLE
2221# ifdef RT_ARCH_AMD64
2222 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2223# else
2224 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2225# endif
2226# else
2227 __asm
2228 {
2229# ifdef RT_ARCH_AMD64
2230 mov rax, [uDRVal]
2231 mov dr0, rax
2232# else
2233 mov eax, [uDRVal]
2234 mov dr0, eax
2235# endif
2236 }
2237# endif
2238}
2239#endif
2240
2241
2242/**
2243 * Sets dr1.
2244 *
2245 * @param uDRVal Debug register value to write
2246 */
2247#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2248DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
2249#else
2250DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
2251{
2252# if RT_INLINE_ASM_USES_INTRIN
2253 __writedr(1, uDRVal);
2254# elif RT_INLINE_ASM_GNU_STYLE
2255# ifdef RT_ARCH_AMD64
2256 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2257# else
2258 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2259# endif
2260# else
2261 __asm
2262 {
2263# ifdef RT_ARCH_AMD64
2264 mov rax, [uDRVal]
2265 mov dr1, rax
2266# else
2267 mov eax, [uDRVal]
2268 mov dr1, eax
2269# endif
2270 }
2271# endif
2272}
2273#endif
2274
2275
2276/**
2277 * Sets dr2.
2278 *
2279 * @param uDRVal Debug register value to write
2280 */
2281#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2282DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
2283#else
2284DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
2285{
2286# if RT_INLINE_ASM_USES_INTRIN
2287 __writedr(2, uDRVal);
2288# elif RT_INLINE_ASM_GNU_STYLE
2289# ifdef RT_ARCH_AMD64
2290 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2291# else
2292 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2293# endif
2294# else
2295 __asm
2296 {
2297# ifdef RT_ARCH_AMD64
2298 mov rax, [uDRVal]
2299 mov dr2, rax
2300# else
2301 mov eax, [uDRVal]
2302 mov dr2, eax
2303# endif
2304 }
2305# endif
2306}
2307#endif
2308
2309
2310/**
2311 * Sets dr3.
2312 *
2313 * @param uDRVal Debug register value to write
2314 */
2315#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2316DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
2317#else
2318DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
2319{
2320# if RT_INLINE_ASM_USES_INTRIN
2321 __writedr(3, uDRVal);
2322# elif RT_INLINE_ASM_GNU_STYLE
2323# ifdef RT_ARCH_AMD64
2324 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2325# else
2326 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2327# endif
2328# else
2329 __asm
2330 {
2331# ifdef RT_ARCH_AMD64
2332 mov rax, [uDRVal]
2333 mov dr3, rax
2334# else
2335 mov eax, [uDRVal]
2336 mov dr3, eax
2337# endif
2338 }
2339# endif
2340}
2341#endif
2342
2343
2344/**
2345 * Sets dr6.
2346 *
2347 * @param uDRVal Debug register value to write
2348 */
2349#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2350DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
2351#else
2352DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
2353{
2354# if RT_INLINE_ASM_USES_INTRIN
2355 __writedr(6, uDRVal);
2356# elif RT_INLINE_ASM_GNU_STYLE
2357# ifdef RT_ARCH_AMD64
2358 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2359# else
2360 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2361# endif
2362# else
2363 __asm
2364 {
2365# ifdef RT_ARCH_AMD64
2366 mov rax, [uDRVal]
2367 mov dr6, rax
2368# else
2369 mov eax, [uDRVal]
2370 mov dr6, eax
2371# endif
2372 }
2373# endif
2374}
2375#endif
2376
2377
2378/**
2379 * Sets dr7.
2380 *
2381 * @param uDRVal Debug register value to write
2382 */
2383#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2384DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
2385#else
2386DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
2387{
2388# if RT_INLINE_ASM_USES_INTRIN
2389 __writedr(7, uDRVal);
2390# elif RT_INLINE_ASM_GNU_STYLE
2391# ifdef RT_ARCH_AMD64
2392 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2393# else
2394 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2395# endif
2396# else
2397 __asm
2398 {
2399# ifdef RT_ARCH_AMD64
2400 mov rax, [uDRVal]
2401 mov dr7, rax
2402# else
2403 mov eax, [uDRVal]
2404 mov dr7, eax
2405# endif
2406 }
2407# endif
2408}
2409#endif
2410
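/*
 * Usage sketch for the DRx setters: arm a local 1-byte execution breakpoint
 * in DR0 (DR7.L0 is bit 0; leaving R/W0 and LEN0, bits 17:16 and 19:18,
 * zero selects instruction execution).  Ring-0 sketch without any
 * ownership or sharing handling.
 */
#if 0 /* usage sketch */
static void ExampleArmExecBreakpoint(void *pvCode)
{
    ASMSetDR0((uintptr_t)pvCode);                   /* linear address to trap */
    ASMSetDR7(ASMGetDR7() | RT_BIT_32(0) /* L0 */);
}
#endif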
2411
2412/**
2413 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2414 *
2415 * @param Port I/O port to write to.
2416 * @param u8 8-bit integer to write.
2417 */
2418#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2419DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2420#else
2421DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2422{
2423# if RT_INLINE_ASM_GNU_STYLE
2424 __asm__ __volatile__("outb %b1, %w0\n\t"
2425 :: "Nd" (Port),
2426 "a" (u8));
2427
2428# elif RT_INLINE_ASM_USES_INTRIN
2429 __outbyte(Port, u8);
2430
2431# else
2432 __asm
2433 {
2434 mov dx, [Port]
2435 mov al, [u8]
2436 out dx, al
2437 }
2438# endif
2439}
2440#endif
2441
2442
2443/**
2444 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2445 *
2446 * @returns 8-bit integer.
2447 * @param Port I/O port to read from.
2448 */
2449#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2450DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2451#else
2452DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2453{
2454 uint8_t u8;
2455# if RT_INLINE_ASM_GNU_STYLE
2456 __asm__ __volatile__("inb %w1, %b0\n\t"
2457 : "=a" (u8)
2458 : "Nd" (Port));
2459
2460# elif RT_INLINE_ASM_USES_INTRIN
2461 u8 = __inbyte(Port);
2462
2463# else
2464 __asm
2465 {
2466 mov dx, [Port]
2467 in al, dx
2468 mov [u8], al
2469 }
2470# endif
2471 return u8;
2472}
2473#endif
2474
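/*
 * Usage sketch for ASMOutU8()/ASMInU8(): a CMOS/RTC register read via the
 * standard PC index/data port pair 0x70/0x71 (NMI-disable bit ignored).
 */
#if 0 /* usage sketch */
static uint8_t ExampleReadCmosReg(uint8_t bReg)
{
    ASMOutU8(0x70, bReg);   /* select the CMOS register via the index port */
    return ASMInU8(0x71);   /* fetch its value from the data port */
}
#endif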
2475
2476/**
2477 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2478 *
2479 * @param Port I/O port to write to.
2480 * @param u16 16-bit integer to write.
2481 */
2482#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2483DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2484#else
2485DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2486{
2487# if RT_INLINE_ASM_GNU_STYLE
2488 __asm__ __volatile__("outw %w1, %w0\n\t"
2489 :: "Nd" (Port),
2490 "a" (u16));
2491
2492# elif RT_INLINE_ASM_USES_INTRIN
2493 __outword(Port, u16);
2494
2495# else
2496 __asm
2497 {
2498 mov dx, [Port]
2499 mov ax, [u16]
2500 out dx, ax
2501 }
2502# endif
2503}
2504#endif


/**
 * Reads a 16-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 16-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
#else
DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
{
    uint16_t u16;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inw %w1, %w0\n\t"
                         : "=a" (u16)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u16 = __inword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      ax, dx
        mov     [u16], ax
    }
# endif
    return u16;
}
#endif


/**
 * Writes a 32-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u32     32-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
#else
DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outl %1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u32));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdword(Port, u32);

# else
    __asm
    {
        mov     dx, [Port]
        mov     eax, [u32]
        out     dx, eax
    }
# endif
}
#endif


/**
 * Reads a 32-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 32-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
#else
DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inl %w1, %0\n\t"
                         : "=a" (u32)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = __indword(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      eax, dx
        mov     [u32], eax
    }
# endif
    return u32;
}
#endif
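
/* A minimal usage sketch: reading a 32-bit PCI configuration register through
 * the legacy configuration mechanism #1 ports, 0xCF8 (address) and 0xCFC
 * (data).  The address layout (enable bit 31, bus/device/function/register
 * fields) is architected by the PCI specification; the hypothetical Example*
 * helper below does no locking, so concurrent users of 0xCF8/0xCFC must be
 * excluded by the caller.
 *
 * @code
 *  static uint32_t ExamplePciConfigRead32(uint8_t uBus, uint8_t uDevice, uint8_t uFunction, uint8_t offReg)
 *  {
 *      uint32_t const uAddr = UINT32_C(0x80000000)             // enable bit
 *                           | ((uint32_t)uBus              << 16)
 *                           | ((uint32_t)(uDevice   & 0x1f) << 11)
 *                           | ((uint32_t)(uFunction & 0x07) <<  8)
 *                           | (uint32_t)(offReg & 0xfc);       // dword aligned register offset
 *      ASMOutU32(0xcf8, uAddr);
 *      return ASMInU32(0xcfc);
 *  }
 * @endcode
 */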


/**
 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau8    Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
#else
DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsb\n\t"
                         : "+S" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    esi, eax
        rep outsb
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau8    Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
#else
DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insb\n\t"
                         : "+D" (pau8),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inbytestring(Port, pau8, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau8]
        xchg    edi, eax
        rep insb
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau16   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
#else
DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsw\n\t"
                         : "+S" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    esi, eax
        rep outsw
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau16   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
#else
DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insw\n\t"
                         : "+D" (pau16),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __inwordstring(Port, pau16, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau16]
        xchg    edi, eax
        rep insw
        xchg    edi, eax
    }
# endif
}
#endif
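
/* A minimal usage sketch: transferring one 512-byte sector of ATA PIO data
 * through the primary controller's data register at port 0x1F0.  A real driver
 * would first issue a READ SECTOR(S) command and poll the status register
 * until DRQ is set; that protocol is omitted here and only the data phase
 * using the string-I/O helper is shown.  The Example* name is hypothetical.
 *
 * @code
 *  static void ExampleAtaPioReadSector(uint16_t *pau16SectorBuf)
 *  {
 *      ASMInStrU16(0x1f0, pau16SectorBuf, 256);    // 256 words = 512 bytes
 *  }
 * @endcode
 */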


/**
 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   pau32   Pointer to the string buffer.
 * @param   c       The number of items to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
#else
DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; outsl\n\t"
                         : "+S" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    esi, eax
        rep outsd
        xchg    esi, eax
    }
# endif
}
#endif


/**
 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
 *
 * @param   Port    I/O port to read from.
 * @param   pau32   Pointer to the string buffer (output).
 * @param   c       The number of items to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
#else
DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep; insl\n\t"
                         : "+D" (pau32),
                           "+c" (c)
                         : "d" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);

# else
    __asm
    {
        mov     dx, [Port]
        mov     ecx, [c]
        mov     eax, [pau32]
        xchg    edi, eax
        rep insd
        xchg    edi, eax
    }
# endif
}
#endif


/**
 * Invalidates the TLB entry for the page containing the specified address.
 *
 * @param   pv      Address of the page to invalidate.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMInvalidatePage(void *pv);
#else
DECLINLINE(void) ASMInvalidatePage(void *pv)
{
# if RT_INLINE_ASM_USES_INTRIN
    __invlpg(pv);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invlpg %0\n\t"
                         : : "m" (*(uint8_t *)pv));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pv]
        invlpg  [rax]
#  else
        mov     eax, [pv]
        invlpg  [eax]
#  endif
    }
# endif
}
#endif
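
/* A minimal usage sketch: after modifying a live page table entry, the stale
 * TLB entry must be flushed before the new mapping can be relied upon.  The
 * raw uint64_t PTE and the Example* helper are stand-ins; only the store +
 * ASMInvalidatePage() ordering is the point being illustrated.
 *
 * @code
 *  static void ExampleRemapPage(uint64_t volatile *pPte, uint64_t uNewPte, void *pvPage)
 *  {
 *      *pPte = uNewPte;            // publish the new PTE
 *      ASMInvalidatePage(pvPage);  // drop the stale translation on this CPU
 *      // Other CPUs that may have cached the translation need a TLB shootdown (IPI), not shown.
 *  }
 * @endcode
 */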


/**
 * Writes back the internal caches and invalidates them.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
#else
DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __wbinvd();

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wbinvd");
# else
    __asm
    {
        wbinvd
    }
# endif
}
#endif
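
/* A minimal usage sketch of the classic sequence for changing memory caching
 * attributes (e.g. reprogramming MTRRs), loosely following the procedure in
 * the Intel SDM: disable interrupts, enter no-fill cache mode via CR0.CD,
 * flush the caches, perform the reprogramming, then restore the previous
 * state.  This is simplified (no TLB flush, no cross-CPU rendezvous);
 * ASMGetCR0/ASMSetCR0 and the flags helpers are declared earlier in this
 * header, X86_CR0_CD is assumed to come from iprt/x86.h, and the Example*
 * helper is hypothetical.
 *
 * @code
 *  static void ExampleWithCachesDisabled(void (*pfnReprogram)(void))
 *  {
 *      RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
 *      RTCCUINTREG const uSavedCr0   = ASMGetCR0();
 *      ASMSetCR0(uSavedCr0 | X86_CR0_CD);      // enter no-fill cache mode
 *      ASMWriteBackAndInvalidateCaches();      // flush dirty lines and invalidate
 *      pfnReprogram();                         // e.g. rewrite the MTRRs via ASMWrMsr()
 *      ASMWriteBackAndInvalidateCaches();
 *      ASMSetCR0(uSavedCr0);                   // restore normal caching
 *      ASMSetFlags(fSavedFlags);               // restore the interrupt flag
 *  }
 * @endcode
 */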


/**
 * Invalidates the internal and (possibly) external caches without writing
 * back dirty cache lines first; any unwritten data in the caches is lost.
 * Use with extreme care.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMInvalidateInternalCaches(void);
#else
DECLINLINE(void) ASMInvalidateInternalCaches(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("invd");
# else
    __asm
    {
        invd
    }
# endif
}
#endif


/**
 * Memory load/store fence, waits for any pending writes and reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMMemoryFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t"); /* mfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_mfence();
#else
    __asm
    {
        _emit   0x0f    /* mfence, hand-encoded for assemblers without SSE2 support */
        _emit   0xae
        _emit   0xf0
    }
#endif
}


/**
 * Memory store fence, waits for any writes to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
 */
DECLINLINE(void) ASMWriteFenceSSE(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t"); /* sfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_sfence();
#else
    __asm
    {
        _emit   0x0f    /* sfence, hand-encoded for assemblers without SSE support */
        _emit   0xae
        _emit   0xf8
    }
#endif
}


/**
 * Memory load fence, waits for any pending reads to complete.
 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
 */
DECLINLINE(void) ASMReadFenceSSE2(void)
{
#if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t"); /* lfence */
#elif RT_INLINE_ASM_USES_INTRIN
    _mm_lfence();
#else
    __asm
    {
        _emit   0x0f    /* lfence, hand-encoded for assemblers without SSE2 support */
        _emit   0xae
        _emit   0xe8
    }
#endif
}
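
/* A minimal usage sketch of the store fence in a single-producer ring buffer:
 * the payload must become globally visible before the index that publishes it.
 * On x86, ordinary stores to write-back memory are already ordered, so the
 * fence chiefly matters for write-combining memory and non-temporal stores.
 * The ring layout and the Example* helper are hypothetical.
 *
 * @code
 *  static void ExampleRingPublish(uint32_t volatile *pau32Ring, uint32_t volatile *pidxWrite,
 *                                 uint32_t idx, uint32_t u32Payload)
 *  {
 *      pau32Ring[idx] = u32Payload;    // 1. store the payload
 *      ASMWriteFenceSSE();             // 2. ensure the payload is visible first
 *      *pidxWrite = idx + 1;           // 3. publish it by advancing the write index
 *  }
 * @endcode
 */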

/** @} */
#endif
