VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@ 47071

Last change on this file since 47071 was 46925, checked in by vboxsync, 11 years ago

VMM: Optimized world-switch with lazy restoration of LDTR and TR on Intel.

1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2013 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
31# error "Not on AMD64 or x86"
32#endif
33
34#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
35# include <intrin.h>
36 /* Emit the intrinsics at all optimization levels. */
37# pragma intrinsic(_ReadWriteBarrier)
38# pragma intrinsic(__cpuid)
39# pragma intrinsic(_enable)
40# pragma intrinsic(_disable)
41# pragma intrinsic(__rdtsc)
42# pragma intrinsic(__readmsr)
43# pragma intrinsic(__writemsr)
44# pragma intrinsic(__outbyte)
45# pragma intrinsic(__outbytestring)
46# pragma intrinsic(__outword)
47# pragma intrinsic(__outwordstring)
48# pragma intrinsic(__outdword)
49# pragma intrinsic(__outdwordstring)
50# pragma intrinsic(__inbyte)
51# pragma intrinsic(__inbytestring)
52# pragma intrinsic(__inword)
53# pragma intrinsic(__inwordstring)
54# pragma intrinsic(__indword)
55# pragma intrinsic(__indwordstring)
56# pragma intrinsic(__invlpg)
57# pragma intrinsic(__wbinvd)
58# pragma intrinsic(__readcr0)
59# pragma intrinsic(__readcr2)
60# pragma intrinsic(__readcr3)
61# pragma intrinsic(__readcr4)
62# pragma intrinsic(__writecr0)
63# pragma intrinsic(__writecr3)
64# pragma intrinsic(__writecr4)
65# pragma intrinsic(__readdr)
66# pragma intrinsic(__writedr)
67# ifdef RT_ARCH_AMD64
68# pragma intrinsic(__readcr8)
69# pragma intrinsic(__writecr8)
70# endif
71#endif
72
73
74
75/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
76 * @ingroup grp_rt_asm
77 * @{
78 */
79
80/** @todo find a more proper place for this structure? */
81#pragma pack(1)
82/** IDTR */
83typedef struct RTIDTR
84{
85 /** Size of the IDT. */
86 uint16_t cbIdt;
87 /** Address of the IDT. */
88 uintptr_t pIdt;
89} RTIDTR, *PRTIDTR;
90#pragma pack()
91
92#pragma pack(1)
93/** GDTR */
94typedef struct RTGDTR
95{
96 /** Size of the GDT. */
97 uint16_t cbGdt;
98 /** Address of the GDT. */
99 uintptr_t pGdt;
100} RTGDTR, *PRTGDTR;
101#pragma pack()
102
103
104/**
105 * Gets the content of the IDTR CPU register.
106 * @param pIdtr Where to store the IDTR contents.
107 */
108#if RT_INLINE_ASM_EXTERNAL
109DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
110#else
111DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
112{
113# if RT_INLINE_ASM_GNU_STYLE
114 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
115# else
116 __asm
117 {
118# ifdef RT_ARCH_AMD64
119 mov rax, [pIdtr]
120 sidt [rax]
121# else
122 mov eax, [pIdtr]
123 sidt [eax]
124# endif
125 }
126# endif
127}
128#endif
129
130
131/**
132 * Sets the content of the IDTR CPU register.
133 * @param pIdtr Where to load the IDTR contents from
134 */
135#if RT_INLINE_ASM_EXTERNAL
136DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
137#else
138DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
139{
140# if RT_INLINE_ASM_GNU_STYLE
141 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
142# else
143 __asm
144 {
145# ifdef RT_ARCH_AMD64
146 mov rax, [pIdtr]
147 lidt [rax]
148# else
149 mov eax, [pIdtr]
150 lidt [eax]
151# endif
152 }
153# endif
154}
155#endif
156
157
158/**
159 * Gets the content of the GDTR CPU register.
160 * @param pGdtr Where to store the GDTR contents.
161 */
162#if RT_INLINE_ASM_EXTERNAL
163DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
164#else
165DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
166{
167# if RT_INLINE_ASM_GNU_STYLE
168 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
169# else
170 __asm
171 {
172# ifdef RT_ARCH_AMD64
173 mov rax, [pGdtr]
174 sgdt [rax]
175# else
176 mov eax, [pGdtr]
177 sgdt [eax]
178# endif
179 }
180# endif
181}
182#endif
183
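/*
 * Usage sketch (editorial addition, not part of the original header): reading
 * the IDTR with the getter above. ExampleIdtCoversVector() is a hypothetical
 * name; the arithmetic follows from SIDT storing limit = size - 1 and IDT
 * entries being 8 bytes on x86 and 16 bytes on AMD64.
 */
DECLINLINE(bool) ExampleIdtCoversVector(uint8_t uVector)
{
    RTIDTR Idtr;
    ASMGetIDTR(&Idtr);
#ifdef RT_ARCH_AMD64
    return (uint32_t)Idtr.cbIdt + 1 >= ((uint32_t)uVector + 1) * 16;
#else
    return (uint32_t)Idtr.cbIdt + 1 >= ((uint32_t)uVector + 1) * 8;
#endif
}
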
184/**
185 * Get the cs register.
186 * @returns cs.
187 */
188#if RT_INLINE_ASM_EXTERNAL
189DECLASM(RTSEL) ASMGetCS(void);
190#else
191DECLINLINE(RTSEL) ASMGetCS(void)
192{
193 RTSEL SelCS;
194# if RT_INLINE_ASM_GNU_STYLE
195 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
196# else
197 __asm
198 {
199 mov ax, cs
200 mov [SelCS], ax
201 }
202# endif
203 return SelCS;
204}
205#endif
206
207
208/**
209 * Get the DS register.
210 * @returns DS.
211 */
212#if RT_INLINE_ASM_EXTERNAL
213DECLASM(RTSEL) ASMGetDS(void);
214#else
215DECLINLINE(RTSEL) ASMGetDS(void)
216{
217 RTSEL SelDS;
218# if RT_INLINE_ASM_GNU_STYLE
219 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
220# else
221 __asm
222 {
223 mov ax, ds
224 mov [SelDS], ax
225 }
226# endif
227 return SelDS;
228}
229#endif
230
231
232/**
233 * Get the ES register.
234 * @returns ES.
235 */
236#if RT_INLINE_ASM_EXTERNAL
237DECLASM(RTSEL) ASMGetES(void);
238#else
239DECLINLINE(RTSEL) ASMGetES(void)
240{
241 RTSEL SelES;
242# if RT_INLINE_ASM_GNU_STYLE
243 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
244# else
245 __asm
246 {
247 mov ax, es
248 mov [SelES], ax
249 }
250# endif
251 return SelES;
252}
253#endif
254
255
256/**
257 * Get the FS register.
258 * @returns FS.
259 */
260#if RT_INLINE_ASM_EXTERNAL
261DECLASM(RTSEL) ASMGetFS(void);
262#else
263DECLINLINE(RTSEL) ASMGetFS(void)
264{
265 RTSEL SelFS;
266# if RT_INLINE_ASM_GNU_STYLE
267 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
268# else
269 __asm
270 {
271 mov ax, fs
272 mov [SelFS], ax
273 }
274# endif
275 return SelFS;
276}
277#endif
278
279
280/**
281 * Get the GS register.
282 * @returns GS.
283 */
284#if RT_INLINE_ASM_EXTERNAL
285DECLASM(RTSEL) ASMGetGS(void);
286#else
287DECLINLINE(RTSEL) ASMGetGS(void)
288{
289 RTSEL SelGS;
290# if RT_INLINE_ASM_GNU_STYLE
291 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
292# else
293 __asm
294 {
295 mov ax, gs
296 mov [SelGS], ax
297 }
298# endif
299 return SelGS;
300}
301#endif
302
303
304/**
305 * Get the SS register.
306 * @returns SS.
307 */
308#if RT_INLINE_ASM_EXTERNAL
309DECLASM(RTSEL) ASMGetSS(void);
310#else
311DECLINLINE(RTSEL) ASMGetSS(void)
312{
313 RTSEL SelSS;
314# if RT_INLINE_ASM_GNU_STYLE
315 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
316# else
317 __asm
318 {
319 mov ax, ss
320 mov [SelSS], ax
321 }
322# endif
323 return SelSS;
324}
325#endif
326
327
328/**
329 * Get the TR register.
330 * @returns TR.
331 */
332#if RT_INLINE_ASM_EXTERNAL
333DECLASM(RTSEL) ASMGetTR(void);
334#else
335DECLINLINE(RTSEL) ASMGetTR(void)
336{
337 RTSEL SelTR;
338# if RT_INLINE_ASM_GNU_STYLE
339 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
340# else
341 __asm
342 {
343 str ax
344 mov [SelTR], ax
345 }
346# endif
347 return SelTR;
348}
349#endif
350
351
352/**
353 * Get the LDTR register.
354 * @returns LDTR.
355 */
356#if RT_INLINE_ASM_EXTERNAL
357DECLASM(RTSEL) ASMGetLDTR(void);
358#else
359DECLINLINE(RTSEL) ASMGetLDTR(void)
360{
361 RTSEL SelLDTR;
362# if RT_INLINE_ASM_GNU_STYLE
363 __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
364# else
365 __asm
366 {
367 sldt ax
368 mov [SelLDTR], ax
369 }
370# endif
371 return SelLDTR;
372}
373#endif
374
375
376/**
377 * Get the [RE]FLAGS register.
378 * @returns [RE]FLAGS.
379 */
380#if RT_INLINE_ASM_EXTERNAL
381DECLASM(RTCCUINTREG) ASMGetFlags(void);
382#else
383DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
384{
385 RTCCUINTREG uFlags;
386# if RT_INLINE_ASM_GNU_STYLE
387# ifdef RT_ARCH_AMD64
388 __asm__ __volatile__("pushfq\n\t"
389 "popq %0\n\t"
390 : "=r" (uFlags));
391# else
392 __asm__ __volatile__("pushfl\n\t"
393 "popl %0\n\t"
394 : "=r" (uFlags));
395# endif
396# else
397 __asm
398 {
399# ifdef RT_ARCH_AMD64
400 pushfq
401 pop [uFlags]
402# else
403 pushfd
404 pop [uFlags]
405# endif
406 }
407# endif
408 return uFlags;
409}
410#endif
411
412
413/**
414 * Set the [RE]FLAGS register.
415 * @param uFlags The new [RE]FLAGS value.
416 */
417#if RT_INLINE_ASM_EXTERNAL
418DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
419#else
420DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
421{
422# if RT_INLINE_ASM_GNU_STYLE
423# ifdef RT_ARCH_AMD64
424 __asm__ __volatile__("pushq %0\n\t"
425 "popfq\n\t"
426 : : "g" (uFlags));
427# else
428 __asm__ __volatile__("pushl %0\n\t"
429 "popfl\n\t"
430 : : "g" (uFlags));
431# endif
432# else
433 __asm
434 {
435# ifdef RT_ARCH_AMD64
436 push [uFlags]
437 popfq
438# else
439 push [uFlags]
440 popfd
441# endif
442 }
443# endif
444}
445#endif
446
447
448/**
449 * Gets the content of the CPU timestamp counter register.
450 *
451 * @returns TSC.
452 */
453#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
454DECLASM(uint64_t) ASMReadTSC(void);
455#else
456DECLINLINE(uint64_t) ASMReadTSC(void)
457{
458 RTUINT64U u;
459# if RT_INLINE_ASM_GNU_STYLE
460 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
461# else
462# if RT_INLINE_ASM_USES_INTRIN
463 u.u = __rdtsc();
464# else
465 __asm
466 {
467 rdtsc
468 mov [u.s.Lo], eax
469 mov [u.s.Hi], edx
470 }
471# endif
472# endif
473 return u.u;
474}
475#endif
476
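/*
 * Usage sketch (editorial addition): timing a code block with ASMReadTSC().
 * RDTSC is not serializing, so precise measurements usually bracket the reads
 * with CPUID or fences; this minimal version skips that. The function name is
 * hypothetical.
 */
DECLINLINE(uint64_t) ExampleTscDelta(void)
{
    uint64_t const uStart = ASMReadTSC();
    /* ... the code being measured goes here ... */
    return ASMReadTSC() - uStart;
}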
477
478/**
479 * Performs the cpuid instruction returning all registers.
480 *
481 * @param uOperator CPUID operation (eax).
482 * @param pvEAX Where to store eax.
483 * @param pvEBX Where to store ebx.
484 * @param pvECX Where to store ecx.
485 * @param pvEDX Where to store edx.
486 * @remark We're using void pointers to ease the use of special bitfield structures and such.
487 */
488#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
489DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
490#else
491DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
492{
493# if RT_INLINE_ASM_GNU_STYLE
494# ifdef RT_ARCH_AMD64
495 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
496 __asm__ __volatile__ ("cpuid\n\t"
497 : "=a" (uRAX),
498 "=b" (uRBX),
499 "=c" (uRCX),
500 "=d" (uRDX)
501 : "0" (uOperator), "2" (0));
502 *(uint32_t *)pvEAX = (uint32_t)uRAX;
503 *(uint32_t *)pvEBX = (uint32_t)uRBX;
504 *(uint32_t *)pvECX = (uint32_t)uRCX;
505 *(uint32_t *)pvEDX = (uint32_t)uRDX;
506# else
507 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
508 "cpuid\n\t"
509 "xchgl %%ebx, %1\n\t"
510 : "=a" (*(uint32_t *)pvEAX),
511 "=r" (*(uint32_t *)pvEBX),
512 "=c" (*(uint32_t *)pvECX),
513 "=d" (*(uint32_t *)pvEDX)
514 : "0" (uOperator), "2" (0));
515# endif
516
517# elif RT_INLINE_ASM_USES_INTRIN
518 int aInfo[4];
519 __cpuid(aInfo, uOperator);
520 *(uint32_t *)pvEAX = aInfo[0];
521 *(uint32_t *)pvEBX = aInfo[1];
522 *(uint32_t *)pvECX = aInfo[2];
523 *(uint32_t *)pvEDX = aInfo[3];
524
525# else
526 uint32_t uEAX;
527 uint32_t uEBX;
528 uint32_t uECX;
529 uint32_t uEDX;
530 __asm
531 {
532 push ebx
533 mov eax, [uOperator]
534 cpuid
535 mov [uEAX], eax
536 mov [uEBX], ebx
537 mov [uECX], ecx
538 mov [uEDX], edx
539 pop ebx
540 }
541 *(uint32_t *)pvEAX = uEAX;
542 *(uint32_t *)pvEBX = uEBX;
543 *(uint32_t *)pvECX = uECX;
544 *(uint32_t *)pvEDX = uEDX;
545# endif
546}
547#endif
548
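/*
 * Usage sketch (editorial addition): fetching the 12-character vendor string
 * from leaf 0 in the EBX, EDX, ECX order that ASMIsIntelCpuEx() and friends
 * expect. ExampleGetCpuVendor is a hypothetical name; memcpy is assumed to be
 * available (e.g. via iprt/string.h), and ASMHasCpuId() is declared further
 * down in this file.
 */
DECLINLINE(void) ExampleGetCpuVendor(char szVendor[13])
{
    uint32_t uEAX = 0, uEBX = 0, uECX = 0, uEDX = 0;
    if (ASMHasCpuId())
        ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    memcpy(&szVendor[0], &uEBX, 4);     /* "Genu" / "Auth" / "Cent" */
    memcpy(&szVendor[4], &uEDX, 4);     /* "ineI" / "enti" / "aurH" */
    memcpy(&szVendor[8], &uECX, 4);     /* "ntel" / "cAMD" / "auls" */
    szVendor[12] = '\0';
}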
549
550/**
551 * Performs the CPUID instruction with EAX and ECX input returning ALL output
552 * registers.
553 *
554 * @param uOperator CPUID operation (eax).
555 * @param uIdxECX ecx index
556 * @param pvEAX Where to store eax.
557 * @param pvEBX Where to store ebx.
558 * @param pvECX Where to store ecx.
559 * @param pvEDX Where to store edx.
560 * @remark We're using void pointers to ease the use of special bitfield structures and such.
561 */
562#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
563DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
564#else
565DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
566{
567# if RT_INLINE_ASM_GNU_STYLE
568# ifdef RT_ARCH_AMD64
569 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
570 __asm__ ("cpuid\n\t"
571 : "=a" (uRAX),
572 "=b" (uRBX),
573 "=c" (uRCX),
574 "=d" (uRDX)
575 : "0" (uOperator),
576 "2" (uIdxECX));
577 *(uint32_t *)pvEAX = (uint32_t)uRAX;
578 *(uint32_t *)pvEBX = (uint32_t)uRBX;
579 *(uint32_t *)pvECX = (uint32_t)uRCX;
580 *(uint32_t *)pvEDX = (uint32_t)uRDX;
581# else
582 __asm__ ("xchgl %%ebx, %1\n\t"
583 "cpuid\n\t"
584 "xchgl %%ebx, %1\n\t"
585 : "=a" (*(uint32_t *)pvEAX),
586 "=r" (*(uint32_t *)pvEBX),
587 "=c" (*(uint32_t *)pvECX),
588 "=d" (*(uint32_t *)pvEDX)
589 : "0" (uOperator),
590 "2" (uIdxECX));
591# endif
592
593# elif RT_INLINE_ASM_USES_INTRIN
594 int aInfo[4];
595 __cpuidex(aInfo, uOperator, uIdxECX);
596 *(uint32_t *)pvEAX = aInfo[0];
597 *(uint32_t *)pvEBX = aInfo[1];
598 *(uint32_t *)pvECX = aInfo[2];
599 *(uint32_t *)pvEDX = aInfo[3];
600
601# else
602 uint32_t uEAX;
603 uint32_t uEBX;
604 uint32_t uECX;
605 uint32_t uEDX;
606 __asm
607 {
608 push ebx
609 mov eax, [uOperator]
610 mov ecx, [uIdxECX]
611 cpuid
612 mov [uEAX], eax
613 mov [uEBX], ebx
614 mov [uECX], ecx
615 mov [uEDX], edx
616 pop ebx
617 }
618 *(uint32_t *)pvEAX = uEAX;
619 *(uint32_t *)pvEBX = uEBX;
620 *(uint32_t *)pvECX = uECX;
621 *(uint32_t *)pvEDX = uEDX;
622# endif
623}
624#endif
625
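/*
 * Usage sketch (editorial addition): querying a sub-leaf with
 * ASMCpuId_Idx_ECX(). Leaf 7, sub-leaf 0 holds the structured extended
 * feature flags; EBX bit 7 is SMEP in the Intel/AMD documentation.
 * ExampleHasSmep is a hypothetical name, and ASMIsValidStdRange() is
 * declared further down in this file.
 */
DECLINLINE(bool) ExampleHasSmep(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    if (!ASMIsValidStdRange(uEAX) || uEAX < 7)
        return false;
    ASMCpuId_Idx_ECX(7, 0, &uEAX, &uEBX, &uECX, &uEDX);
    return RT_BOOL(uEBX & RT_BIT_32(7));
}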
626
627/**
628 * Performs the cpuid instruction returning ecx and edx.
629 *
630 * @param uOperator CPUID operation (eax).
631 * @param pvECX Where to store ecx.
632 * @param pvEDX Where to store edx.
633 * @remark We're using void pointers to ease the use of special bitfield structures and such.
634 */
635#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
636DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
637#else
638DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
639{
640 uint32_t uEBX;
641 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
642}
643#endif
644
645
646/**
647 * Performs the cpuid instruction returning eax.
648 *
649 * @param uOperator CPUID operation (eax).
650 * @returns EAX after cpuid operation.
651 */
652#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
653DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
654#else
655DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
656{
657 RTCCUINTREG xAX;
658# if RT_INLINE_ASM_GNU_STYLE
659# ifdef RT_ARCH_AMD64
660 __asm__ ("cpuid"
661 : "=a" (xAX)
662 : "0" (uOperator)
663 : "rbx", "rcx", "rdx");
664# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
665 __asm__ ("push %%ebx\n\t"
666 "cpuid\n\t"
667 "pop %%ebx\n\t"
668 : "=a" (xAX)
669 : "0" (uOperator)
670 : "ecx", "edx");
671# else
672 __asm__ ("cpuid"
673 : "=a" (xAX)
674 : "0" (uOperator)
675 : "edx", "ecx", "ebx");
676# endif
677
678# elif RT_INLINE_ASM_USES_INTRIN
679 int aInfo[4];
680 __cpuid(aInfo, uOperator);
681 xAX = aInfo[0];
682
683# else
684 __asm
685 {
686 push ebx
687 mov eax, [uOperator]
688 cpuid
689 mov [xAX], eax
690 pop ebx
691 }
692# endif
693 return (uint32_t)xAX;
694}
695#endif
696
697
698/**
699 * Performs the cpuid instruction returning ebx.
700 *
701 * @param uOperator CPUID operation (eax).
702 * @returns EBX after cpuid operation.
703 */
704#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
705DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
706#else
707DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
708{
709 RTCCUINTREG xBX;
710# if RT_INLINE_ASM_GNU_STYLE
711# ifdef RT_ARCH_AMD64
712 RTCCUINTREG uSpill;
713 __asm__ ("cpuid"
714 : "=a" (uSpill),
715 "=b" (xBX)
716 : "0" (uOperator)
717 : "rdx", "rcx");
718# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
719 __asm__ ("push %%ebx\n\t"
720 "cpuid\n\t"
721 "mov %%ebx, %%edx\n\t"
722 "pop %%ebx\n\t"
723 : "=a" (uOperator),
724 "=d" (xBX)
725 : "0" (uOperator)
726 : "ecx");
727# else
728 __asm__ ("cpuid"
729 : "=a" (uOperator),
730 "=b" (xBX)
731 : "0" (uOperator)
732 : "edx", "ecx");
733# endif
734
735# elif RT_INLINE_ASM_USES_INTRIN
736 int aInfo[4];
737 __cpuid(aInfo, uOperator);
738 xBX = aInfo[1];
739
740# else
741 __asm
742 {
743 push ebx
744 mov eax, [uOperator]
745 cpuid
746 mov [xBX], ebx
747 pop ebx
748 }
749# endif
750 return (uint32_t)xBX;
751}
752#endif
753
754
755/**
756 * Performs the cpuid instruction returning ecx.
757 *
758 * @param uOperator CPUID operation (eax).
759 * @returns ECX after cpuid operation.
760 */
761#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
762DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
763#else
764DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
765{
766 RTCCUINTREG xCX;
767# if RT_INLINE_ASM_GNU_STYLE
768# ifdef RT_ARCH_AMD64
769 RTCCUINTREG uSpill;
770 __asm__ ("cpuid"
771 : "=a" (uSpill),
772 "=c" (xCX)
773 : "0" (uOperator)
774 : "rbx", "rdx");
775# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
776 __asm__ ("push %%ebx\n\t"
777 "cpuid\n\t"
778 "pop %%ebx\n\t"
779 : "=a" (uOperator),
780 "=c" (xCX)
781 : "0" (uOperator)
782 : "edx");
783# else
784 __asm__ ("cpuid"
785 : "=a" (uOperator),
786 "=c" (xCX)
787 : "0" (uOperator)
788 : "ebx", "edx");
789
790# endif
791
792# elif RT_INLINE_ASM_USES_INTRIN
793 int aInfo[4];
794 __cpuid(aInfo, uOperator);
795 xCX = aInfo[2];
796
797# else
798 __asm
799 {
800 push ebx
801 mov eax, [uOperator]
802 cpuid
803 mov [xCX], ecx
804 pop ebx
805 }
806# endif
807 return (uint32_t)xCX;
808}
809#endif
810
811
812/**
813 * Performs the cpuid instruction returning edx.
814 *
815 * @param uOperator CPUID operation (eax).
816 * @returns EDX after cpuid operation.
817 */
818#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
819DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
820#else
821DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
822{
823 RTCCUINTREG xDX;
824# if RT_INLINE_ASM_GNU_STYLE
825# ifdef RT_ARCH_AMD64
826 RTCCUINTREG uSpill;
827 __asm__ ("cpuid"
828 : "=a" (uSpill),
829 "=d" (xDX)
830 : "0" (uOperator)
831 : "rbx", "rcx");
832# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
833 __asm__ ("push %%ebx\n\t"
834 "cpuid\n\t"
835 "pop %%ebx\n\t"
836 : "=a" (uOperator),
837 "=d" (xDX)
838 : "0" (uOperator)
839 : "ecx");
840# else
841 __asm__ ("cpuid"
842 : "=a" (uOperator),
843 "=d" (xDX)
844 : "0" (uOperator)
845 : "ebx", "ecx");
846# endif
847
848# elif RT_INLINE_ASM_USES_INTRIN
849 int aInfo[4];
850 __cpuid(aInfo, uOperator);
851 xDX = aInfo[3];
852
853# else
854 __asm
855 {
856 push ebx
857 mov eax, [uOperator]
858 cpuid
859 mov [xDX], edx
860 pop ebx
861 }
862# endif
863 return (uint32_t)xDX;
864}
865#endif
866
867
868/**
869 * Checks if the current CPU supports CPUID.
870 *
871 * @returns true if CPUID is supported.
872 */
873DECLINLINE(bool) ASMHasCpuId(void)
874{
875#ifdef RT_ARCH_AMD64
876 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
877#else /* !RT_ARCH_AMD64 */
878 bool fRet = false;
879# if RT_INLINE_ASM_GNU_STYLE
880 uint32_t u1;
881 uint32_t u2;
882 __asm__ ("pushf\n\t"
883 "pop %1\n\t"
884 "mov %1, %2\n\t"
885 "xorl $0x200000, %1\n\t"
886 "push %1\n\t"
887 "popf\n\t"
888 "pushf\n\t"
889 "pop %1\n\t"
890 "cmpl %1, %2\n\t"
891 "setne %0\n\t"
892 "push %2\n\t"
893 "popf\n\t"
894 : "=m" (fRet), "=r" (u1), "=r" (u2));
895# else
896 __asm
897 {
898 pushfd
899 pop eax
900 mov ebx, eax
901 xor eax, 0200000h
902 push eax
903 popfd
904 pushfd
905 pop eax
906 cmp eax, ebx
907 setne fRet
908 push ebx
909 popfd
910 }
911# endif
912 return fRet;
913#endif /* !RT_ARCH_AMD64 */
914}
915
916
917/**
918 * Gets the APIC ID of the current CPU.
919 *
920 * @returns the APIC ID.
921 */
922#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
923DECLASM(uint8_t) ASMGetApicId(void);
924#else
925DECLINLINE(uint8_t) ASMGetApicId(void)
926{
927 RTCCUINTREG xBX;
928# if RT_INLINE_ASM_GNU_STYLE
929# ifdef RT_ARCH_AMD64
930 RTCCUINTREG uSpill;
931 __asm__ __volatile__ ("cpuid"
932 : "=a" (uSpill),
933 "=b" (xBX)
934 : "0" (1)
935 : "rcx", "rdx");
936# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
937 RTCCUINTREG uSpill;
938 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
939 "cpuid\n\t"
940 "xchgl %%ebx,%1\n\t"
941 : "=a" (uSpill),
942 "=rm" (xBX)
943 : "0" (1)
944 : "ecx", "edx");
945# else
946 RTCCUINTREG uSpill;
947 __asm__ __volatile__ ("cpuid"
948 : "=a" (uSpill),
949 "=b" (xBX)
950 : "0" (1)
951 : "ecx", "edx");
952# endif
953
954# elif RT_INLINE_ASM_USES_INTRIN
955 int aInfo[4];
956 __cpuid(aInfo, 1);
957 xBX = aInfo[1];
958
959# else
960 __asm
961 {
962 push ebx
963 mov eax, 1
964 cpuid
965 mov [xBX], ebx
966 pop ebx
967 }
968# endif
969 return (uint8_t)(xBX >> 24);
970}
971#endif
972
973
974/**
975 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
976 *
977 * @returns true/false.
978 * @param uEBX EBX return from ASMCpuId(0)
979 * @param uECX ECX return from ASMCpuId(0)
980 * @param uEDX EDX return from ASMCpuId(0)
981 */
982DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
983{
984 return uEBX == UINT32_C(0x756e6547)
985 && uECX == UINT32_C(0x6c65746e)
986 && uEDX == UINT32_C(0x49656e69);
987}
988
989
990/**
991 * Tests if this is a genuine Intel CPU.
992 *
993 * @returns true/false.
994 * @remarks ASSUMES that cpuid is supported by the CPU.
995 */
996DECLINLINE(bool) ASMIsIntelCpu(void)
997{
998 uint32_t uEAX, uEBX, uECX, uEDX;
999 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1000 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
1001}
1002
1003
1004/**
1005 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
1006 *
1007 * @returns true/false.
1008 * @param uEBX EBX return from ASMCpuId(0)
1009 * @param uECX ECX return from ASMCpuId(0)
1010 * @param uEDX EDX return from ASMCpuId(0)
1011 */
1012DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1013{
1014 return uEBX == UINT32_C(0x68747541)
1015 && uECX == UINT32_C(0x444d4163)
1016 && uEDX == UINT32_C(0x69746e65);
1017}
1018
1019
1020/**
1021 * Tests if this is an authentic AMD CPU.
1022 *
1023 * @returns true/false.
1024 * @remarks ASSUMES that cpuid is supported by the CPU.
1025 */
1026DECLINLINE(bool) ASMIsAmdCpu(void)
1027{
1028 uint32_t uEAX, uEBX, uECX, uEDX;
1029 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1030 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1031}
1032
1033
1034/**
1035 * Tests if it is a centaur-hauling VIA CPU based on the ASMCpuId(0) output.
1036 *
1037 * @returns true/false.
1038 * @param uEBX EBX return from ASMCpuId(0).
1039 * @param uECX ECX return from ASMCpuId(0).
1040 * @param uEDX EDX return from ASMCpuId(0).
1041 */
1042DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1043{
1044 return uEBX == UINT32_C(0x746e6543)
1045 && uECX == UINT32_C(0x736c7561)
1046 && uEDX == UINT32_C(0x48727561);
1047}
1048
1049
1050/**
1051 * Tests if this is a centaur-hauling VIA CPU.
1052 *
1053 * @returns true/false.
1054 * @remarks ASSUMES that cpuid is supported by the CPU.
1055 */
1056DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1057{
1058 uint32_t uEAX, uEBX, uECX, uEDX;
1059 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1060 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1061}
1062
1063
1064/**
1065 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1066 *
1068 * @returns true/false.
1069 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1070 *
1071 * @note This only succeeds if there are at least two leaves in the range.
1072 * @remarks The upper range limit is just some half reasonable value we've
1073 * picked out of thin air.
1074 */
1075DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1076{
1077 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1078}
1079
1080
1081/**
1082 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1083 *
1086 * @returns true/false.
1087 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1088 *
1089 * @note This only succeeds if there are at least two leaves in the range.
1090 * @remarks The upper range limit is just some half reasonable value we've
1091 * picked out of thin air.
1092 */
1093DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1094{
1095 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1096}
1097
1098
1099/**
1100 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1101 *
1102 * @returns Family.
1103 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1104 */
1105DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1106{
1107 return ((uEAX >> 8) & 0xf) == 0xf
1108 ? ((uEAX >> 20) & 0x7f) + 0xf
1109 : ((uEAX >> 8) & 0xf);
1110}
1111
1112
1113/**
1114 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1115 *
1116 * @returns Model.
1117 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1118 */
1119DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1120{
1121 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1122 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1123 : ((uEAX >> 4) & 0xf);
1124}
1125
1126
1127/**
1128 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1129 *
1130 * @returns Model.
1131 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1132 */
1133DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1134{
1135 return ((uEAX >> 8) & 0xf) == 0xf
1136 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1137 : ((uEAX >> 4) & 0xf);
1138}
1139
1140
1141/**
1142 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1143 *
1144 * @returns Model.
1145 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1146 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1147 */
1148DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1149{
1150 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1151 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1152 : ((uEAX >> 4) & 0xf);
1153}
1154
1155
1156/**
1157 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1158 *
1159 * @returns Stepping.
1160 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1161 */
1162DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1163{
1164 return uEAX & 0xf;
1165}
1166
1167
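/*
 * Usage sketch (editorial addition): decoding the leaf-1 signature with the
 * helpers above. The fIntel flag matters because Intel extends the model for
 * family 6 as well as family 15, while AMD only does so for family 15. The
 * EXAMPLECPUSIG type and function name are hypothetical.
 */
typedef struct EXAMPLECPUSIG { uint32_t uFamily, uModel, uStepping; } EXAMPLECPUSIG;

DECLINLINE(void) ExampleDecodeCpuSignature(EXAMPLECPUSIG *pSig)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    bool const fIntel = ASMIsIntelCpuEx(uEBX, uECX, uEDX);
    uEAX = ASMCpuId_EAX(1);
    pSig->uFamily   = ASMGetCpuFamily(uEAX);
    pSig->uModel    = ASMGetCpuModel(uEAX, fIntel);
    pSig->uStepping = ASMGetCpuStepping(uEAX);
}
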
1168/**
1169 * Get cr0.
1170 * @returns cr0.
1171 */
1172#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1173DECLASM(RTCCUINTREG) ASMGetCR0(void);
1174#else
1175DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
1176{
1177 RTCCUINTREG uCR0;
1178# if RT_INLINE_ASM_USES_INTRIN
1179 uCR0 = __readcr0();
1180
1181# elif RT_INLINE_ASM_GNU_STYLE
1182# ifdef RT_ARCH_AMD64
1183 __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
1184# else
1185 __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
1186# endif
1187# else
1188 __asm
1189 {
1190# ifdef RT_ARCH_AMD64
1191 mov rax, cr0
1192 mov [uCR0], rax
1193# else
1194 mov eax, cr0
1195 mov [uCR0], eax
1196# endif
1197 }
1198# endif
1199 return uCR0;
1200}
1201#endif
1202
1203
1204/**
1205 * Sets the CR0 register.
1206 * @param uCR0 The new CR0 value.
1207 */
1208#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1209DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
1210#else
1211DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
1212{
1213# if RT_INLINE_ASM_USES_INTRIN
1214 __writecr0(uCR0);
1215
1216# elif RT_INLINE_ASM_GNU_STYLE
1217# ifdef RT_ARCH_AMD64
1218 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1219# else
1220 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1221# endif
1222# else
1223 __asm
1224 {
1225# ifdef RT_ARCH_AMD64
1226 mov rax, [uCR0]
1227 mov cr0, rax
1228# else
1229 mov eax, [uCR0]
1230 mov cr0, eax
1231# endif
1232 }
1233# endif
1234}
1235#endif
1236
1237
1238/**
1239 * Get cr2.
1240 * @returns cr2.
1241 */
1242#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1243DECLASM(RTCCUINTREG) ASMGetCR2(void);
1244#else
1245DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
1246{
1247 RTCCUINTREG uCR2;
1248# if RT_INLINE_ASM_USES_INTRIN
1249 uCR2 = __readcr2();
1250
1251# elif RT_INLINE_ASM_GNU_STYLE
1252# ifdef RT_ARCH_AMD64
1253 __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
1254# else
1255 __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
1256# endif
1257# else
1258 __asm
1259 {
1260# ifdef RT_ARCH_AMD64
1261 mov rax, cr2
1262 mov [uCR2], rax
1263# else
1264 mov eax, cr2
1265 mov [uCR2], eax
1266# endif
1267 }
1268# endif
1269 return uCR2;
1270}
1271#endif
1272
1273
1274/**
1275 * Sets the CR2 register.
1276 * @param uCR2 The new CR2 value.
1277 */
1278#if RT_INLINE_ASM_EXTERNAL
1279DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
1280#else
1281DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
1282{
1283# if RT_INLINE_ASM_GNU_STYLE
1284# ifdef RT_ARCH_AMD64
1285 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1286# else
1287 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1288# endif
1289# else
1290 __asm
1291 {
1292# ifdef RT_ARCH_AMD64
1293 mov rax, [uCR2]
1294 mov cr2, rax
1295# else
1296 mov eax, [uCR2]
1297 mov cr2, eax
1298# endif
1299 }
1300# endif
1301}
1302#endif
1303
1304
1305/**
1306 * Get cr3.
1307 * @returns cr3.
1308 */
1309#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1310DECLASM(RTCCUINTREG) ASMGetCR3(void);
1311#else
1312DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
1313{
1314 RTCCUINTREG uCR3;
1315# if RT_INLINE_ASM_USES_INTRIN
1316 uCR3 = __readcr3();
1317
1318# elif RT_INLINE_ASM_GNU_STYLE
1319# ifdef RT_ARCH_AMD64
1320 __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
1321# else
1322 __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
1323# endif
1324# else
1325 __asm
1326 {
1327# ifdef RT_ARCH_AMD64
1328 mov rax, cr3
1329 mov [uCR3], rax
1330# else
1331 mov eax, cr3
1332 mov [uCR3], eax
1333# endif
1334 }
1335# endif
1336 return uCR3;
1337}
1338#endif
1339
1340
1341/**
1342 * Sets the CR3 register.
1343 *
1344 * @param uCR3 New CR3 value.
1345 */
1346#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1347DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
1348#else
1349DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
1350{
1351# if RT_INLINE_ASM_USES_INTRIN
1352 __writecr3(uCR3);
1353
1354# elif RT_INLINE_ASM_GNU_STYLE
1355# ifdef RT_ARCH_AMD64
1356 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1357# else
1358 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1359# endif
1360# else
1361 __asm
1362 {
1363# ifdef RT_ARCH_AMD64
1364 mov rax, [uCR3]
1365 mov cr3, rax
1366# else
1367 mov eax, [uCR3]
1368 mov cr3, eax
1369# endif
1370 }
1371# endif
1372}
1373#endif
1374
1375
1376/**
1377 * Reloads the CR3 register.
1378 */
1379#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1380DECLASM(void) ASMReloadCR3(void);
1381#else
1382DECLINLINE(void) ASMReloadCR3(void)
1383{
1384# if RT_INLINE_ASM_USES_INTRIN
1385 __writecr3(__readcr3());
1386
1387# elif RT_INLINE_ASM_GNU_STYLE
1388 RTCCUINTREG u;
1389# ifdef RT_ARCH_AMD64
1390 __asm__ __volatile__("movq %%cr3, %0\n\t"
1391 "movq %0, %%cr3\n\t"
1392 : "=r" (u));
1393# else
1394 __asm__ __volatile__("movl %%cr3, %0\n\t"
1395 "movl %0, %%cr3\n\t"
1396 : "=r" (u));
1397# endif
1398# else
1399 __asm
1400 {
1401# ifdef RT_ARCH_AMD64
1402 mov rax, cr3
1403 mov cr3, rax
1404# else
1405 mov eax, cr3
1406 mov cr3, eax
1407# endif
1408 }
1409# endif
1410}
1411#endif
1412
1413
1414/**
1415 * Get cr4.
1416 * @returns cr4.
1417 */
1418#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1419DECLASM(RTCCUINTREG) ASMGetCR4(void);
1420#else
1421DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
1422{
1423 RTCCUINTREG uCR4;
1424# if RT_INLINE_ASM_USES_INTRIN
1425 uCR4 = __readcr4();
1426
1427# elif RT_INLINE_ASM_GNU_STYLE
1428# ifdef RT_ARCH_AMD64
1429 __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
1430# else
1431 __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
1432# endif
1433# else
1434 __asm
1435 {
1436# ifdef RT_ARCH_AMD64
1437 mov rax, cr4
1438 mov [uCR4], rax
1439# else
1440 push eax /* just in case */
1441 /*mov eax, cr4*/
1442 _emit 0x0f
1443 _emit 0x20
1444 _emit 0xe0
1445 mov [uCR4], eax
1446 pop eax
1447# endif
1448 }
1449# endif
1450 return uCR4;
1451}
1452#endif
1453
1454
1455/**
1456 * Sets the CR4 register.
1457 *
1458 * @param uCR4 New CR4 value.
1459 */
1460#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1461DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
1462#else
1463DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
1464{
1465# if RT_INLINE_ASM_USES_INTRIN
1466 __writecr4(uCR4);
1467
1468# elif RT_INLINE_ASM_GNU_STYLE
1469# ifdef RT_ARCH_AMD64
1470 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1471# else
1472 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1473# endif
1474# else
1475 __asm
1476 {
1477# ifdef RT_ARCH_AMD64
1478 mov rax, [uCR4]
1479 mov cr4, rax
1480# else
1481 mov eax, [uCR4]
1482 _emit 0x0F
1483 _emit 0x22
1484 _emit 0xE0 /* mov cr4, eax */
1485# endif
1486 }
1487# endif
1488}
1489#endif
1490
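/*
 * Usage sketch (editorial addition): testing a CR4 feature bit. Bit 5 is
 * CR4.PAE in the Intel/AMD manuals; reading CR4 requires ring 0. The function
 * name is hypothetical.
 */
DECLINLINE(bool) ExampleIsPaeEnabled(void)
{
    return RT_BOOL(ASMGetCR4() & RT_BIT_32(5) /* CR4.PAE */);
}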
1491
1492/**
1493 * Get cr8.
1494 * @returns cr8.
1495 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1496 */
1497#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1498DECLASM(RTCCUINTREG) ASMGetCR8(void);
1499#else
1500DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
1501{
1502# ifdef RT_ARCH_AMD64
1503 RTCCUINTREG uCR8;
1504# if RT_INLINE_ASM_USES_INTRIN
1505 uCR8 = __readcr8();
1506
1507# elif RT_INLINE_ASM_GNU_STYLE
1508 __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
1509# else
1510 __asm
1511 {
1512 mov rax, cr8
1513 mov [uCR8], rax
1514 }
1515# endif
1516 return uCR8;
1517# else /* !RT_ARCH_AMD64 */
1518 return 0;
1519# endif /* !RT_ARCH_AMD64 */
1520}
1521#endif
1522
1523
1524/**
1525 * Enables interrupts (EFLAGS.IF).
1526 */
1527#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1528DECLASM(void) ASMIntEnable(void);
1529#else
1530DECLINLINE(void) ASMIntEnable(void)
1531{
1532# if RT_INLINE_ASM_GNU_STYLE
1533 __asm("sti\n");
1534# elif RT_INLINE_ASM_USES_INTRIN
1535 _enable();
1536# else
1537 __asm sti
1538# endif
1539}
1540#endif
1541
1542
1543/**
1544 * Disables interrupts (!EFLAGS.IF).
1545 */
1546#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1547DECLASM(void) ASMIntDisable(void);
1548#else
1549DECLINLINE(void) ASMIntDisable(void)
1550{
1551# if RT_INLINE_ASM_GNU_STYLE
1552 __asm("cli\n");
1553# elif RT_INLINE_ASM_USES_INTRIN
1554 _disable();
1555# else
1556 __asm cli
1557# endif
1558}
1559#endif
1560
1561
1562/**
1563 * Disables interrupts and returns previous xFLAGS.
1564 */
1565#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1566DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1567#else
1568DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1569{
1570 RTCCUINTREG xFlags;
1571# if RT_INLINE_ASM_GNU_STYLE
1572# ifdef RT_ARCH_AMD64
1573 __asm__ __volatile__("pushfq\n\t"
1574 "cli\n\t"
1575 "popq %0\n\t"
1576 : "=r" (xFlags));
1577# else
1578 __asm__ __volatile__("pushfl\n\t"
1579 "cli\n\t"
1580 "popl %0\n\t"
1581 : "=r" (xFlags));
1582# endif
1583# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
1584 xFlags = ASMGetFlags();
1585 _disable();
1586# else
1587 __asm {
1588 pushfd
1589 cli
1590 pop [xFlags]
1591 }
1592# endif
1593 return xFlags;
1594}
1595#endif
1596
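/*
 * Usage sketch (editorial addition): the save/disable/restore pattern built
 * from ASMIntDisableFlags() and ASMSetFlags(). Only valid in a context that
 * may execute CLI (ring 0 or suitable IOPL); the function name is
 * hypothetical.
 */
DECLINLINE(void) ExampleWithInterruptsDisabled(void)
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
    /* ... touch state that an interrupt handler on this CPU may also touch ... */
    ASMSetFlags(fSavedFlags);   /* restores the previous EFLAGS.IF */
}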
1597
1598/**
1599 * Are interrupts enabled?
1600 *
1601 * @returns true / false.
1602 */
1603DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
1604{
1605 RTCCUINTREG uFlags = ASMGetFlags();
1606 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
1607}
1608
1609
1610/**
1611 * Halts the CPU until interrupted.
1612 */
1613#if RT_INLINE_ASM_EXTERNAL
1614DECLASM(void) ASMHalt(void);
1615#else
1616DECLINLINE(void) ASMHalt(void)
1617{
1618# if RT_INLINE_ASM_GNU_STYLE
1619 __asm__ __volatile__("hlt\n\t");
1620# else
1621 __asm {
1622 hlt
1623 }
1624# endif
1625}
1626#endif
1627
1628
1629/**
1630 * Reads a machine specific register.
1631 *
1632 * @returns Register content.
1633 * @param uRegister Register to read.
1634 */
1635#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1636DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
1637#else
1638DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
1639{
1640 RTUINT64U u;
1641# if RT_INLINE_ASM_GNU_STYLE
1642 __asm__ __volatile__("rdmsr\n\t"
1643 : "=a" (u.s.Lo),
1644 "=d" (u.s.Hi)
1645 : "c" (uRegister));
1646
1647# elif RT_INLINE_ASM_USES_INTRIN
1648 u.u = __readmsr(uRegister);
1649
1650# else
1651 __asm
1652 {
1653 mov ecx, [uRegister]
1654 rdmsr
1655 mov [u.s.Lo], eax
1656 mov [u.s.Hi], edx
1657 }
1658# endif
1659
1660 return u.u;
1661}
1662#endif
1663
1664
1665/**
1666 * Writes a machine specific register.
1667 *
1669 * @param uRegister Register to write to.
1670 * @param u64Val Value to write.
1671 */
1672#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1673DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
1674#else
1675DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
1676{
1677 RTUINT64U u;
1678
1679 u.u = u64Val;
1680# if RT_INLINE_ASM_GNU_STYLE
1681 __asm__ __volatile__("wrmsr\n\t"
1682 ::"a" (u.s.Lo),
1683 "d" (u.s.Hi),
1684 "c" (uRegister));
1685
1686# elif RT_INLINE_ASM_USES_INTRIN
1687 __writemsr(uRegister, u.u);
1688
1689# else
1690 __asm
1691 {
1692 mov ecx, [uRegister]
1693 mov edx, [u.s.Hi]
1694 mov eax, [u.s.Lo]
1695 wrmsr
1696 }
1697# endif
1698}
1699#endif
1700
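/*
 * Usage sketch (editorial addition): reading and masking an MSR. 0x1b is
 * IA32_APIC_BASE; RDMSR faults outside ring 0, so this is kernel-only. The
 * function name is hypothetical.
 */
DECLINLINE(uint64_t) ExampleGetApicBase(void)
{
    uint64_t const uApicBase = ASMRdMsr(0x1b /* IA32_APIC_BASE */);
    return uApicBase & ~(uint64_t)0xfff;    /* strip the flag bits; bits 12 and up hold the physical base */
}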
1701
1702/**
1703 * Reads low part of a machine specific register.
1704 *
1705 * @returns Register content.
1706 * @param uRegister Register to read.
1707 */
1708#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1709DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
1710#else
1711DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
1712{
1713 uint32_t u32;
1714# if RT_INLINE_ASM_GNU_STYLE
1715 __asm__ __volatile__("rdmsr\n\t"
1716 : "=a" (u32)
1717 : "c" (uRegister)
1718 : "edx");
1719
1720# elif RT_INLINE_ASM_USES_INTRIN
1721 u32 = (uint32_t)__readmsr(uRegister);
1722
1723#else
1724 __asm
1725 {
1726 mov ecx, [uRegister]
1727 rdmsr
1728 mov [u32], eax
1729 }
1730# endif
1731
1732 return u32;
1733}
1734#endif
1735
1736
1737/**
1738 * Reads high part of a machine specific register.
1739 *
1740 * @returns Register content.
1741 * @param uRegister Register to read.
1742 */
1743#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1744DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
1745#else
1746DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
1747{
1748 uint32_t u32;
1749# if RT_INLINE_ASM_GNU_STYLE
1750 __asm__ __volatile__("rdmsr\n\t"
1751 : "=d" (u32)
1752 : "c" (uRegister)
1753 : "eax");
1754
1755# elif RT_INLINE_ASM_USES_INTRIN
1756 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
1757
1758# else
1759 __asm
1760 {
1761 mov ecx, [uRegister]
1762 rdmsr
1763 mov [u32], edx
1764 }
1765# endif
1766
1767 return u32;
1768}
1769#endif
1770
1771
1772/**
1773 * Gets dr0.
1774 *
1775 * @returns dr0.
1776 */
1777#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1778DECLASM(RTCCUINTREG) ASMGetDR0(void);
1779#else
1780DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
1781{
1782 RTCCUINTREG uDR0;
1783# if RT_INLINE_ASM_USES_INTRIN
1784 uDR0 = __readdr(0);
1785# elif RT_INLINE_ASM_GNU_STYLE
1786# ifdef RT_ARCH_AMD64
1787 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
1788# else
1789 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
1790# endif
1791# else
1792 __asm
1793 {
1794# ifdef RT_ARCH_AMD64
1795 mov rax, dr0
1796 mov [uDR0], rax
1797# else
1798 mov eax, dr0
1799 mov [uDR0], eax
1800# endif
1801 }
1802# endif
1803 return uDR0;
1804}
1805#endif
1806
1807
1808/**
1809 * Gets dr1.
1810 *
1811 * @returns dr1.
1812 */
1813#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1814DECLASM(RTCCUINTREG) ASMGetDR1(void);
1815#else
1816DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
1817{
1818 RTCCUINTREG uDR1;
1819# if RT_INLINE_ASM_USES_INTRIN
1820 uDR1 = __readdr(1);
1821# elif RT_INLINE_ASM_GNU_STYLE
1822# ifdef RT_ARCH_AMD64
1823 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
1824# else
1825 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
1826# endif
1827# else
1828 __asm
1829 {
1830# ifdef RT_ARCH_AMD64
1831 mov rax, dr1
1832 mov [uDR1], rax
1833# else
1834 mov eax, dr1
1835 mov [uDR1], eax
1836# endif
1837 }
1838# endif
1839 return uDR1;
1840}
1841#endif
1842
1843
1844/**
1845 * Gets dr2.
1846 *
1847 * @returns dr2.
1848 */
1849#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1850DECLASM(RTCCUINTREG) ASMGetDR2(void);
1851#else
1852DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
1853{
1854 RTCCUINTREG uDR2;
1855# if RT_INLINE_ASM_USES_INTRIN
1856 uDR2 = __readdr(2);
1857# elif RT_INLINE_ASM_GNU_STYLE
1858# ifdef RT_ARCH_AMD64
1859 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
1860# else
1861 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
1862# endif
1863# else
1864 __asm
1865 {
1866# ifdef RT_ARCH_AMD64
1867 mov rax, dr2
1868 mov [uDR2], rax
1869# else
1870 mov eax, dr2
1871 mov [uDR2], eax
1872# endif
1873 }
1874# endif
1875 return uDR2;
1876}
1877#endif
1878
1879
1880/**
1881 * Gets dr3.
1882 *
1883 * @returns dr3.
1884 */
1885#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1886DECLASM(RTCCUINTREG) ASMGetDR3(void);
1887#else
1888DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
1889{
1890 RTCCUINTREG uDR3;
1891# if RT_INLINE_ASM_USES_INTRIN
1892 uDR3 = __readdr(3);
1893# elif RT_INLINE_ASM_GNU_STYLE
1894# ifdef RT_ARCH_AMD64
1895 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
1896# else
1897 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
1898# endif
1899# else
1900 __asm
1901 {
1902# ifdef RT_ARCH_AMD64
1903 mov rax, dr3
1904 mov [uDR3], rax
1905# else
1906 mov eax, dr3
1907 mov [uDR3], eax
1908# endif
1909 }
1910# endif
1911 return uDR3;
1912}
1913#endif
1914
1915
1916/**
1917 * Gets dr6.
1918 *
1919 * @returns dr6.
1920 */
1921#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1922DECLASM(RTCCUINTREG) ASMGetDR6(void);
1923#else
1924DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
1925{
1926 RTCCUINTREG uDR6;
1927# if RT_INLINE_ASM_USES_INTRIN
1928 uDR6 = __readdr(6);
1929# elif RT_INLINE_ASM_GNU_STYLE
1930# ifdef RT_ARCH_AMD64
1931 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
1932# else
1933 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
1934# endif
1935# else
1936 __asm
1937 {
1938# ifdef RT_ARCH_AMD64
1939 mov rax, dr6
1940 mov [uDR6], rax
1941# else
1942 mov eax, dr6
1943 mov [uDR6], eax
1944# endif
1945 }
1946# endif
1947 return uDR6;
1948}
1949#endif
1950
1951
1952/**
1953 * Reads and clears DR6.
1954 *
1955 * @returns DR6.
1956 */
1957#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1958DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
1959#else
1960DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
1961{
1962 RTCCUINTREG uDR6;
1963# if RT_INLINE_ASM_USES_INTRIN
1964 uDR6 = __readdr(6);
1965 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1966# elif RT_INLINE_ASM_GNU_STYLE
1967 RTCCUINTREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1968# ifdef RT_ARCH_AMD64
1969 __asm__ __volatile__("movq %%dr6, %0\n\t"
1970 "movq %1, %%dr6\n\t"
1971 : "=r" (uDR6)
1972 : "r" (uNewValue));
1973# else
1974 __asm__ __volatile__("movl %%dr6, %0\n\t"
1975 "movl %1, %%dr6\n\t"
1976 : "=r" (uDR6)
1977 : "r" (uNewValue));
1978# endif
1979# else
1980 __asm
1981 {
1982# ifdef RT_ARCH_AMD64
1983 mov rax, dr6
1984 mov [uDR6], rax
1985 mov rcx, rax
1986 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1987 mov dr6, rcx
1988# else
1989 mov eax, dr6
1990 mov [uDR6], eax
1991 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 is zero. */
1992 mov dr6, ecx
1993# endif
1994 }
1995# endif
1996 return uDR6;
1997}
1998#endif
1999
2000
2001/**
2002 * Gets dr7.
2003 *
2004 * @returns dr7.
2005 */
2006#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2007DECLASM(RTCCUINTREG) ASMGetDR7(void);
2008#else
2009DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
2010{
2011 RTCCUINTREG uDR7;
2012# if RT_INLINE_ASM_USES_INTRIN
2013 uDR7 = __readdr(7);
2014# elif RT_INLINE_ASM_GNU_STYLE
2015# ifdef RT_ARCH_AMD64
2016 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
2017# else
2018 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
2019# endif
2020# else
2021 __asm
2022 {
2023# ifdef RT_ARCH_AMD64
2024 mov rax, dr7
2025 mov [uDR7], rax
2026# else
2027 mov eax, dr7
2028 mov [uDR7], eax
2029# endif
2030 }
2031# endif
2032 return uDR7;
2033}
2034#endif
2035
2036
2037/**
2038 * Sets dr0.
2039 *
2040 * @param uDRVal Debug register value to write
2041 */
2042#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2043DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
2044#else
2045DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
2046{
2047# if RT_INLINE_ASM_USES_INTRIN
2048 __writedr(0, uDRVal);
2049# elif RT_INLINE_ASM_GNU_STYLE
2050# ifdef RT_ARCH_AMD64
2051 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2052# else
2053 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2054# endif
2055# else
2056 __asm
2057 {
2058# ifdef RT_ARCH_AMD64
2059 mov rax, [uDRVal]
2060 mov dr0, rax
2061# else
2062 mov eax, [uDRVal]
2063 mov dr0, eax
2064# endif
2065 }
2066# endif
2067}
2068#endif
2069
2070
2071/**
2072 * Sets dr1.
2073 *
2074 * @param uDRVal Debug register value to write
2075 */
2076#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2077DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
2078#else
2079DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
2080{
2081# if RT_INLINE_ASM_USES_INTRIN
2082 __writedr(1, uDRVal);
2083# elif RT_INLINE_ASM_GNU_STYLE
2084# ifdef RT_ARCH_AMD64
2085 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2086# else
2087 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2088# endif
2089# else
2090 __asm
2091 {
2092# ifdef RT_ARCH_AMD64
2093 mov rax, [uDRVal]
2094 mov dr1, rax
2095# else
2096 mov eax, [uDRVal]
2097 mov dr1, eax
2098# endif
2099 }
2100# endif
2101}
2102#endif
2103
2104
2105/**
2106 * Sets dr2.
2107 *
2108 * @param uDRVal Debug register value to write
2109 */
2110#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2111DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
2112#else
2113DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
2114{
2115# if RT_INLINE_ASM_USES_INTRIN
2116 __writedr(2, uDRVal);
2117# elif RT_INLINE_ASM_GNU_STYLE
2118# ifdef RT_ARCH_AMD64
2119 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2120# else
2121 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2122# endif
2123# else
2124 __asm
2125 {
2126# ifdef RT_ARCH_AMD64
2127 mov rax, [uDRVal]
2128 mov dr2, rax
2129# else
2130 mov eax, [uDRVal]
2131 mov dr2, eax
2132# endif
2133 }
2134# endif
2135}
2136#endif
2137
2138
2139/**
2140 * Sets dr3.
2141 *
2142 * @param uDRVal Debug register value to write
2143 */
2144#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2145DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
2146#else
2147DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
2148{
2149# if RT_INLINE_ASM_USES_INTRIN
2150 __writedr(3, uDRVal);
2151# elif RT_INLINE_ASM_GNU_STYLE
2152# ifdef RT_ARCH_AMD64
2153 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2154# else
2155 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2156# endif
2157# else
2158 __asm
2159 {
2160# ifdef RT_ARCH_AMD64
2161 mov rax, [uDRVal]
2162 mov dr3, rax
2163# else
2164 mov eax, [uDRVal]
2165 mov dr3, eax
2166# endif
2167 }
2168# endif
2169}
2170#endif
2171
2172
2173/**
2174 * Sets dr6.
2175 *
2176 * @param uDRVal Debug register value to write
2177 */
2178#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2179DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
2180#else
2181DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
2182{
2183# if RT_INLINE_ASM_USES_INTRIN
2184 __writedr(6, uDRVal);
2185# elif RT_INLINE_ASM_GNU_STYLE
2186# ifdef RT_ARCH_AMD64
2187 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2188# else
2189 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2190# endif
2191# else
2192 __asm
2193 {
2194# ifdef RT_ARCH_AMD64
2195 mov rax, [uDRVal]
2196 mov dr6, rax
2197# else
2198 mov eax, [uDRVal]
2199 mov dr6, eax
2200# endif
2201 }
2202# endif
2203}
2204#endif
2205
2206
2207/**
2208 * Sets dr7.
2209 *
2210 * @param uDRVal Debug register value to write
2211 */
2212#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2213DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
2214#else
2215DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
2216{
2217# if RT_INLINE_ASM_USES_INTRIN
2218 __writedr(7, uDRVal);
2219# elif RT_INLINE_ASM_GNU_STYLE
2220# ifdef RT_ARCH_AMD64
2221 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2222# else
2223 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2224# endif
2225# else
2226 __asm
2227 {
2228# ifdef RT_ARCH_AMD64
2229 mov rax, [uDRVal]
2230 mov dr7, rax
2231# else
2232 mov eax, [uDRVal]
2233 mov dr7, eax
2234# endif
2235 }
2236# endif
2237}
2238#endif
2239
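/*
 * Usage sketch (editorial addition): arming hardware breakpoint 0 as an
 * instruction breakpoint using the accessors above. L0 is DR7 bit 0, and
 * R/W0 and LEN0 (DR7 bits 16-19) must both be zero for execution breaks.
 * Ring 0 only; a real implementation would coordinate ownership of the
 * debug registers. The function name is hypothetical.
 */
DECLINLINE(void) ExampleArmExecBreakpoint(RTCCUINTREG uLinearAddr)
{
    ASMSetDR0(uLinearAddr);                                     /* address to trap on */
    ASMSetDR7(  (ASMGetDR7() & ~(RTCCUINTREG)UINT32_C(0xf0000)) /* R/W0 = 00, LEN0 = 00 */
              | RT_BIT_32(0));                                  /* L0: local enable */
}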
2240
2241/**
2242 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2243 *
2244 * @param Port I/O port to write to.
2245 * @param u8 8-bit integer to write.
2246 */
2247#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2248DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2249#else
2250DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2251{
2252# if RT_INLINE_ASM_GNU_STYLE
2253 __asm__ __volatile__("outb %b1, %w0\n\t"
2254 :: "Nd" (Port),
2255 "a" (u8));
2256
2257# elif RT_INLINE_ASM_USES_INTRIN
2258 __outbyte(Port, u8);
2259
2260# else
2261 __asm
2262 {
2263 mov dx, [Port]
2264 mov al, [u8]
2265 out dx, al
2266 }
2267# endif
2268}
2269#endif
2270
2271
2272/**
2273 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2274 *
2275 * @returns 8-bit integer.
2276 * @param Port I/O port to read from.
2277 */
2278#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2279DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2280#else
2281DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2282{
2283 uint8_t u8;
2284# if RT_INLINE_ASM_GNU_STYLE
2285 __asm__ __volatile__("inb %w1, %b0\n\t"
2286 : "=a" (u8)
2287 : "Nd" (Port));
2288
2289# elif RT_INLINE_ASM_USES_INTRIN
2290 u8 = __inbyte(Port);
2291
2292# else
2293 __asm
2294 {
2295 mov dx, [Port]
2296 in al, dx
2297 mov [u8], al
2298 }
2299# endif
2300 return u8;
2301}
2302#endif
2303
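/*
 * Usage sketch (editorial addition): the classic index/data pair on ports
 * 0x70/0x71 (CMOS/RTC). Needs ring 0 (or I/O permission), and masking bit 7
 * of the index keeps NMIs enabled. The function name is hypothetical.
 */
DECLINLINE(uint8_t) ExampleReadCmos(uint8_t offReg)
{
    ASMOutU8(0x70, offReg & 0x7f);  /* select the CMOS register */
    return ASMInU8(0x71);           /* fetch its value */
}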
2304
2305/**
2306 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2307 *
2308 * @param Port I/O port to write to.
2309 * @param u16 16-bit integer to write.
2310 */
2311#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2312DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2313#else
2314DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2315{
2316# if RT_INLINE_ASM_GNU_STYLE
2317 __asm__ __volatile__("outw %w1, %w0\n\t"
2318 :: "Nd" (Port),
2319 "a" (u16));
2320
2321# elif RT_INLINE_ASM_USES_INTRIN
2322 __outword(Port, u16);
2323
2324# else
2325 __asm
2326 {
2327 mov dx, [Port]
2328 mov ax, [u16]
2329 out dx, ax
2330 }
2331# endif
2332}
2333#endif
2334
2335
2336/**
2337 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2338 *
2339 * @returns 16-bit integer.
2340 * @param Port I/O port to read from.
2341 */
2342#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2343DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2344#else
2345DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2346{
2347 uint16_t u16;
2348# if RT_INLINE_ASM_GNU_STYLE
2349 __asm__ __volatile__("inw %w1, %w0\n\t"
2350 : "=a" (u16)
2351 : "Nd" (Port));
2352
2353# elif RT_INLINE_ASM_USES_INTRIN
2354 u16 = __inword(Port);
2355
2356# else
2357 __asm
2358 {
2359 mov dx, [Port]
2360 in ax, dx
2361 mov [u16], ax
2362 }
2363# endif
2364 return u16;
2365}
2366#endif
2367
2368
2369/**
2370 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2371 *
2372 * @param Port I/O port to write to.
2373 * @param u32 32-bit integer to write.
2374 */
2375#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2376DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2377#else
2378DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2379{
2380# if RT_INLINE_ASM_GNU_STYLE
2381 __asm__ __volatile__("outl %1, %w0\n\t"
2382 :: "Nd" (Port),
2383 "a" (u32));
2384
2385# elif RT_INLINE_ASM_USES_INTRIN
2386 __outdword(Port, u32);
2387
2388# else
2389 __asm
2390 {
2391 mov dx, [Port]
2392 mov eax, [u32]
2393 out dx, eax
2394 }
2395# endif
2396}
2397#endif
2398
2399
2400/**
2401 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2402 *
2403 * @returns 32-bit integer.
2404 * @param Port I/O port to read from.
2405 */
2406#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2407DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2408#else
2409DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2410{
2411 uint32_t u32;
2412# if RT_INLINE_ASM_GNU_STYLE
2413 __asm__ __volatile__("inl %w1, %0\n\t"
2414 : "=a" (u32)
2415 : "Nd" (Port));
2416
2417# elif RT_INLINE_ASM_USES_INTRIN
2418 u32 = __indword(Port);
2419
2420# else
2421 __asm
2422 {
2423 mov dx, [Port]
2424 in eax, dx
2425 mov [u32], eax
2426 }
2427# endif
2428 return u32;
2429}
2430#endif
2431
2432
2433/**
2434 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2435 *
2436 * @param Port I/O port to write to.
2437 * @param pau8 Pointer to the string buffer.
2438 * @param c The number of items to write.
2439 */
2440#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2441DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2442#else
2443DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2444{
2445# if RT_INLINE_ASM_GNU_STYLE
2446 __asm__ __volatile__("rep; outsb\n\t"
2447 : "+S" (pau8),
2448 "+c" (c)
2449 : "d" (Port));
2450
2451# elif RT_INLINE_ASM_USES_INTRIN
2452 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2453
2454# else
2455 __asm
2456 {
2457 mov dx, [Port]
2458 mov ecx, [c]
2459 mov eax, [pau8]
2460 xchg esi, eax
2461 rep outsb
2462 xchg esi, eax
2463 }
2464# endif
2465}
2466#endif
2467
2468
2469/**
2470 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2471 *
2472 * @param Port I/O port to read from.
2473 * @param pau8 Pointer to the string buffer (output).
2474 * @param c The number of items to read.
2475 */
2476#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2477DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2478#else
2479DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2480{
2481# if RT_INLINE_ASM_GNU_STYLE
2482 __asm__ __volatile__("rep; insb\n\t"
2483 : "+D" (pau8),
2484 "+c" (c)
2485 : "d" (Port));
2486
2487# elif RT_INLINE_ASM_USES_INTRIN
2488 __inbytestring(Port, pau8, (unsigned long)c);
2489
2490# else
2491 __asm
2492 {
2493 mov dx, [Port]
2494 mov ecx, [c]
2495 mov eax, [pau8]
2496 xchg edi, eax
2497 rep insb
2498 xchg edi, eax
2499 }
2500# endif
2501}
2502#endif
2503
2504
2505/**
2506 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2507 *
2508 * @param Port I/O port to write to.
2509 * @param pau16 Pointer to the string buffer.
2510 * @param c The number of items to write.
2511 */
2512#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2513DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2514#else
2515DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2516{
2517# if RT_INLINE_ASM_GNU_STYLE
2518 __asm__ __volatile__("rep; outsw\n\t"
2519 : "+S" (pau16),
2520 "+c" (c)
2521 : "d" (Port));
2522
2523# elif RT_INLINE_ASM_USES_INTRIN
2524 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2525
2526# else
2527 __asm
2528 {
2529 mov dx, [Port]
2530 mov ecx, [c]
2531 mov eax, [pau16]
2532 xchg esi, eax
2533 rep outsw
2534 xchg esi, eax
2535 }
2536# endif
2537}
2538#endif
2539
2540
2541/**
2542 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2543 *
2544 * @param Port I/O port to read from.
2545 * @param pau16 Pointer to the string buffer (output).
2546 * @param c The number of items to read.
2547 */
2548#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2549DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2550#else
2551DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2552{
2553# if RT_INLINE_ASM_GNU_STYLE
2554 __asm__ __volatile__("rep; insw\n\t"
2555 : "+D" (pau16),
2556 "+c" (c)
2557 : "d" (Port));
2558
2559# elif RT_INLINE_ASM_USES_INTRIN
2560 __inwordstring(Port, pau16, (unsigned long)c);
2561
2562# else
2563 __asm
2564 {
2565 mov dx, [Port]
2566 mov ecx, [c]
2567 mov eax, [pau16]
2568 xchg edi, eax
2569 rep insw
2570 xchg edi, eax
2571 }
2572# endif
2573}
2574#endif
2575
2576
2577/**
2578 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2579 *
2580 * @param Port I/O port to write to.
2581 * @param pau32 Pointer to the string buffer.
2582 * @param c The number of items to write.
2583 */
2584#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2585DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2586#else
2587DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2588{
2589# if RT_INLINE_ASM_GNU_STYLE
2590 __asm__ __volatile__("rep; outsl\n\t"
2591 : "+S" (pau32),
2592 "+c" (c)
2593 : "d" (Port));
2594
2595# elif RT_INLINE_ASM_USES_INTRIN
2596 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2597
2598# else
2599 __asm
2600 {
2601 mov dx, [Port]
2602 mov ecx, [c]
2603 mov eax, [pau32]
2604 xchg esi, eax
2605 rep outsd
2606 xchg esi, eax
2607 }
2608# endif
2609}
2610#endif
2611
2612
2613/**
2614 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2615 *
2616 * @param Port I/O port to read from.
2617 * @param pau32 Pointer to the string buffer (output).
2618 * @param c The number of items to read.
2619 */
2620#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2621DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2622#else
2623DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2624{
2625# if RT_INLINE_ASM_GNU_STYLE
2626 __asm__ __volatile__("rep; insl\n\t"
2627 : "+D" (pau32),
2628 "+c" (c)
2629 : "d" (Port));
2630
2631# elif RT_INLINE_ASM_USES_INTRIN
2632 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2633
2634# else
2635 __asm
2636 {
2637 mov dx, [Port]
2638 mov ecx, [c]
2639 mov eax, [pau32]
2640 xchg edi, eax
2641 rep insd
2642 xchg edi, eax
2643 }
2644# endif
2645}
2646#endif
2647
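/*
 * Usage sketch (editorial addition): pulling one 512-byte PIO data block from
 * the primary ATA data port with ASMInStrU16(). The port number and the
 * assumption that the device has a data phase pending are illustrative only;
 * the function name is hypothetical.
 */
DECLINLINE(void) ExampleReadAtaSector(uint16_t *pau16Buf /* 256 items */)
{
    ASMInStrU16(0x1f0 /* primary ATA data port */, pau16Buf, 256);
}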
2648
2649/**
2650 * Invalidate page.
2651 *
2652 * @param pv Address of the page to invalidate.
2653 */
2654#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2655DECLASM(void) ASMInvalidatePage(void *pv);
2656#else
2657DECLINLINE(void) ASMInvalidatePage(void *pv)
2658{
2659# if RT_INLINE_ASM_USES_INTRIN
2660 __invlpg(pv);
2661
2662# elif RT_INLINE_ASM_GNU_STYLE
2663 __asm__ __volatile__("invlpg %0\n\t"
2664 : : "m" (*(uint8_t *)pv));
2665# else
2666 __asm
2667 {
2668# ifdef RT_ARCH_AMD64
2669 mov rax, [pv]
2670 invlpg [rax]
2671# else
2672 mov eax, [pv]
2673 invlpg [eax]
2674# endif
2675 }
2676# endif
2677}
2678#endif
2679
2680
2681/**
2682 * Write back the internal caches and invalidate them.
2683 */
2684#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2685DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2686#else
2687DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2688{
2689# if RT_INLINE_ASM_USES_INTRIN
2690 __wbinvd();
2691
2692# elif RT_INLINE_ASM_GNU_STYLE
2693 __asm__ __volatile__("wbinvd");
2694# else
2695 __asm
2696 {
2697 wbinvd
2698 }
2699# endif
2700}
2701#endif
2702
2703
2704/**
2705 * Invalidate internal and (perhaps) external caches without first
2706 * flushing dirty cache lines. Use with extreme care.
2707 */
2708#if RT_INLINE_ASM_EXTERNAL
2709DECLASM(void) ASMInvalidateInternalCaches(void);
2710#else
2711DECLINLINE(void) ASMInvalidateInternalCaches(void)
2712{
2713# if RT_INLINE_ASM_GNU_STYLE
2714 __asm__ __volatile__("invd");
2715# else
2716 __asm
2717 {
2718 invd
2719 }
2720# endif
2721}
2722#endif
2723
2724
2725/**
2726 * Memory load/store fence, waits for any pending writes and reads to complete.
2727 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2728 */
2729DECLINLINE(void) ASMMemoryFenceSSE2(void)
2730{
2731#if RT_INLINE_ASM_GNU_STYLE
2732 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
2733#elif RT_INLINE_ASM_USES_INTRIN
2734 _mm_mfence();
2735#else
2736 __asm
2737 {
2738 _emit 0x0f
2739 _emit 0xae
2740 _emit 0xf0
2741 }
2742#endif
2743}
2744
2745
2746/**
2747 * Memory store fence, waits for any writes to complete.
2748 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
2749 */
2750DECLINLINE(void) ASMWriteFenceSSE(void)
2751{
2752#if RT_INLINE_ASM_GNU_STYLE
2753 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
2754#elif RT_INLINE_ASM_USES_INTRIN
2755 _mm_sfence();
2756#else
2757 __asm
2758 {
2759 _emit 0x0f
2760 _emit 0xae
2761 _emit 0xf8
2762 }
2763#endif
2764}
2765
2766
2767/**
2768 * Memory load fence, waits for any pending reads to complete.
2769 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2770 */
2771DECLINLINE(void) ASMReadFenceSSE2(void)
2772{
2773#if RT_INLINE_ASM_GNU_STYLE
2774 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
2775#elif RT_INLINE_ASM_USES_INTRIN
2776 _mm_lfence();
2777#else
2778 __asm
2779 {
2780 _emit 0x0f
2781 _emit 0xae
2782 _emit 0xe8
2783 }
2784#endif
2785}
2786
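/*
 * Usage sketch (editorial addition): pairing a payload store with
 * ASMWriteFenceSSE() before publishing a ready flag. Ordinary x86 stores are
 * already strongly ordered, so the fence mainly matters when the payload was
 * written with non-temporal stores or to write-combining memory; it is shown
 * here purely to illustrate the API. Proper atomics are omitted, and the
 * function name is hypothetical.
 */
DECLINLINE(void) ExamplePublishU32(volatile uint32_t *puPayload, volatile uint32_t *pfReady, uint32_t uValue)
{
    *puPayload = uValue;
    ASMWriteFenceSSE();     /* drain pending (incl. non-temporal) stores */
    *pfReady = 1;
}
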
2787/** @} */
2788#endif
2789