VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@44260

Last change on this file since 44260 was 44260, checked in by vboxsync, 12 years ago

Fixed ASMCpuId_Idx_ECX on windows (didn't consider ECX input).

1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2010 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
31# error "Not on AMD64 or x86"
32#endif
33
34#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
35# include <intrin.h>
36 /* Emit the intrinsics at all optimization levels. */
37# pragma intrinsic(_ReadWriteBarrier)
38# pragma intrinsic(__cpuid)
39# pragma intrinsic(_enable)
40# pragma intrinsic(_disable)
41# pragma intrinsic(__rdtsc)
42# pragma intrinsic(__readmsr)
43# pragma intrinsic(__writemsr)
44# pragma intrinsic(__outbyte)
45# pragma intrinsic(__outbytestring)
46# pragma intrinsic(__outword)
47# pragma intrinsic(__outwordstring)
48# pragma intrinsic(__outdword)
49# pragma intrinsic(__outdwordstring)
50# pragma intrinsic(__inbyte)
51# pragma intrinsic(__inbytestring)
52# pragma intrinsic(__inword)
53# pragma intrinsic(__inwordstring)
54# pragma intrinsic(__indword)
55# pragma intrinsic(__indwordstring)
56# pragma intrinsic(__invlpg)
57# pragma intrinsic(__wbinvd)
58# pragma intrinsic(__readcr0)
59# pragma intrinsic(__readcr2)
60# pragma intrinsic(__readcr3)
61# pragma intrinsic(__readcr4)
62# pragma intrinsic(__writecr0)
63# pragma intrinsic(__writecr3)
64# pragma intrinsic(__writecr4)
65# pragma intrinsic(__readdr)
66# pragma intrinsic(__writedr)
67# ifdef RT_ARCH_AMD64
68# pragma intrinsic(__readcr8)
69# pragma intrinsic(__writecr8)
70# endif
71#endif
72
73
74
75/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
76 * @ingroup grp_rt_asm
77 * @{
78 */
79
80/** @todo find a more proper place for this structure? */
81#pragma pack(1)
82/** IDTR */
83typedef struct RTIDTR
84{
85 /** Size of the IDT. */
86 uint16_t cbIdt;
87 /** Address of the IDT. */
88 uintptr_t pIdt;
89} RTIDTR, *PRTIDTR;
90#pragma pack()
91
92#pragma pack(1)
93/** GDTR */
94typedef struct RTGDTR
95{
96 /** Size of the GDT. */
97 uint16_t cbGdt;
98 /** Address of the GDT. */
99 uintptr_t pGdt;
100} RTGDTR, *PRTGDTR;
101#pragma pack()
102
103
104/**
105 * Gets the content of the IDTR CPU register.
106 * @param pIdtr Where to store the IDTR contents.
107 */
108#if RT_INLINE_ASM_EXTERNAL
109DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
110#else
111DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
112{
113# if RT_INLINE_ASM_GNU_STYLE
114 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
115# else
116 __asm
117 {
118# ifdef RT_ARCH_AMD64
119 mov rax, [pIdtr]
120 sidt [rax]
121# else
122 mov eax, [pIdtr]
123 sidt [eax]
124# endif
125 }
126# endif
127}
128#endif
129
130
131/**
132 * Sets the content of the IDTR CPU register.
133 * @param pIdtr Where to load the IDTR contents from.
134 */
135#if RT_INLINE_ASM_EXTERNAL
136DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
137#else
138DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
139{
140# if RT_INLINE_ASM_GNU_STYLE
141 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
142# else
143 __asm
144 {
145# ifdef RT_ARCH_AMD64
146 mov rax, [pIdtr]
147 lidt [rax]
148# else
149 mov eax, [pIdtr]
150 lidt [eax]
151# endif
152 }
153# endif
154}
155#endif
156
157
158/**
159 * Gets the content of the GDTR CPU register.
160 * @param pGdtr Where to store the GDTR contents.
161 */
162#if RT_INLINE_ASM_EXTERNAL
163DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
164#else
165DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
166{
167# if RT_INLINE_ASM_GNU_STYLE
168 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
169# else
170 __asm
171 {
172# ifdef RT_ARCH_AMD64
173 mov rax, [pGdtr]
174 sgdt [rax]
175# else
176 mov eax, [pGdtr]
177 sgdt [eax]
178# endif
179 }
180# endif
181}
182#endif
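
/**
 * Example: capturing both descriptor-table registers, e.g. for diagnostics.
 * A minimal sketch:
 * @code
 * RTIDTR Idtr;
 * RTGDTR Gdtr;
 * ASMGetIDTR(&Idtr);
 * ASMGetGDTR(&Gdtr);
 * // Idtr.cbIdt/pIdt and Gdtr.cbGdt/pGdt now hold the limits and bases.
 * @endcode
 */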
183
184/**
185 * Get the cs register.
186 * @returns cs.
187 */
188#if RT_INLINE_ASM_EXTERNAL
189DECLASM(RTSEL) ASMGetCS(void);
190#else
191DECLINLINE(RTSEL) ASMGetCS(void)
192{
193 RTSEL SelCS;
194# if RT_INLINE_ASM_GNU_STYLE
195 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
196# else
197 __asm
198 {
199 mov ax, cs
200 mov [SelCS], ax
201 }
202# endif
203 return SelCS;
204}
205#endif
206
207
208/**
209 * Get the DS register.
210 * @returns DS.
211 */
212#if RT_INLINE_ASM_EXTERNAL
213DECLASM(RTSEL) ASMGetDS(void);
214#else
215DECLINLINE(RTSEL) ASMGetDS(void)
216{
217 RTSEL SelDS;
218# if RT_INLINE_ASM_GNU_STYLE
219 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
220# else
221 __asm
222 {
223 mov ax, ds
224 mov [SelDS], ax
225 }
226# endif
227 return SelDS;
228}
229#endif
230
231
232/**
233 * Get the ES register.
234 * @returns ES.
235 */
236#if RT_INLINE_ASM_EXTERNAL
237DECLASM(RTSEL) ASMGetES(void);
238#else
239DECLINLINE(RTSEL) ASMGetES(void)
240{
241 RTSEL SelES;
242# if RT_INLINE_ASM_GNU_STYLE
243 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
244# else
245 __asm
246 {
247 mov ax, es
248 mov [SelES], ax
249 }
250# endif
251 return SelES;
252}
253#endif
254
255
256/**
257 * Get the FS register.
258 * @returns FS.
259 */
260#if RT_INLINE_ASM_EXTERNAL
261DECLASM(RTSEL) ASMGetFS(void);
262#else
263DECLINLINE(RTSEL) ASMGetFS(void)
264{
265 RTSEL SelFS;
266# if RT_INLINE_ASM_GNU_STYLE
267 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
268# else
269 __asm
270 {
271 mov ax, fs
272 mov [SelFS], ax
273 }
274# endif
275 return SelFS;
276}
277#endif
278
279
280/**
281 * Get the GS register.
282 * @returns GS.
283 */
284#if RT_INLINE_ASM_EXTERNAL
285DECLASM(RTSEL) ASMGetGS(void);
286#else
287DECLINLINE(RTSEL) ASMGetGS(void)
288{
289 RTSEL SelGS;
290# if RT_INLINE_ASM_GNU_STYLE
291 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
292# else
293 __asm
294 {
295 mov ax, gs
296 mov [SelGS], ax
297 }
298# endif
299 return SelGS;
300}
301#endif
302
303
304/**
305 * Get the SS register.
306 * @returns SS.
307 */
308#if RT_INLINE_ASM_EXTERNAL
309DECLASM(RTSEL) ASMGetSS(void);
310#else
311DECLINLINE(RTSEL) ASMGetSS(void)
312{
313 RTSEL SelSS;
314# if RT_INLINE_ASM_GNU_STYLE
315 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
316# else
317 __asm
318 {
319 mov ax, ss
320 mov [SelSS], ax
321 }
322# endif
323 return SelSS;
324}
325#endif
326
327
328/**
329 * Get the TR register.
330 * @returns TR.
331 */
332#if RT_INLINE_ASM_EXTERNAL
333DECLASM(RTSEL) ASMGetTR(void);
334#else
335DECLINLINE(RTSEL) ASMGetTR(void)
336{
337 RTSEL SelTR;
338# if RT_INLINE_ASM_GNU_STYLE
339 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
340# else
341 __asm
342 {
343 str ax
344 mov [SelTR], ax
345 }
346# endif
347 return SelTR;
348}
349#endif
350
351
352/**
353 * Get the [RE]FLAGS register.
354 * @returns [RE]FLAGS.
355 */
356#if RT_INLINE_ASM_EXTERNAL
357DECLASM(RTCCUINTREG) ASMGetFlags(void);
358#else
359DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
360{
361 RTCCUINTREG uFlags;
362# if RT_INLINE_ASM_GNU_STYLE
363# ifdef RT_ARCH_AMD64
364 __asm__ __volatile__("pushfq\n\t"
365 "popq %0\n\t"
366 : "=r" (uFlags));
367# else
368 __asm__ __volatile__("pushfl\n\t"
369 "popl %0\n\t"
370 : "=r" (uFlags));
371# endif
372# else
373 __asm
374 {
375# ifdef RT_ARCH_AMD64
376 pushfq
377 pop [uFlags]
378# else
379 pushfd
380 pop [uFlags]
381# endif
382 }
383# endif
384 return uFlags;
385}
386#endif
387
388
389/**
390 * Set the [RE]FLAGS register.
391 * @param uFlags The new [RE]FLAGS value.
392 */
393#if RT_INLINE_ASM_EXTERNAL
394DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
395#else
396DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
397{
398# if RT_INLINE_ASM_GNU_STYLE
399# ifdef RT_ARCH_AMD64
400 __asm__ __volatile__("pushq %0\n\t"
401 "popfq\n\t"
402 : : "g" (uFlags));
403# else
404 __asm__ __volatile__("pushl %0\n\t"
405 "popfl\n\t"
406 : : "g" (uFlags));
407# endif
408# else
409 __asm
410 {
411# ifdef RT_ARCH_AMD64
412 push [uFlags]
413 popfq
414# else
415 push [uFlags]
416 popfd
417# endif
418 }
419# endif
420}
421#endif
422
423
424/**
425 * Gets the content of the CPU timestamp counter register.
426 *
427 * @returns TSC.
428 */
429#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
430DECLASM(uint64_t) ASMReadTSC(void);
431#else
432DECLINLINE(uint64_t) ASMReadTSC(void)
433{
434 RTUINT64U u;
435# if RT_INLINE_ASM_GNU_STYLE
436 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
437# else
438# if RT_INLINE_ASM_USES_INTRIN
439 u.u = __rdtsc();
440# else
441 __asm
442 {
443 rdtsc
444 mov [u.s.Lo], eax
445 mov [u.s.Hi], edx
446 }
447# endif
448# endif
449 return u.u;
450}
451#endif
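
/**
 * Example: a minimal cycle-counting sketch built on ASMReadTSC(). RDTSC is
 * not a serializing instruction, so without fencing this only yields a
 * rough estimate:
 * @code
 * uint64_t const uStart = ASMReadTSC();
 * // ... the work being measured ...
 * uint64_t const cTicks = ASMReadTSC() - uStart;
 * @endcode
 */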
452
453
454/**
455 * Performs the cpuid instruction returning all registers.
456 *
457 * @param uOperator CPUID operation (eax).
458 * @param pvEAX Where to store eax.
459 * @param pvEBX Where to store ebx.
460 * @param pvECX Where to store ecx.
461 * @param pvEDX Where to store edx.
462 * @remark We're using void pointers to ease the use of special bitfield structures and such.
463 */
464#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
465DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
466#else
467DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
468{
469# if RT_INLINE_ASM_GNU_STYLE
470# ifdef RT_ARCH_AMD64
471 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
472 __asm__ __volatile__ ("cpuid\n\t"
473 : "=a" (uRAX),
474 "=b" (uRBX),
475 "=c" (uRCX),
476 "=d" (uRDX)
477 : "0" (uOperator), "2" (0));
478 *(uint32_t *)pvEAX = (uint32_t)uRAX;
479 *(uint32_t *)pvEBX = (uint32_t)uRBX;
480 *(uint32_t *)pvECX = (uint32_t)uRCX;
481 *(uint32_t *)pvEDX = (uint32_t)uRDX;
482# else
483 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
484 "cpuid\n\t"
485 "xchgl %%ebx, %1\n\t"
486 : "=a" (*(uint32_t *)pvEAX),
487 "=r" (*(uint32_t *)pvEBX),
488 "=c" (*(uint32_t *)pvECX),
489 "=d" (*(uint32_t *)pvEDX)
490 : "0" (uOperator), "2" (0));
491# endif
492
493# elif RT_INLINE_ASM_USES_INTRIN
494 int aInfo[4];
495 __cpuid(aInfo, uOperator);
496 *(uint32_t *)pvEAX = aInfo[0];
497 *(uint32_t *)pvEBX = aInfo[1];
498 *(uint32_t *)pvECX = aInfo[2];
499 *(uint32_t *)pvEDX = aInfo[3];
500
501# else
502 uint32_t uEAX;
503 uint32_t uEBX;
504 uint32_t uECX;
505 uint32_t uEDX;
506 __asm
507 {
508 push ebx
509 mov eax, [uOperator]
510 cpuid
511 mov [uEAX], eax
512 mov [uEBX], ebx
513 mov [uECX], ecx
514 mov [uEDX], edx
515 pop ebx
516 }
517 *(uint32_t *)pvEAX = uEAX;
518 *(uint32_t *)pvEBX = uEBX;
519 *(uint32_t *)pvECX = uECX;
520 *(uint32_t *)pvEDX = uEDX;
521# endif
522}
523#endif
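
/**
 * Example: reading the CPU vendor string via leaf 0. The twelve vendor
 * characters are returned in EBX, EDX and ECX, in that order (a sketch,
 * assuming string.h for memcpy):
 * @code
 * uint32_t uMaxLeaf, uEbx, uEcx, uEdx;
 * char     szVendor[13];
 * ASMCpuId(0, &uMaxLeaf, &uEbx, &uEcx, &uEdx);
 * memcpy(&szVendor[0], &uEbx, 4);
 * memcpy(&szVendor[4], &uEdx, 4);
 * memcpy(&szVendor[8], &uEcx, 4);
 * szVendor[12] = '\0';
 * @endcode
 */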
524
525
526/**
527 * Performs the CPUID instruction with EAX and ECX input returning ALL output
528 * registers.
529 *
530 * @param uOperator CPUID operation (eax).
531 * @param uIdxECX ecx index
532 * @param pvEAX Where to store eax.
533 * @param pvEBX Where to store ebx.
534 * @param pvECX Where to store ecx.
535 * @param pvEDX Where to store edx.
536 * @remark We're using void pointers to ease the use of special bitfield structures and such.
537 */
538#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
539DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
540#else
541DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
542{
543# if RT_INLINE_ASM_GNU_STYLE
544# ifdef RT_ARCH_AMD64
545 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
546 __asm__ ("cpuid\n\t"
547 : "=a" (uRAX),
548 "=b" (uRBX),
549 "=c" (uRCX),
550 "=d" (uRDX)
551 : "0" (uOperator),
552 "2" (uIdxECX));
553 *(uint32_t *)pvEAX = (uint32_t)uRAX;
554 *(uint32_t *)pvEBX = (uint32_t)uRBX;
555 *(uint32_t *)pvECX = (uint32_t)uRCX;
556 *(uint32_t *)pvEDX = (uint32_t)uRDX;
557# else
558 __asm__ ("xchgl %%ebx, %1\n\t"
559 "cpuid\n\t"
560 "xchgl %%ebx, %1\n\t"
561 : "=a" (*(uint32_t *)pvEAX),
562 "=r" (*(uint32_t *)pvEBX),
563 "=c" (*(uint32_t *)pvECX),
564 "=d" (*(uint32_t *)pvEDX)
565 : "0" (uOperator),
566 "2" (uIdxECX));
567# endif
568
569# elif RT_INLINE_ASM_USES_INTRIN
570 int aInfo[4];
571 /* __cpuidex passes the sub-leaf index through in ECX (see the change note above). */
572 __cpuidex(aInfo, uOperator, uIdxECX);
573 *(uint32_t *)pvEAX = aInfo[0];
574 *(uint32_t *)pvEBX = aInfo[1];
575 *(uint32_t *)pvECX = aInfo[2];
576 *(uint32_t *)pvEDX = aInfo[3];
577
578# else
579 uint32_t uEAX;
580 uint32_t uEBX;
581 uint32_t uECX;
582 uint32_t uEDX;
583 __asm
584 {
585 push ebx
586 mov eax, [uOperator]
587 mov ecx, [uIdxECX]
588 cpuid
589 mov [uEAX], eax
590 mov [uEBX], ebx
591 mov [uECX], ecx
592 mov [uEDX], edx
593 pop ebx
594 }
595 *(uint32_t *)pvEAX = uEAX;
596 *(uint32_t *)pvEBX = uEBX;
597 *(uint32_t *)pvECX = uECX;
598 *(uint32_t *)pvEDX = uEDX;
599# endif
600}
601#endif
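
/**
 * Example: enumerating the deterministic cache parameters in leaf 4, one of
 * the leaves that takes a sub-leaf index in ECX and thus needs this
 * function rather than plain ASMCpuId(). A sketch, assuming an Intel CPU
 * that supports leaf 4:
 * @code
 * uint32_t iSubLeaf;
 * for (iSubLeaf = 0; ; iSubLeaf++)
 * {
 *     uint32_t uEax, uEbx, uEcx, uEdx;
 *     ASMCpuId_Idx_ECX(4, iSubLeaf, &uEax, &uEbx, &uEcx, &uEdx);
 *     if ((uEax & 0x1f) == 0) // cache type 0 means no more caches
 *         break;
 * }
 * @endcode
 */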
602
603
604/**
605 * Performs the cpuid instruction returning ecx and edx.
606 *
607 * @param uOperator CPUID operation (eax).
608 * @param pvECX Where to store ecx.
609 * @param pvEDX Where to store edx.
610 * @remark We're using void pointers to ease the use of special bitfield structures and such.
611 */
612#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
613DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
614#else
615DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
616{
617 uint32_t uEBX;
618 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
619}
620#endif
621
622
623/**
624 * Performs the cpuid instruction returning eax.
625 *
626 * @param uOperator CPUID operation (eax).
627 * @returns EAX after cpuid operation.
628 */
629#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
630DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
631#else
632DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
633{
634 RTCCUINTREG xAX;
635# if RT_INLINE_ASM_GNU_STYLE
636# ifdef RT_ARCH_AMD64
637 __asm__ ("cpuid"
638 : "=a" (xAX)
639 : "0" (uOperator)
640 : "rbx", "rcx", "rdx");
641# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
642 __asm__ ("push %%ebx\n\t"
643 "cpuid\n\t"
644 "pop %%ebx\n\t"
645 : "=a" (xAX)
646 : "0" (uOperator)
647 : "ecx", "edx");
648# else
649 __asm__ ("cpuid"
650 : "=a" (xAX)
651 : "0" (uOperator)
652 : "edx", "ecx", "ebx");
653# endif
654
655# elif RT_INLINE_ASM_USES_INTRIN
656 int aInfo[4];
657 __cpuid(aInfo, uOperator);
658 xAX = aInfo[0];
659
660# else
661 __asm
662 {
663 push ebx
664 mov eax, [uOperator]
665 cpuid
666 mov [xAX], eax
667 pop ebx
668 }
669# endif
670 return (uint32_t)xAX;
671}
672#endif
673
674
675/**
676 * Performs the cpuid instruction returning ebx.
677 *
678 * @param uOperator CPUID operation (eax).
679 * @returns EBX after cpuid operation.
680 */
681#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
682DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
683#else
684DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
685{
686 RTCCUINTREG xBX;
687# if RT_INLINE_ASM_GNU_STYLE
688# ifdef RT_ARCH_AMD64
689 RTCCUINTREG uSpill;
690 __asm__ ("cpuid"
691 : "=a" (uSpill),
692 "=b" (xBX)
693 : "0" (uOperator)
694 : "rdx", "rcx");
695# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
696 __asm__ ("push %%ebx\n\t"
697 "cpuid\n\t"
698 "mov %%ebx, %%edx\n\t"
699 "pop %%ebx\n\t"
700 : "=a" (uOperator),
701 "=d" (xBX)
702 : "0" (uOperator)
703 : "ecx");
704# else
705 __asm__ ("cpuid"
706 : "=a" (uOperator),
707 "=b" (xBX)
708 : "0" (uOperator)
709 : "edx", "ecx");
710# endif
711
712# elif RT_INLINE_ASM_USES_INTRIN
713 int aInfo[4];
714 __cpuid(aInfo, uOperator);
715 xBX = aInfo[1];
716
717# else
718 __asm
719 {
720 push ebx
721 mov eax, [uOperator]
722 cpuid
723 mov [xBX], ebx
724 pop ebx
725 }
726# endif
727 return (uint32_t)xBX;
728}
729#endif
730
731
732/**
733 * Performs the cpuid instruction returning ecx.
734 *
735 * @param uOperator CPUID operation (eax).
736 * @returns ECX after cpuid operation.
737 */
738#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
739DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
740#else
741DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
742{
743 RTCCUINTREG xCX;
744# if RT_INLINE_ASM_GNU_STYLE
745# ifdef RT_ARCH_AMD64
746 RTCCUINTREG uSpill;
747 __asm__ ("cpuid"
748 : "=a" (uSpill),
749 "=c" (xCX)
750 : "0" (uOperator)
751 : "rbx", "rdx");
752# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
753 __asm__ ("push %%ebx\n\t"
754 "cpuid\n\t"
755 "pop %%ebx\n\t"
756 : "=a" (uOperator),
757 "=c" (xCX)
758 : "0" (uOperator)
759 : "edx");
760# else
761 __asm__ ("cpuid"
762 : "=a" (uOperator),
763 "=c" (xCX)
764 : "0" (uOperator)
765 : "ebx", "edx");
766
767# endif
768
769# elif RT_INLINE_ASM_USES_INTRIN
770 int aInfo[4];
771 __cpuid(aInfo, uOperator);
772 xCX = aInfo[2];
773
774# else
775 __asm
776 {
777 push ebx
778 mov eax, [uOperator]
779 cpuid
780 mov [xCX], ecx
781 pop ebx
782 }
783# endif
784 return (uint32_t)xCX;
785}
786#endif
787
788
789/**
790 * Performs the cpuid instruction returning edx.
791 *
792 * @param uOperator CPUID operation (eax).
793 * @returns EDX after cpuid operation.
794 */
795#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
796DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
797#else
798DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
799{
800 RTCCUINTREG xDX;
801# if RT_INLINE_ASM_GNU_STYLE
802# ifdef RT_ARCH_AMD64
803 RTCCUINTREG uSpill;
804 __asm__ ("cpuid"
805 : "=a" (uSpill),
806 "=d" (xDX)
807 : "0" (uOperator)
808 : "rbx", "rcx");
809# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
810 __asm__ ("push %%ebx\n\t"
811 "cpuid\n\t"
812 "pop %%ebx\n\t"
813 : "=a" (uOperator),
814 "=d" (xDX)
815 : "0" (uOperator)
816 : "ecx");
817# else
818 __asm__ ("cpuid"
819 : "=a" (uOperator),
820 "=d" (xDX)
821 : "0" (uOperator)
822 : "ebx", "ecx");
823# endif
824
825# elif RT_INLINE_ASM_USES_INTRIN
826 int aInfo[4];
827 __cpuid(aInfo, uOperator);
828 xDX = aInfo[3];
829
830# else
831 __asm
832 {
833 push ebx
834 mov eax, [uOperator]
835 cpuid
836 mov [xDX], edx
837 pop ebx
838 }
839# endif
840 return (uint32_t)xDX;
841}
842#endif
843
844
845/**
846 * Checks if the current CPU supports CPUID.
847 *
848 * @returns true if CPUID is supported.
849 */
850DECLINLINE(bool) ASMHasCpuId(void)
851{
852#ifdef RT_ARCH_AMD64
853 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
854#else /* !RT_ARCH_AMD64 */
855 bool fRet = false;
856# if RT_INLINE_ASM_GNU_STYLE
857 uint32_t u1;
858 uint32_t u2;
859 __asm__ ("pushf\n\t"
860 "pop %1\n\t"
861 "mov %1, %2\n\t"
862 "xorl $0x200000, %1\n\t"
863 "push %1\n\t"
864 "popf\n\t"
865 "pushf\n\t"
866 "pop %1\n\t"
867 "cmpl %1, %2\n\t"
868 "setne %0\n\t"
869 "push %2\n\t"
870 "popf\n\t"
871 : "=m" (fRet), "=r" (u1), "=r" (u2));
872# else
873 __asm
874 {
875 pushfd
876 pop eax
877 mov ebx, eax
878 xor eax, 0200000h
879 push eax
880 popfd
881 pushfd
882 pop eax
883 cmp eax, ebx
884 setne fRet
885 push ebx
886 popfd
887 }
888# endif
889 return fRet;
890#endif /* !RT_ARCH_AMD64 */
891}
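
/**
 * Example: guarding CPUID use on 32-bit hosts, where 386/486-class CPUs may
 * lack the instruction:
 * @code
 * if (ASMHasCpuId())
 * {
 *     uint32_t uEax, uEbx, uEcx, uEdx;
 *     ASMCpuId(0, &uEax, &uEbx, &uEcx, &uEdx);
 *     // ... feature detection ...
 * }
 * @endcode
 */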
892
893
894/**
895 * Gets the APIC ID of the current CPU.
896 *
897 * @returns the APIC ID.
898 */
899#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
900DECLASM(uint8_t) ASMGetApicId(void);
901#else
902DECLINLINE(uint8_t) ASMGetApicId(void)
903{
904 RTCCUINTREG xBX;
905# if RT_INLINE_ASM_GNU_STYLE
906# ifdef RT_ARCH_AMD64
907 RTCCUINTREG uSpill;
908 __asm__ __volatile__ ("cpuid"
909 : "=a" (uSpill),
910 "=b" (xBX)
911 : "0" (1)
912 : "rcx", "rdx");
913# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
914 RTCCUINTREG uSpill;
915 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
916 "cpuid\n\t"
917 "xchgl %%ebx,%1\n\t"
918 : "=a" (uSpill),
919 "=rm" (xBX)
920 : "0" (1)
921 : "ecx", "edx");
922# else
923 RTCCUINTREG uSpill;
924 __asm__ __volatile__ ("cpuid"
925 : "=a" (uSpill),
926 "=b" (xBX)
927 : "0" (1)
928 : "ecx", "edx");
929# endif
930
931# elif RT_INLINE_ASM_USES_INTRIN
932 int aInfo[4];
933 __cpuid(aInfo, 1);
934 xBX = aInfo[1];
935
936# else
937 __asm
938 {
939 push ebx
940 mov eax, 1
941 cpuid
942 mov [xBX], ebx
943 pop ebx
944 }
945# endif
946 return (uint8_t)(xBX >> 24);
947}
948#endif
949
950
951/**
952 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
953 *
954 * @returns true/false.
955 * @param uEBX EBX return from ASMCpuId(0)
956 * @param uECX ECX return from ASMCpuId(0)
957 * @param uEDX EDX return from ASMCpuId(0)
958 */
959DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
960{
961 return uEBX == UINT32_C(0x756e6547)
962 && uECX == UINT32_C(0x6c65746e)
963 && uEDX == UINT32_C(0x49656e69);
964}
965
966
967/**
968 * Tests if this is a genuine Intel CPU.
969 *
970 * @returns true/false.
971 * @remarks ASSUMES that cpuid is supported by the CPU.
972 */
973DECLINLINE(bool) ASMIsIntelCpu(void)
974{
975 uint32_t uEAX, uEBX, uECX, uEDX;
976 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
977 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
978}
979
980
981/**
982 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
983 *
984 * @returns true/false.
985 * @param uEBX EBX return from ASMCpuId(0)
986 * @param uECX ECX return from ASMCpuId(0)
987 * @param uEDX EDX return from ASMCpuId(0)
988 */
989DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
990{
991 return uEBX == UINT32_C(0x68747541)
992 && uECX == UINT32_C(0x444d4163)
993 && uEDX == UINT32_C(0x69746e65);
994}
995
996
997/**
998 * Tests if this is an authentic AMD CPU.
999 *
1000 * @returns true/false.
1001 * @remarks ASSUMES that cpuid is supported by the CPU.
1002 */
1003DECLINLINE(bool) ASMIsAmdCpu(void)
1004{
1005 uint32_t uEAX, uEBX, uECX, uEDX;
1006 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1007 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1008}
1009
1010
1011/**
1012 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
1013 *
1014 * @returns true/false.
1015 * @param uEBX EBX return from ASMCpuId(0).
1016 * @param uECX ECX return from ASMCpuId(0).
1017 * @param uEDX EDX return from ASMCpuId(0).
1018 */
1019DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1020{
1021 return uEBX == UINT32_C(0x746e6543)
1022 && uECX == UINT32_C(0x736c7561)
1023 && uEDX == UINT32_C(0x48727561);
1024}
1025
1026
1027/**
1028 * Tests if this is a centaur hauling VIA CPU.
1029 *
1030 * @returns true/false.
1031 * @remarks ASSUMES that cpuid is supported by the CPU.
1032 */
1033DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1034{
1035 uint32_t uEAX, uEBX, uECX, uEDX;
1036 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1037 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1038}
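
/**
 * Example: dispatching on the CPU vendor with a single leaf 0 query and the
 * *Ex testers above, instead of issuing CPUID once per vendor check:
 * @code
 * uint32_t uEax, uEbx, uEcx, uEdx;
 * ASMCpuId(0, &uEax, &uEbx, &uEcx, &uEdx);
 * if (ASMIsIntelCpuEx(uEbx, uEcx, uEdx))
 * {   // ... Intel specific setup ...
 * }
 * else if (ASMIsAmdCpuEx(uEbx, uEcx, uEdx))
 * {   // ... AMD specific setup ...
 * }
 * else if (ASMIsViaCentaurCpuEx(uEbx, uEcx, uEdx))
 * {   // ... VIA specific setup ...
 * }
 * @endcode
 */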
1039
1040
1041/**
1042 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1043 *
1044 *
1045 * @returns true/false.
1046 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1047 *
1048 * @note This only succeeds if there are at least two leaves in the range.
1049 * @remarks The upper range limit is just some half reasonable value we've
1050 * picked out of thin air.
1051 */
1052DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1053{
1054 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1055}
1056
1057
1058/**
1059 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1060 *
1061 * This only succeeds if there are at least two leaves in the range.
1062 *
1063 * @returns true/false.
1064 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1065 *
1066 * @note This only succeeds if there are at least two leaves in the range.
1067 * @remarks The upper range limit is just some half reasonable value we've
1068 * picked out of thin air.
1069 */
1070DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1071{
1072 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1073}
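
/**
 * Example: validating the extended range before querying leaf 0x80000001:
 * @code
 * uint32_t uExtFeatures = 0;
 * if (ASMIsValidExtRange(ASMCpuId_EAX(UINT32_C(0x80000000))))
 *     uExtFeatures = ASMCpuId_EDX(UINT32_C(0x80000001));
 * @endcode
 */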
1074
1075
1076/**
1077 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1078 *
1079 * @returns Family.
1080 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1081 */
1082DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1083{
1084 return ((uEAX >> 8) & 0xf) == 0xf
1085 ? ((uEAX >> 20) & 0x7f) + 0xf
1086 : ((uEAX >> 8) & 0xf);
1087}
1088
1089
1090/**
1091 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1092 *
1093 * @returns Model.
1094 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1095 */
1096DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1097{
1098 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1099 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1100 : ((uEAX >> 4) & 0xf);
1101}
1102
1103
1104/**
1105 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1106 *
1107 * @returns Model.
1108 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1109 */
1110DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1111{
1112 return ((uEAX >> 8) & 0xf) == 0xf
1113 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1114 : ((uEAX >> 4) & 0xf);
1115}
1116
1117
1118/**
1119 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1120 *
1121 * @returns Model.
1122 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1123 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1124 */
1125DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1126{
1127 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1128 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1129 : ((uEAX >> 4) & 0xf);
1130}
1131
1132
1133/**
1134 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1135 *
1136 * @returns Stepping.
1137 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1138 */
1139DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1140{
1141 return uEAX & 0xf;
1142}
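
/**
 * Example: decoding family, model and stepping from leaf 1 with the helpers
 * above. The fIntel flag matters because Intel also applies the extended
 * model bits to family 6:
 * @code
 * uint32_t const uEax      = ASMCpuId_EAX(1);
 * bool     const fIntel    = ASMIsIntelCpu();
 * uint32_t const uFamily   = ASMGetCpuFamily(uEax);
 * uint32_t const uModel    = ASMGetCpuModel(uEax, fIntel);
 * uint32_t const uStepping = ASMGetCpuStepping(uEax);
 * @endcode
 */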
1143
1144
1145/**
1146 * Get cr0.
1147 * @returns cr0.
1148 */
1149#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1150DECLASM(RTCCUINTREG) ASMGetCR0(void);
1151#else
1152DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
1153{
1154 RTCCUINTREG uCR0;
1155# if RT_INLINE_ASM_USES_INTRIN
1156 uCR0 = __readcr0();
1157
1158# elif RT_INLINE_ASM_GNU_STYLE
1159# ifdef RT_ARCH_AMD64
1160 __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
1161# else
1162 __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
1163# endif
1164# else
1165 __asm
1166 {
1167# ifdef RT_ARCH_AMD64
1168 mov rax, cr0
1169 mov [uCR0], rax
1170# else
1171 mov eax, cr0
1172 mov [uCR0], eax
1173# endif
1174 }
1175# endif
1176 return uCR0;
1177}
1178#endif
1179
1180
1181/**
1182 * Sets the CR0 register.
1183 * @param uCR0 The new CR0 value.
1184 */
1185#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1186DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
1187#else
1188DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
1189{
1190# if RT_INLINE_ASM_USES_INTRIN
1191 __writecr0(uCR0);
1192
1193# elif RT_INLINE_ASM_GNU_STYLE
1194# ifdef RT_ARCH_AMD64
1195 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1196# else
1197 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1198# endif
1199# else
1200 __asm
1201 {
1202# ifdef RT_ARCH_AMD64
1203 mov rax, [uCR0]
1204 mov cr0, rax
1205# else
1206 mov eax, [uCR0]
1207 mov cr0, eax
1208# endif
1209 }
1210# endif
1211}
1212#endif
1213
1214
1215/**
1216 * Get cr2.
1217 * @returns cr2.
1218 */
1219#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1220DECLASM(RTCCUINTREG) ASMGetCR2(void);
1221#else
1222DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
1223{
1224 RTCCUINTREG uCR2;
1225# if RT_INLINE_ASM_USES_INTRIN
1226 uCR2 = __readcr2();
1227
1228# elif RT_INLINE_ASM_GNU_STYLE
1229# ifdef RT_ARCH_AMD64
1230 __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
1231# else
1232 __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
1233# endif
1234# else
1235 __asm
1236 {
1237# ifdef RT_ARCH_AMD64
1238 mov rax, cr2
1239 mov [uCR2], rax
1240# else
1241 mov eax, cr2
1242 mov [uCR2], eax
1243# endif
1244 }
1245# endif
1246 return uCR2;
1247}
1248#endif
1249
1250
1251/**
1252 * Sets the CR2 register.
1253 * @param uCR2 The new CR2 value.
1254 */
1255#if RT_INLINE_ASM_EXTERNAL
1256DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
1257#else
1258DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
1259{
1260# if RT_INLINE_ASM_GNU_STYLE
1261# ifdef RT_ARCH_AMD64
1262 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1263# else
1264 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1265# endif
1266# else
1267 __asm
1268 {
1269# ifdef RT_ARCH_AMD64
1270 mov rax, [uCR2]
1271 mov cr2, rax
1272# else
1273 mov eax, [uCR2]
1274 mov cr2, eax
1275# endif
1276 }
1277# endif
1278}
1279#endif
1280
1281
1282/**
1283 * Get cr3.
1284 * @returns cr3.
1285 */
1286#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1287DECLASM(RTCCUINTREG) ASMGetCR3(void);
1288#else
1289DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
1290{
1291 RTCCUINTREG uCR3;
1292# if RT_INLINE_ASM_USES_INTRIN
1293 uCR3 = __readcr3();
1294
1295# elif RT_INLINE_ASM_GNU_STYLE
1296# ifdef RT_ARCH_AMD64
1297 __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
1298# else
1299 __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
1300# endif
1301# else
1302 __asm
1303 {
1304# ifdef RT_ARCH_AMD64
1305 mov rax, cr3
1306 mov [uCR3], rax
1307# else
1308 mov eax, cr3
1309 mov [uCR3], eax
1310# endif
1311 }
1312# endif
1313 return uCR3;
1314}
1315#endif
1316
1317
1318/**
1319 * Sets the CR3 register.
1320 *
1321 * @param uCR3 New CR3 value.
1322 */
1323#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1324DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
1325#else
1326DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
1327{
1328# if RT_INLINE_ASM_USES_INTRIN
1329 __writecr3(uCR3);
1330
1331# elif RT_INLINE_ASM_GNU_STYLE
1332# ifdef RT_ARCH_AMD64
1333 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1334# else
1335 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1336# endif
1337# else
1338 __asm
1339 {
1340# ifdef RT_ARCH_AMD64
1341 mov rax, [uCR3]
1342 mov cr3, rax
1343# else
1344 mov eax, [uCR3]
1345 mov cr3, eax
1346# endif
1347 }
1348# endif
1349}
1350#endif
1351
1352
1353/**
1354 * Reloads the CR3 register.
1355 */
1356#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1357DECLASM(void) ASMReloadCR3(void);
1358#else
1359DECLINLINE(void) ASMReloadCR3(void)
1360{
1361# if RT_INLINE_ASM_USES_INTRIN
1362 __writecr3(__readcr3());
1363
1364# elif RT_INLINE_ASM_GNU_STYLE
1365 RTCCUINTREG u;
1366# ifdef RT_ARCH_AMD64
1367 __asm__ __volatile__("movq %%cr3, %0\n\t"
1368 "movq %0, %%cr3\n\t"
1369 : "=r" (u));
1370# else
1371 __asm__ __volatile__("movl %%cr3, %0\n\t"
1372 "movl %0, %%cr3\n\t"
1373 : "=r" (u));
1374# endif
1375# else
1376 __asm
1377 {
1378# ifdef RT_ARCH_AMD64
1379 mov rax, cr3
1380 mov cr3, rax
1381# else
1382 mov eax, cr3
1383 mov cr3, eax
1384# endif
1385 }
1386# endif
1387}
1388#endif
1389
1390
1391/**
1392 * Get cr4.
1393 * @returns cr4.
1394 */
1395#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1396DECLASM(RTCCUINTREG) ASMGetCR4(void);
1397#else
1398DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
1399{
1400 RTCCUINTREG uCR4;
1401# if RT_INLINE_ASM_USES_INTRIN
1402 uCR4 = __readcr4();
1403
1404# elif RT_INLINE_ASM_GNU_STYLE
1405# ifdef RT_ARCH_AMD64
1406 __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
1407# else
1408 __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
1409# endif
1410# else
1411 __asm
1412 {
1413# ifdef RT_ARCH_AMD64
1414 mov rax, cr4
1415 mov [uCR4], rax
1416# else
1417 push eax /* just in case */
1418 /*mov eax, cr4*/
1419 _emit 0x0f
1420 _emit 0x20
1421 _emit 0xe0
1422 mov [uCR4], eax
1423 pop eax
1424# endif
1425 }
1426# endif
1427 return uCR4;
1428}
1429#endif
1430
1431
1432/**
1433 * Sets the CR4 register.
1434 *
1435 * @param uCR4 New CR4 value.
1436 */
1437#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1438DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
1439#else
1440DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
1441{
1442# if RT_INLINE_ASM_USES_INTRIN
1443 __writecr4(uCR4);
1444
1445# elif RT_INLINE_ASM_GNU_STYLE
1446# ifdef RT_ARCH_AMD64
1447 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1448# else
1449 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1450# endif
1451# else
1452 __asm
1453 {
1454# ifdef RT_ARCH_AMD64
1455 mov rax, [uCR4]
1456 mov cr4, rax
1457# else
1458 mov eax, [uCR4]
1459 _emit 0x0F
1460 _emit 0x22
1461 _emit 0xE0 /* mov cr4, eax */
1462# endif
1463 }
1464# endif
1465}
1466#endif
1467
1468
1469/**
1470 * Get cr8.
1471 * @returns cr8.
1472 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1473 */
1474#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1475DECLASM(RTCCUINTREG) ASMGetCR8(void);
1476#else
1477DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
1478{
1479# ifdef RT_ARCH_AMD64
1480 RTCCUINTREG uCR8;
1481# if RT_INLINE_ASM_USES_INTRIN
1482 uCR8 = __readcr8();
1483
1484# elif RT_INLINE_ASM_GNU_STYLE
1485 __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
1486# else
1487 __asm
1488 {
1489 mov rax, cr8
1490 mov [uCR8], rax
1491 }
1492# endif
1493 return uCR8;
1494# else /* !RT_ARCH_AMD64 */
1495 return 0;
1496# endif /* !RT_ARCH_AMD64 */
1497}
1498#endif
1499
1500
1501/**
1502 * Enables interrupts (EFLAGS.IF).
1503 */
1504#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1505DECLASM(void) ASMIntEnable(void);
1506#else
1507DECLINLINE(void) ASMIntEnable(void)
1508{
1509# if RT_INLINE_ASM_GNU_STYLE
1510 __asm("sti\n");
1511# elif RT_INLINE_ASM_USES_INTRIN
1512 _enable();
1513# else
1514 __asm sti
1515# endif
1516}
1517#endif
1518
1519
1520/**
1521 * Disables interrupts (!EFLAGS.IF).
1522 */
1523#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1524DECLASM(void) ASMIntDisable(void);
1525#else
1526DECLINLINE(void) ASMIntDisable(void)
1527{
1528# if RT_INLINE_ASM_GNU_STYLE
1529 __asm("cli\n");
1530# elif RT_INLINE_ASM_USES_INTRIN
1531 _disable();
1532# else
1533 __asm cli
1534# endif
1535}
1536#endif
1537
1538
1539/**
1540 * Disables interrupts and returns previous xFLAGS.
1541 */
1542#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1543DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1544#else
1545DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1546{
1547 RTCCUINTREG xFlags;
1548# if RT_INLINE_ASM_GNU_STYLE
1549# ifdef RT_ARCH_AMD64
1550 __asm__ __volatile__("pushfq\n\t"
1551 "cli\n\t"
1552 "popq %0\n\t"
1553 : "=r" (xFlags));
1554# else
1555 __asm__ __volatile__("pushfl\n\t"
1556 "cli\n\t"
1557 "popl %0\n\t"
1558 : "=r" (xFlags));
1559# endif
1560# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
1561 xFlags = ASMGetFlags();
1562 _disable();
1563# else
1564 __asm {
1565 pushfd
1566 cli
1567 pop [xFlags]
1568 }
1569# endif
1570 return xFlags;
1571}
1572#endif
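
/**
 * Example: the usual pattern these functions enable - disable interrupts,
 * do the critical work, then restore the previous state rather than
 * unconditionally re-enabling (ring-0 context assumed):
 * @code
 * RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
 * // ... code that must not be interrupted ...
 * ASMSetFlags(fSavedFlags);
 * @endcode
 */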
1573
1574
1575/**
1576 * Are interrupts enabled?
1577 *
1578 * @returns true / false.
1579 */
1580DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
1581{
1582 RTCCUINTREG uFlags = ASMGetFlags();
1583 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
1584}
1585
1586
1587/**
1588 * Halts the CPU until interrupted.
1589 */
1590#if RT_INLINE_ASM_EXTERNAL
1591DECLASM(void) ASMHalt(void);
1592#else
1593DECLINLINE(void) ASMHalt(void)
1594{
1595# if RT_INLINE_ASM_GNU_STYLE
1596 __asm__ __volatile__("hlt\n\t");
1597# else
1598 __asm {
1599 hlt
1600 }
1601# endif
1602}
1603#endif
1604
1605
1606/**
1607 * Reads a machine specific register.
1608 *
1609 * @returns Register content.
1610 * @param uRegister Register to read.
1611 */
1612#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1613DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
1614#else
1615DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
1616{
1617 RTUINT64U u;
1618# if RT_INLINE_ASM_GNU_STYLE
1619 __asm__ __volatile__("rdmsr\n\t"
1620 : "=a" (u.s.Lo),
1621 "=d" (u.s.Hi)
1622 : "c" (uRegister));
1623
1624# elif RT_INLINE_ASM_USES_INTRIN
1625 u.u = __readmsr(uRegister);
1626
1627# else
1628 __asm
1629 {
1630 mov ecx, [uRegister]
1631 rdmsr
1632 mov [u.s.Lo], eax
1633 mov [u.s.Hi], edx
1634 }
1635# endif
1636
1637 return u.u;
1638}
1639#endif
1640
1641
1642/**
1643 * Writes a machine specific register.
1644 *
1645 * @remark The value is written as a single 64-bit quantity (edx:eax).
1646 * @param uRegister Register to write to.
1647 * @param u64Val Value to write.
1648 */
1649#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1650DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
1651#else
1652DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
1653{
1654 RTUINT64U u;
1655
1656 u.u = u64Val;
1657# if RT_INLINE_ASM_GNU_STYLE
1658 __asm__ __volatile__("wrmsr\n\t"
1659 ::"a" (u.s.Lo),
1660 "d" (u.s.Hi),
1661 "c" (uRegister));
1662
1663# elif RT_INLINE_ASM_USES_INTRIN
1664 __writemsr(uRegister, u.u);
1665
1666# else
1667 __asm
1668 {
1669 mov ecx, [uRegister]
1670 mov edx, [u.s.Hi]
1671 mov eax, [u.s.Lo]
1672 wrmsr
1673 }
1674# endif
1675}
1676#endif
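
/**
 * Example: a read-modify-write of an MSR with the pair above. The register
 * (IA32_MISC_ENABLE, 0x1a0) and the bit are illustrative only; MSR access
 * is ring-0 only and #GPs on unsupported registers:
 * @code
 * uint64_t uValue = ASMRdMsr(0x1a0);
 * uValue |= RT_BIT_64(0); // assumed bit of interest
 * ASMWrMsr(0x1a0, uValue);
 * @endcode
 */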
1677
1678
1679/**
1680 * Reads low part of a machine specific register.
1681 *
1682 * @returns Register content.
1683 * @param uRegister Register to read.
1684 */
1685#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1686DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
1687#else
1688DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
1689{
1690 uint32_t u32;
1691# if RT_INLINE_ASM_GNU_STYLE
1692 __asm__ __volatile__("rdmsr\n\t"
1693 : "=a" (u32)
1694 : "c" (uRegister)
1695 : "edx");
1696
1697# elif RT_INLINE_ASM_USES_INTRIN
1698 u32 = (uint32_t)__readmsr(uRegister);
1699
700# else
1701 __asm
1702 {
1703 mov ecx, [uRegister]
1704 rdmsr
1705 mov [u32], eax
1706 }
1707# endif
1708
1709 return u32;
1710}
1711#endif
1712
1713
1714/**
1715 * Reads high part of a machine specific register.
1716 *
1717 * @returns Register content.
1718 * @param uRegister Register to read.
1719 */
1720#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1721DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
1722#else
1723DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
1724{
1725 uint32_t u32;
1726# if RT_INLINE_ASM_GNU_STYLE
1727 __asm__ __volatile__("rdmsr\n\t"
1728 : "=d" (u32)
1729 : "c" (uRegister)
1730 : "eax");
1731
1732# elif RT_INLINE_ASM_USES_INTRIN
1733 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
1734
1735# else
1736 __asm
1737 {
1738 mov ecx, [uRegister]
1739 rdmsr
1740 mov [u32], edx
1741 }
1742# endif
1743
1744 return u32;
1745}
1746#endif
1747
1748
1749/**
1750 * Gets dr0.
1751 *
1752 * @returns dr0.
1753 */
1754#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1755DECLASM(RTCCUINTREG) ASMGetDR0(void);
1756#else
1757DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
1758{
1759 RTCCUINTREG uDR0;
1760# if RT_INLINE_ASM_USES_INTRIN
1761 uDR0 = __readdr(0);
1762# elif RT_INLINE_ASM_GNU_STYLE
1763# ifdef RT_ARCH_AMD64
1764 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
1765# else
1766 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
1767# endif
1768# else
1769 __asm
1770 {
1771# ifdef RT_ARCH_AMD64
1772 mov rax, dr0
1773 mov [uDR0], rax
1774# else
1775 mov eax, dr0
1776 mov [uDR0], eax
1777# endif
1778 }
1779# endif
1780 return uDR0;
1781}
1782#endif
1783
1784
1785/**
1786 * Gets dr1.
1787 *
1788 * @returns dr1.
1789 */
1790#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1791DECLASM(RTCCUINTREG) ASMGetDR1(void);
1792#else
1793DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
1794{
1795 RTCCUINTREG uDR1;
1796# if RT_INLINE_ASM_USES_INTRIN
1797 uDR1 = __readdr(1);
1798# elif RT_INLINE_ASM_GNU_STYLE
1799# ifdef RT_ARCH_AMD64
1800 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
1801# else
1802 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
1803# endif
1804# else
1805 __asm
1806 {
1807# ifdef RT_ARCH_AMD64
1808 mov rax, dr1
1809 mov [uDR1], rax
1810# else
1811 mov eax, dr1
1812 mov [uDR1], eax
1813# endif
1814 }
1815# endif
1816 return uDR1;
1817}
1818#endif
1819
1820
1821/**
1822 * Gets dr2.
1823 *
1824 * @returns dr2.
1825 */
1826#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1827DECLASM(RTCCUINTREG) ASMGetDR2(void);
1828#else
1829DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
1830{
1831 RTCCUINTREG uDR2;
1832# if RT_INLINE_ASM_USES_INTRIN
1833 uDR2 = __readdr(2);
1834# elif RT_INLINE_ASM_GNU_STYLE
1835# ifdef RT_ARCH_AMD64
1836 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
1837# else
1838 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
1839# endif
1840# else
1841 __asm
1842 {
1843# ifdef RT_ARCH_AMD64
1844 mov rax, dr2
1845 mov [uDR2], rax
1846# else
1847 mov eax, dr2
1848 mov [uDR2], eax
1849# endif
1850 }
1851# endif
1852 return uDR2;
1853}
1854#endif
1855
1856
1857/**
1858 * Gets dr3.
1859 *
1860 * @returns dr3.
1861 */
1862#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1863DECLASM(RTCCUINTREG) ASMGetDR3(void);
1864#else
1865DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
1866{
1867 RTCCUINTREG uDR3;
1868# if RT_INLINE_ASM_USES_INTRIN
1869 uDR3 = __readdr(3);
1870# elif RT_INLINE_ASM_GNU_STYLE
1871# ifdef RT_ARCH_AMD64
1872 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
1873# else
1874 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
1875# endif
1876# else
1877 __asm
1878 {
1879# ifdef RT_ARCH_AMD64
1880 mov rax, dr3
1881 mov [uDR3], rax
1882# else
1883 mov eax, dr3
1884 mov [uDR3], eax
1885# endif
1886 }
1887# endif
1888 return uDR3;
1889}
1890#endif
1891
1892
1893/**
1894 * Gets dr6.
1895 *
1896 * @returns dr6.
1897 */
1898#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1899DECLASM(RTCCUINTREG) ASMGetDR6(void);
1900#else
1901DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
1902{
1903 RTCCUINTREG uDR6;
1904# if RT_INLINE_ASM_USES_INTRIN
1905 uDR6 = __readdr(6);
1906# elif RT_INLINE_ASM_GNU_STYLE
1907# ifdef RT_ARCH_AMD64
1908 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
1909# else
1910 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
1911# endif
1912# else
1913 __asm
1914 {
1915# ifdef RT_ARCH_AMD64
1916 mov rax, dr6
1917 mov [uDR6], rax
1918# else
1919 mov eax, dr6
1920 mov [uDR6], eax
1921# endif
1922 }
1923# endif
1924 return uDR6;
1925}
1926#endif
1927
1928
1929/**
1930 * Reads and clears DR6.
1931 *
1932 * @returns DR6.
1933 */
1934#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1935DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
1936#else
1937DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
1938{
1939 RTCCUINTREG uDR6;
1940# if RT_INLINE_ASM_USES_INTRIN
1941 uDR6 = __readdr(6);
1942 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1943# elif RT_INLINE_ASM_GNU_STYLE
1944 RTCCUINTREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1945# ifdef RT_ARCH_AMD64
1946 __asm__ __volatile__("movq %%dr6, %0\n\t"
1947 "movq %1, %%dr6\n\t"
1948 : "=r" (uDR6)
1949 : "r" (uNewValue));
1950# else
1951 __asm__ __volatile__("movl %%dr6, %0\n\t"
1952 "movl %1, %%dr6\n\t"
1953 : "=r" (uDR6)
1954 : "r" (uNewValue));
1955# endif
1956# else
1957 __asm
1958 {
1959# ifdef RT_ARCH_AMD64
1960 mov rax, dr6
1961 mov [uDR6], rax
1962 mov rcx, rax
1963 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 and 63-32 are zero. */
1964 mov dr6, rcx
1965# else
1966 mov eax, dr6
1967 mov [uDR6], eax
1968 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 12 is zero. */
1969 mov dr6, ecx
1970# endif
1971 }
1972# endif
1973 return uDR6;
1974}
1975#endif
1976
1977
1978/**
1979 * Gets dr7.
1980 *
1981 * @returns dr7.
1982 */
1983#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1984DECLASM(RTCCUINTREG) ASMGetDR7(void);
1985#else
1986DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
1987{
1988 RTCCUINTREG uDR7;
1989# if RT_INLINE_ASM_USES_INTRIN
1990 uDR7 = __readdr(7);
1991# elif RT_INLINE_ASM_GNU_STYLE
1992# ifdef RT_ARCH_AMD64
1993 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
1994# else
1995 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
1996# endif
1997# else
1998 __asm
1999 {
2000# ifdef RT_ARCH_AMD64
2001 mov rax, dr7
2002 mov [uDR7], rax
2003# else
2004 mov eax, dr7
2005 mov [uDR7], eax
2006# endif
2007 }
2008# endif
2009 return uDR7;
2010}
2011#endif
2012
2013
2014/**
2015 * Sets dr0.
2016 *
2017 * @param uDRVal Debug register value to write
2018 */
2019#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2020DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
2021#else
2022DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
2023{
2024# if RT_INLINE_ASM_USES_INTRIN
2025 __writedr(0, uDRVal);
2026# elif RT_INLINE_ASM_GNU_STYLE
2027# ifdef RT_ARCH_AMD64
2028 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2029# else
2030 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2031# endif
2032# else
2033 __asm
2034 {
2035# ifdef RT_ARCH_AMD64
2036 mov rax, [uDRVal]
2037 mov dr0, rax
2038# else
2039 mov eax, [uDRVal]
2040 mov dr0, eax
2041# endif
2042 }
2043# endif
2044}
2045#endif
2046
2047
2048/**
2049 * Sets dr1.
2050 *
2051 * @param uDRVal Debug register value to write
2052 */
2053#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2054DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
2055#else
2056DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
2057{
2058# if RT_INLINE_ASM_USES_INTRIN
2059 __writedr(1, uDRVal);
2060# elif RT_INLINE_ASM_GNU_STYLE
2061# ifdef RT_ARCH_AMD64
2062 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2063# else
2064 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2065# endif
2066# else
2067 __asm
2068 {
2069# ifdef RT_ARCH_AMD64
2070 mov rax, [uDRVal]
2071 mov dr1, rax
2072# else
2073 mov eax, [uDRVal]
2074 mov dr1, eax
2075# endif
2076 }
2077# endif
2078}
2079#endif
2080
2081
2082/**
2083 * Sets dr2.
2084 *
2085 * @param uDRVal Debug register value to write
2086 */
2087#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2088DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
2089#else
2090DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
2091{
2092# if RT_INLINE_ASM_USES_INTRIN
2093 __writedr(2, uDRVal);
2094# elif RT_INLINE_ASM_GNU_STYLE
2095# ifdef RT_ARCH_AMD64
2096 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2097# else
2098 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2099# endif
2100# else
2101 __asm
2102 {
2103# ifdef RT_ARCH_AMD64
2104 mov rax, [uDRVal]
2105 mov dr2, rax
2106# else
2107 mov eax, [uDRVal]
2108 mov dr2, eax
2109# endif
2110 }
2111# endif
2112}
2113#endif
2114
2115
2116/**
2117 * Sets dr3.
2118 *
2119 * @param uDRVal Debug register value to write
2120 */
2121#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2122DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
2123#else
2124DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
2125{
2126# if RT_INLINE_ASM_USES_INTRIN
2127 __writedr(3, uDRVal);
2128# elif RT_INLINE_ASM_GNU_STYLE
2129# ifdef RT_ARCH_AMD64
2130 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2131# else
2132 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2133# endif
2134# else
2135 __asm
2136 {
2137# ifdef RT_ARCH_AMD64
2138 mov rax, [uDRVal]
2139 mov dr3, rax
2140# else
2141 mov eax, [uDRVal]
2142 mov dr3, eax
2143# endif
2144 }
2145# endif
2146}
2147#endif
2148
2149
2150/**
2151 * Sets dr6.
2152 *
2153 * @param uDRVal Debug register value to write
2154 */
2155#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2156DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
2157#else
2158DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
2159{
2160# if RT_INLINE_ASM_USES_INTRIN
2161 __writedr(6, uDRVal);
2162# elif RT_INLINE_ASM_GNU_STYLE
2163# ifdef RT_ARCH_AMD64
2164 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2165# else
2166 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2167# endif
2168# else
2169 __asm
2170 {
2171# ifdef RT_ARCH_AMD64
2172 mov rax, [uDRVal]
2173 mov dr6, rax
2174# else
2175 mov eax, [uDRVal]
2176 mov dr6, eax
2177# endif
2178 }
2179# endif
2180}
2181#endif
2182
2183
2184/**
2185 * Sets dr7.
2186 *
2187 * @param uDRVal Debug register value to write
2188 */
2189#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2190DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
2191#else
2192DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
2193{
2194# if RT_INLINE_ASM_USES_INTRIN
2195 __writedr(7, uDRVal);
2196# elif RT_INLINE_ASM_GNU_STYLE
2197# ifdef RT_ARCH_AMD64
2198 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2199# else
2200 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2201# endif
2202# else
2203 __asm
2204 {
2205# ifdef RT_ARCH_AMD64
2206 mov rax, [uDRVal]
2207 mov dr7, rax
2208# else
2209 mov eax, [uDRVal]
2210 mov dr7, eax
2211# endif
2212 }
2213# endif
2214}
2215#endif
2216
2217
2218/**
2219 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2220 *
2221 * @param Port I/O port to write to.
2222 * @param u8 8-bit integer to write.
2223 */
2224#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2225DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2226#else
2227DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2228{
2229# if RT_INLINE_ASM_GNU_STYLE
2230 __asm__ __volatile__("outb %b1, %w0\n\t"
2231 :: "Nd" (Port),
2232 "a" (u8));
2233
2234# elif RT_INLINE_ASM_USES_INTRIN
2235 __outbyte(Port, u8);
2236
2237# else
2238 __asm
2239 {
2240 mov dx, [Port]
2241 mov al, [u8]
2242 out dx, al
2243 }
2244# endif
2245}
2246#endif
2247
2248
2249/**
2250 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2251 *
2252 * @returns 8-bit integer.
2253 * @param Port I/O port to read from.
2254 */
2255#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2256DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2257#else
2258DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2259{
2260 uint8_t u8;
2261# if RT_INLINE_ASM_GNU_STYLE
2262 __asm__ __volatile__("inb %w1, %b0\n\t"
2263 : "=a" (u8)
2264 : "Nd" (Port));
2265
2266# elif RT_INLINE_ASM_USES_INTRIN
2267 u8 = __inbyte(Port);
2268
2269# else
2270 __asm
2271 {
2272 mov dx, [Port]
2273 in al, dx
2274 mov [u8], al
2275 }
2276# endif
2277 return u8;
2278}
2279#endif
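
/**
 * Example: the classic index/data port pattern built from ASMOutU8 and
 * ASMInU8, here reading a CMOS RTC register (ports 0x70/0x71 on PC
 * hardware; ring-0 or sufficient IOPL assumed):
 * @code
 * ASMOutU8(0x70, 0x0a);             // select CMOS status register A
 * uint8_t bStatusA = ASMInU8(0x71); // read it through the data port
 * @endcode
 */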
2280
2281
2282/**
2283 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2284 *
2285 * @param Port I/O port to write to.
2286 * @param u16 16-bit integer to write.
2287 */
2288#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2289DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2290#else
2291DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2292{
2293# if RT_INLINE_ASM_GNU_STYLE
2294 __asm__ __volatile__("outw %w1, %w0\n\t"
2295 :: "Nd" (Port),
2296 "a" (u16));
2297
2298# elif RT_INLINE_ASM_USES_INTRIN
2299 __outword(Port, u16);
2300
2301# else
2302 __asm
2303 {
2304 mov dx, [Port]
2305 mov ax, [u16]
2306 out dx, ax
2307 }
2308# endif
2309}
2310#endif
2311
2312
2313/**
2314 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2315 *
2316 * @returns 16-bit integer.
2317 * @param Port I/O port to read from.
2318 */
2319#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2320DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2321#else
2322DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2323{
2324 uint16_t u16;
2325# if RT_INLINE_ASM_GNU_STYLE
2326 __asm__ __volatile__("inw %w1, %w0\n\t"
2327 : "=a" (u16)
2328 : "Nd" (Port));
2329
2330# elif RT_INLINE_ASM_USES_INTRIN
2331 u16 = __inword(Port);
2332
2333# else
2334 __asm
2335 {
2336 mov dx, [Port]
2337 in ax, dx
2338 mov [u16], ax
2339 }
2340# endif
2341 return u16;
2342}
2343#endif
2344
2345
2346/**
2347 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2348 *
2349 * @param Port I/O port to write to.
2350 * @param u32 32-bit integer to write.
2351 */
2352#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2353DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2354#else
2355DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2356{
2357# if RT_INLINE_ASM_GNU_STYLE
2358 __asm__ __volatile__("outl %1, %w0\n\t"
2359 :: "Nd" (Port),
2360 "a" (u32));
2361
2362# elif RT_INLINE_ASM_USES_INTRIN
2363 __outdword(Port, u32);
2364
2365# else
2366 __asm
2367 {
2368 mov dx, [Port]
2369 mov eax, [u32]
2370 out dx, eax
2371 }
2372# endif
2373}
2374#endif
2375
2376
2377/**
2378 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2379 *
2380 * @returns 32-bit integer.
2381 * @param Port I/O port to read from.
2382 */
2383#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2384DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2385#else
2386DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2387{
2388 uint32_t u32;
2389# if RT_INLINE_ASM_GNU_STYLE
2390 __asm__ __volatile__("inl %w1, %0\n\t"
2391 : "=a" (u32)
2392 : "Nd" (Port));
2393
2394# elif RT_INLINE_ASM_USES_INTRIN
2395 u32 = __indword(Port);
2396
2397# else
2398 __asm
2399 {
2400 mov dx, [Port]
2401 in eax, dx
2402 mov [u32], eax
2403 }
2404# endif
2405 return u32;
2406}
2407#endif
2408
2409
2410/**
2411 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2412 *
2413 * @param Port I/O port to write to.
2414 * @param pau8 Pointer to the string buffer.
2415 * @param c The number of items to write.
2416 */
2417#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2418DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2419#else
2420DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2421{
2422# if RT_INLINE_ASM_GNU_STYLE
2423 __asm__ __volatile__("rep; outsb\n\t"
2424 : "+S" (pau8),
2425 "+c" (c)
2426 : "d" (Port));
2427
2428# elif RT_INLINE_ASM_USES_INTRIN
2429 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2430
2431# else
2432 __asm
2433 {
2434 mov dx, [Port]
2435 mov ecx, [c]
2436 mov eax, [pau8]
2437 xchg esi, eax
2438 rep outsb
2439 xchg esi, eax
2440 }
2441# endif
2442}
2443#endif
2444
2445
2446/**
2447 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2448 *
2449 * @param Port I/O port to read from.
2450 * @param pau8 Pointer to the string buffer (output).
2451 * @param c The number of items to read.
2452 */
2453#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2454DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2455#else
2456DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2457{
2458# if RT_INLINE_ASM_GNU_STYLE
2459 __asm__ __volatile__("rep; insb\n\t"
2460 : "+D" (pau8),
2461 "+c" (c)
2462 : "d" (Port));
2463
2464# elif RT_INLINE_ASM_USES_INTRIN
2465 __inbytestring(Port, pau8, (unsigned long)c);
2466
2467# else
2468 __asm
2469 {
2470 mov dx, [Port]
2471 mov ecx, [c]
2472 mov eax, [pau8]
2473 xchg edi, eax
2474 rep insb
2475 xchg edi, eax
2476 }
2477# endif
2478}
2479#endif
2480
2481
2482/**
2483 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2484 *
2485 * @param Port I/O port to write to.
2486 * @param pau16 Pointer to the string buffer.
2487 * @param c The number of items to write.
2488 */
2489#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2490DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2491#else
2492DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2493{
2494# if RT_INLINE_ASM_GNU_STYLE
2495 __asm__ __volatile__("rep; outsw\n\t"
2496 : "+S" (pau16),
2497 "+c" (c)
2498 : "d" (Port));
2499
2500# elif RT_INLINE_ASM_USES_INTRIN
2501 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2502
2503# else
2504 __asm
2505 {
2506 mov dx, [Port]
2507 mov ecx, [c]
2508 mov eax, [pau16]
2509 xchg esi, eax
2510 rep outsw
2511 xchg esi, eax
2512 }
2513# endif
2514}
2515#endif
2516
2517
2518/**
2519 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2520 *
2521 * @param Port I/O port to read from.
2522 * @param pau16 Pointer to the string buffer (output).
2523 * @param c The number of items to read.
2524 */
2525#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2526DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2527#else
2528DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2529{
2530# if RT_INLINE_ASM_GNU_STYLE
2531 __asm__ __volatile__("rep; insw\n\t"
2532 : "+D" (pau16),
2533 "+c" (c)
2534 : "d" (Port));
2535
2536# elif RT_INLINE_ASM_USES_INTRIN
2537 __inwordstring(Port, pau16, (unsigned long)c);
2538
2539# else
2540 __asm
2541 {
2542 mov dx, [Port]
2543 mov ecx, [c]
2544 mov eax, [pau16]
2545 xchg edi, eax
2546 rep insw
2547 xchg edi, eax
2548 }
2549# endif
2550}
2551#endif
2552
2553
2554/**
2555 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2556 *
2557 * @param Port I/O port to write to.
2558 * @param pau32 Pointer to the string buffer.
2559 * @param c The number of items to write.
2560 */
2561#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2562DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2563#else
2564DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2565{
2566# if RT_INLINE_ASM_GNU_STYLE
2567 __asm__ __volatile__("rep; outsl\n\t"
2568 : "+S" (pau32),
2569 "+c" (c)
2570 : "d" (Port));
2571
2572# elif RT_INLINE_ASM_USES_INTRIN
2573 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2574
2575# else
2576 __asm
2577 {
2578 mov dx, [Port]
2579 mov ecx, [c]
2580 mov eax, [pau32]
2581 xchg esi, eax
2582 rep outsd
2583 xchg esi, eax
2584 }
2585# endif
2586}
2587#endif
2588
2589
2590/**
2591 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2592 *
2593 * @param Port I/O port to read from.
2594 * @param pau32 Pointer to the string buffer (output).
2595 * @param c The number of items to read.
2596 */
2597#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2598DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2599#else
2600DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2601{
2602# if RT_INLINE_ASM_GNU_STYLE
2603 __asm__ __volatile__("rep; insl\n\t"
2604 : "+D" (pau32),
2605 "+c" (c)
2606 : "d" (Port));
2607
2608# elif RT_INLINE_ASM_USES_INTRIN
2609 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2610
2611# else
2612 __asm
2613 {
2614 mov dx, [Port]
2615 mov ecx, [c]
2616 mov eax, [pau32]
2617 xchg edi, eax
2618 rep insd
2619 xchg edi, eax
2620 }
2621# endif
2622}
2623#endif
2624
2625
2626/**
2627 * Invalidate page.
2628 *
2629 * @param pv Address of the page to invalidate.
2630 */
2631#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2632DECLASM(void) ASMInvalidatePage(void *pv);
2633#else
2634DECLINLINE(void) ASMInvalidatePage(void *pv)
2635{
2636# if RT_INLINE_ASM_USES_INTRIN
2637 __invlpg(pv);
2638
2639# elif RT_INLINE_ASM_GNU_STYLE
2640 __asm__ __volatile__("invlpg %0\n\t"
2641 : : "m" (*(uint8_t *)pv));
2642# else
2643 __asm
2644 {
2645# ifdef RT_ARCH_AMD64
2646 mov rax, [pv]
2647 invlpg [rax]
2648# else
2649 mov eax, [pv]
2650 invlpg [eax]
2651# endif
2652 }
2653# endif
2654}
2655#endif
2656
2657
2658/**
2659 * Write back the internal caches and invalidate them.
2660 */
2661#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2662DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2663#else
2664DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2665{
2666# if RT_INLINE_ASM_USES_INTRIN
2667 __wbinvd();
2668
2669# elif RT_INLINE_ASM_GNU_STYLE
2670 __asm__ __volatile__("wbinvd");
2671# else
2672 __asm
2673 {
2674 wbinvd
2675 }
2676# endif
2677}
2678#endif
2679
2680
2681/**
2682 * Invalidate internal and (perhaps) external caches without first
2683 * flushing dirty cache lines. Use with extreme care.
2684 */
2685#if RT_INLINE_ASM_EXTERNAL
2686DECLASM(void) ASMInvalidateInternalCaches(void);
2687#else
2688DECLINLINE(void) ASMInvalidateInternalCaches(void)
2689{
2690# if RT_INLINE_ASM_GNU_STYLE
2691 __asm__ __volatile__("invd");
2692# else
2693 __asm
2694 {
2695 invd
2696 }
2697# endif
2698}
2699#endif
2700
2701
2702/**
2703 * Memory load/store fence, waits for any pending writes and reads to complete.
2704 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2705 */
2706DECLINLINE(void) ASMMemoryFenceSSE2(void)
2707{
2708#if RT_INLINE_ASM_GNU_STYLE
2709 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
2710#elif RT_INLINE_ASM_USES_INTRIN
2711 _mm_mfence();
2712#else
2713 __asm
2714 {
2715 _emit 0x0f
2716 _emit 0xae
2717 _emit 0xf0
2718 }
2719#endif
2720}
2721
2722
2723/**
2724 * Memory store fence, waits for any writes to complete.
2725 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
2726 */
2727DECLINLINE(void) ASMWriteFenceSSE(void)
2728{
2729#if RT_INLINE_ASM_GNU_STYLE
2730 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
2731#elif RT_INLINE_ASM_USES_INTRIN
2732 _mm_sfence();
2733#else
2734 __asm
2735 {
2736 _emit 0x0f
2737 _emit 0xae
2738 _emit 0xf8
2739 }
2740#endif
2741}
2742
2743
2744/**
2745 * Memory load fence, waits for any pending reads to complete.
2746 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2747 */
2748DECLINLINE(void) ASMReadFenceSSE2(void)
2749{
2750#if RT_INLINE_ASM_GNU_STYLE
2751 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
2752#elif RT_INLINE_ASM_USES_INTRIN
2753 _mm_lfence();
2754#else
2755 __asm
2756 {
2757 _emit 0x0f
2758 _emit 0xae
2759 _emit 0xe8
2760 }
2761#endif
2762}
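
/**
 * Example: publishing data to another CPU, fencing the payload store before
 * the ready-flag store. Plain x86 stores are already ordered, so this
 * matters mainly for non-temporal stores; a sketch with an assumed shared
 * structure pShared, the reader side pairing it with ASMMemoryFenceSSE2()
 * or an atomic read:
 * @code
 * pShared->u64Payload = u64Value; // store the data
 * ASMWriteFenceSSE();             // ensure it is visible before the flag
 * pShared->fReady = true;
 * @endcode
 */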
2763
2764/** @} */
2765#endif
2766