VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@ 33021

Last change on this file since 33021 was 31427, checked in by vboxsync, 14 years ago

mac build fix

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 57.3 KB
1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2010 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
31# error "Not on AMD64 or x86"
32#endif
33
34#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
35# include <intrin.h>
36 /* Emit the intrinsics at all optimization levels. */
37# pragma intrinsic(_ReadWriteBarrier)
38# pragma intrinsic(__cpuid)
39# pragma intrinsic(_enable)
40# pragma intrinsic(_disable)
41# pragma intrinsic(__rdtsc)
42# pragma intrinsic(__readmsr)
43# pragma intrinsic(__writemsr)
44# pragma intrinsic(__outbyte)
45# pragma intrinsic(__outbytestring)
46# pragma intrinsic(__outword)
47# pragma intrinsic(__outwordstring)
48# pragma intrinsic(__outdword)
49# pragma intrinsic(__outdwordstring)
50# pragma intrinsic(__inbyte)
51# pragma intrinsic(__inbytestring)
52# pragma intrinsic(__inword)
53# pragma intrinsic(__inwordstring)
54# pragma intrinsic(__indword)
55# pragma intrinsic(__indwordstring)
56# pragma intrinsic(__invlpg)
57# pragma intrinsic(__wbinvd)
58# pragma intrinsic(__readcr0)
59# pragma intrinsic(__readcr2)
60# pragma intrinsic(__readcr3)
61# pragma intrinsic(__readcr4)
62# pragma intrinsic(__writecr0)
63# pragma intrinsic(__writecr3)
64# pragma intrinsic(__writecr4)
65# pragma intrinsic(__readdr)
66# pragma intrinsic(__writedr)
67# ifdef RT_ARCH_AMD64
68# pragma intrinsic(__readcr8)
69# pragma intrinsic(__writecr8)
70# endif
71#endif
72
73
74
75/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
76 * @ingroup grp_rt_asm
77 * @{
78 */
79
80/** @todo find a more proper place for this structure? */
81#pragma pack(1)
82/** IDTR */
83typedef struct RTIDTR
84{
85 /** Size of the IDT. */
86 uint16_t cbIdt;
87 /** Address of the IDT. */
88 uintptr_t pIdt;
89} RTIDTR, *PRTIDTR;
90#pragma pack()
91
92#pragma pack(1)
93/** GDTR */
94typedef struct RTGDTR
95{
96 /** Size of the GDT. */
97 uint16_t cbGdt;
98 /** Address of the GDT. */
99 uintptr_t pGdt;
100} RTGDTR, *PRTGDTR;
101#pragma pack()
102
103
104/**
105 * Gets the content of the IDTR CPU register.
106 * @param pIdtr Where to store the IDTR contents.
107 */
108#if RT_INLINE_ASM_EXTERNAL
109DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
110#else
111DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
112{
113# if RT_INLINE_ASM_GNU_STYLE
114 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
115# else
116 __asm
117 {
118# ifdef RT_ARCH_AMD64
119 mov rax, [pIdtr]
120 sidt [rax]
121# else
122 mov eax, [pIdtr]
123 sidt [eax]
124# endif
125 }
126# endif
127}
128#endif
129
130
131/**
132 * Sets the content of the IDTR CPU register.
133 * @param pIdtr Where to load the IDTR contents from.
134 */
135#if RT_INLINE_ASM_EXTERNAL
136DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
137#else
138DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
139{
140# if RT_INLINE_ASM_GNU_STYLE
141 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
142# else
143 __asm
144 {
145# ifdef RT_ARCH_AMD64
146 mov rax, [pIdtr]
147 lidt [rax]
148# else
149 mov eax, [pIdtr]
150 lidt [eax]
151# endif
152 }
153# endif
154}
155#endif
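
/*
 * Usage sketch: save the current IDTR, install a private IDT, and restore the
 * original when done. The gate table g_aMyIdt is hypothetical.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleSwapIdt(void)
{
    RTIDTR IdtrSaved;
    RTIDTR IdtrNew;
    ASMGetIDTR(&IdtrSaved);
    IdtrNew.cbIdt = sizeof(g_aMyIdt) - 1;      /* the limit is the size minus one */
    IdtrNew.pIdt  = (uintptr_t)&g_aMyIdt[0];
    ASMSetIDTR(&IdtrNew);
    /* ... run with the private IDT installed ... */
    ASMSetIDTR(&IdtrSaved);
}
#endif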
156
157
158/**
159 * Gets the content of the GDTR CPU register.
160 * @param pGdtr Where to store the GDTR contents.
161 */
162#if RT_INLINE_ASM_EXTERNAL
163DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
164#else
165DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
166{
167# if RT_INLINE_ASM_GNU_STYLE
168 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
169# else
170 __asm
171 {
172# ifdef RT_ARCH_AMD64
173 mov rax, [pGdtr]
174 sgdt [rax]
175# else
176 mov eax, [pGdtr]
177 sgdt [eax]
178# endif
179 }
180# endif
181}
182#endif
183
184/**
185 * Get the CS register.
186 * @returns cs.
187 */
188#if RT_INLINE_ASM_EXTERNAL
189DECLASM(RTSEL) ASMGetCS(void);
190#else
191DECLINLINE(RTSEL) ASMGetCS(void)
192{
193 RTSEL SelCS;
194# if RT_INLINE_ASM_GNU_STYLE
195 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
196# else
197 __asm
198 {
199 mov ax, cs
200 mov [SelCS], ax
201 }
202# endif
203 return SelCS;
204}
205#endif
206
207
208/**
209 * Get the DS register.
210 * @returns DS.
211 */
212#if RT_INLINE_ASM_EXTERNAL
213DECLASM(RTSEL) ASMGetDS(void);
214#else
215DECLINLINE(RTSEL) ASMGetDS(void)
216{
217 RTSEL SelDS;
218# if RT_INLINE_ASM_GNU_STYLE
219 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
220# else
221 __asm
222 {
223 mov ax, ds
224 mov [SelDS], ax
225 }
226# endif
227 return SelDS;
228}
229#endif
230
231
232/**
233 * Get the ES register.
234 * @returns ES.
235 */
236#if RT_INLINE_ASM_EXTERNAL
237DECLASM(RTSEL) ASMGetES(void);
238#else
239DECLINLINE(RTSEL) ASMGetES(void)
240{
241 RTSEL SelES;
242# if RT_INLINE_ASM_GNU_STYLE
243 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
244# else
245 __asm
246 {
247 mov ax, es
248 mov [SelES], ax
249 }
250# endif
251 return SelES;
252}
253#endif
254
255
256/**
257 * Get the FS register.
258 * @returns FS.
259 */
260#if RT_INLINE_ASM_EXTERNAL
261DECLASM(RTSEL) ASMGetFS(void);
262#else
263DECLINLINE(RTSEL) ASMGetFS(void)
264{
265 RTSEL SelFS;
266# if RT_INLINE_ASM_GNU_STYLE
267 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
268# else
269 __asm
270 {
271 mov ax, fs
272 mov [SelFS], ax
273 }
274# endif
275 return SelFS;
276}
277#endif
278
279
280/**
281 * Get the GS register.
282 * @returns GS.
283 */
284#if RT_INLINE_ASM_EXTERNAL
285DECLASM(RTSEL) ASMGetGS(void);
286#else
287DECLINLINE(RTSEL) ASMGetGS(void)
288{
289 RTSEL SelGS;
290# if RT_INLINE_ASM_GNU_STYLE
291 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
292# else
293 __asm
294 {
295 mov ax, gs
296 mov [SelGS], ax
297 }
298# endif
299 return SelGS;
300}
301#endif
302
303
304/**
305 * Get the SS register.
306 * @returns SS.
307 */
308#if RT_INLINE_ASM_EXTERNAL
309DECLASM(RTSEL) ASMGetSS(void);
310#else
311DECLINLINE(RTSEL) ASMGetSS(void)
312{
313 RTSEL SelSS;
314# if RT_INLINE_ASM_GNU_STYLE
315 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
316# else
317 __asm
318 {
319 mov ax, ss
320 mov [SelSS], ax
321 }
322# endif
323 return SelSS;
324}
325#endif
326
327
328/**
329 * Get the TR register.
330 * @returns TR.
331 */
332#if RT_INLINE_ASM_EXTERNAL
333DECLASM(RTSEL) ASMGetTR(void);
334#else
335DECLINLINE(RTSEL) ASMGetTR(void)
336{
337 RTSEL SelTR;
338# if RT_INLINE_ASM_GNU_STYLE
339 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
340# else
341 __asm
342 {
343 str ax
344 mov [SelTR], ax
345 }
346# endif
347 return SelTR;
348}
349#endif
350
351
352/**
353 * Get the [RE]FLAGS register.
354 * @returns [RE]FLAGS.
355 */
356#if RT_INLINE_ASM_EXTERNAL
357DECLASM(RTCCUINTREG) ASMGetFlags(void);
358#else
359DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
360{
361 RTCCUINTREG uFlags;
362# if RT_INLINE_ASM_GNU_STYLE
363# ifdef RT_ARCH_AMD64
364 __asm__ __volatile__("pushfq\n\t"
365 "popq %0\n\t"
366 : "=r" (uFlags));
367# else
368 __asm__ __volatile__("pushfl\n\t"
369 "popl %0\n\t"
370 : "=r" (uFlags));
371# endif
372# else
373 __asm
374 {
375# ifdef RT_ARCH_AMD64
376 pushfq
377 pop [uFlags]
378# else
379 pushfd
380 pop [uFlags]
381# endif
382 }
383# endif
384 return uFlags;
385}
386#endif
387
388
389/**
390 * Set the [RE]FLAGS register.
391 * @param uFlags The new [RE]FLAGS value.
392 */
393#if RT_INLINE_ASM_EXTERNAL
394DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
395#else
396DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
397{
398# if RT_INLINE_ASM_GNU_STYLE
399# ifdef RT_ARCH_AMD64
400 __asm__ __volatile__("pushq %0\n\t"
401 "popfq\n\t"
402 : : "g" (uFlags));
403# else
404 __asm__ __volatile__("pushl %0\n\t"
405 "popfl\n\t"
406 : : "g" (uFlags));
407# endif
408# else
409 __asm
410 {
411# ifdef RT_ARCH_AMD64
412 push [uFlags]
413 popfq
414# else
415 push [uFlags]
416 popfd
417# endif
418 }
419# endif
420}
421#endif
422
423
424/**
425 * Gets the content of the CPU timestamp counter register.
426 *
427 * @returns TSC.
428 */
429#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
430DECLASM(uint64_t) ASMReadTSC(void);
431#else
432DECLINLINE(uint64_t) ASMReadTSC(void)
433{
434 RTUINT64U u;
435# if RT_INLINE_ASM_GNU_STYLE
436 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
437# else
438# if RT_INLINE_ASM_USES_INTRIN
439 u.u = __rdtsc();
440# else
441 __asm
442 {
443 rdtsc
444 mov [u.s.Lo], eax
445 mov [u.s.Hi], edx
446 }
447# endif
448# endif
449 return u.u;
450}
451#endif
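
/*
 * Usage sketch: bracket a piece of work with ASMReadTSC to get a rough cycle
 * count. The workload function is hypothetical, and since rdtsc is not a
 * serializing instruction the result is only an approximation.
 */
#if 0 /* illustrative sketch, not compiled */
static uint64_t rtExampleMeasureCycles(void)
{
    uint64_t const uStart = ASMReadTSC();
    rtExampleWorkload();                       /* hypothetical workload */
    return ASMReadTSC() - uStart;
}
#endif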
452
453
454/**
455 * Performs the cpuid instruction returning all registers.
456 *
457 * @param uOperator CPUID operation (eax).
458 * @param pvEAX Where to store eax.
459 * @param pvEBX Where to store ebx.
460 * @param pvECX Where to store ecx.
461 * @param pvEDX Where to store edx.
462 * @remark We're using void pointers to ease the use of special bitfield structures and such.
463 */
464#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
465DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
466#else
467DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
468{
469# if RT_INLINE_ASM_GNU_STYLE
470# ifdef RT_ARCH_AMD64
471 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
472 __asm__ __volatile__ ("cpuid\n\t"
473 : "=a" (uRAX),
474 "=b" (uRBX),
475 "=c" (uRCX),
476 "=d" (uRDX)
477 : "0" (uOperator));
478 *(uint32_t *)pvEAX = (uint32_t)uRAX;
479 *(uint32_t *)pvEBX = (uint32_t)uRBX;
480 *(uint32_t *)pvECX = (uint32_t)uRCX;
481 *(uint32_t *)pvEDX = (uint32_t)uRDX;
482# else
483 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
484 "cpuid\n\t"
485 "xchgl %%ebx, %1\n\t"
486 : "=a" (*(uint32_t *)pvEAX),
487 "=r" (*(uint32_t *)pvEBX),
488 "=c" (*(uint32_t *)pvECX),
489 "=d" (*(uint32_t *)pvEDX)
490 : "0" (uOperator));
491# endif
492
493# elif RT_INLINE_ASM_USES_INTRIN
494 int aInfo[4];
495 __cpuid(aInfo, uOperator);
496 *(uint32_t *)pvEAX = aInfo[0];
497 *(uint32_t *)pvEBX = aInfo[1];
498 *(uint32_t *)pvECX = aInfo[2];
499 *(uint32_t *)pvEDX = aInfo[3];
500
501# else
502 uint32_t uEAX;
503 uint32_t uEBX;
504 uint32_t uECX;
505 uint32_t uEDX;
506 __asm
507 {
508 push ebx
509 mov eax, [uOperator]
510 cpuid
511 mov [uEAX], eax
512 mov [uEBX], ebx
513 mov [uECX], ecx
514 mov [uEDX], edx
515 pop ebx
516 }
517 *(uint32_t *)pvEAX = uEAX;
518 *(uint32_t *)pvEBX = uEBX;
519 *(uint32_t *)pvECX = uECX;
520 *(uint32_t *)pvEDX = uEDX;
521# endif
522}
523#endif
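
/*
 * Usage sketch: fetch the 12-character CPUID vendor string via leaf 0. The
 * vendor bytes come back in EBX, EDX, ECX order; assumes <string.h> for memcpy.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleCpuVendor(char szVendor[13])
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    memcpy(&szVendor[0], &uEBX, 4);            /* "Genu" / "Auth" */
    memcpy(&szVendor[4], &uEDX, 4);            /* "ineI" / "enti" */
    memcpy(&szVendor[8], &uECX, 4);            /* "ntel" / "cAMD" */
    szVendor[12] = '\0';
}
#endif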
524
525
526/**
527 * Performs the cpuid instruction returning all registers.
528 * Some cpuid leaves take ECX as an additional input parameter (currently known for EAX=4).
529 *
530 * @param uOperator CPUID operation (eax).
531 * @param uIdxECX The ECX sub-leaf index.
532 * @param pvEAX Where to store eax.
533 * @param pvEBX Where to store ebx.
534 * @param pvECX Where to store ecx.
535 * @param pvEDX Where to store edx.
536 * @remark We're using void pointers to ease the use of special bitfield structures and such.
537 */
538#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
539DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
540#else
541DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
542{
543# if RT_INLINE_ASM_GNU_STYLE
544# ifdef RT_ARCH_AMD64
545 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
546 __asm__ ("cpuid\n\t"
547 : "=a" (uRAX),
548 "=b" (uRBX),
549 "=c" (uRCX),
550 "=d" (uRDX)
551 : "0" (uOperator),
552 "2" (uIdxECX));
553 *(uint32_t *)pvEAX = (uint32_t)uRAX;
554 *(uint32_t *)pvEBX = (uint32_t)uRBX;
555 *(uint32_t *)pvECX = (uint32_t)uRCX;
556 *(uint32_t *)pvEDX = (uint32_t)uRDX;
557# else
558 __asm__ ("xchgl %%ebx, %1\n\t"
559 "cpuid\n\t"
560 "xchgl %%ebx, %1\n\t"
561 : "=a" (*(uint32_t *)pvEAX),
562 "=r" (*(uint32_t *)pvEBX),
563 "=c" (*(uint32_t *)pvECX),
564 "=d" (*(uint32_t *)pvEDX)
565 : "0" (uOperator),
566 "2" (uIdxECX));
567# endif
568
569# elif RT_INLINE_ASM_USES_INTRIN
570 int aInfo[4];
571 /* Use __cpuidex (newer MSC versions) so the sub-leaf index reaches ECX. */
572 __cpuidex(aInfo, uOperator, uIdxECX);
573 *(uint32_t *)pvEAX = aInfo[0];
574 *(uint32_t *)pvEBX = aInfo[1];
575 *(uint32_t *)pvECX = aInfo[2];
576 *(uint32_t *)pvEDX = aInfo[3];
577
578# else
579 uint32_t uEAX;
580 uint32_t uEBX;
581 uint32_t uECX;
582 uint32_t uEDX;
583 __asm
584 {
585 push ebx
586 mov eax, [uOperator]
587 mov ecx, [uIdxECX]
588 cpuid
589 mov [uEAX], eax
590 mov [uEBX], ebx
591 mov [uECX], ecx
592 mov [uEDX], edx
593 pop ebx
594 }
595 *(uint32_t *)pvEAX = uEAX;
596 *(uint32_t *)pvEBX = uEBX;
597 *(uint32_t *)pvECX = uECX;
598 *(uint32_t *)pvEDX = uEDX;
599# endif
600}
601#endif
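
/*
 * Usage sketch: walk the deterministic cache parameter leaves (EAX=4), which
 * need the ECX sub-leaf input that plain ASMCpuId cannot supply. Intel only;
 * a zero cache type in EAX[4:0] terminates the enumeration.
 */
#if 0 /* illustrative sketch, not compiled */
static unsigned rtExampleCountCacheLevels(void)
{
    unsigned cCaches = 0;
    uint32_t iSub;
    for (iSub = 0; iSub < 64; iSub++)
    {
        uint32_t uEAX, uEBX, uECX, uEDX;
        ASMCpuId_Idx_ECX(4, iSub, &uEAX, &uEBX, &uECX, &uEDX);
        if (!(uEAX & 0x1f))                    /* cache type 0 = no more caches */
            break;
        cCaches++;
    }
    return cCaches;
}
#endif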
602
603
604/**
605 * Performs the cpuid instruction returning ecx and edx.
606 *
607 * @param uOperator CPUID operation (eax).
608 * @param pvECX Where to store ecx.
609 * @param pvEDX Where to store edx.
610 * @remark We're using void pointers to ease the use of special bitfield structures and such.
611 */
612#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
613DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
614#else
615DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
616{
617 uint32_t uEBX;
618 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
619}
620#endif
621
622
623/**
624 * Performs the cpuid instruction returning eax.
625 *
626 * @param uOperator CPUID operation (eax).
627 * @returns EAX after cpuid operation.
628 */
629#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
630DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
631#else
632DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
633{
634 RTCCUINTREG xAX;
635# if RT_INLINE_ASM_GNU_STYLE
636# ifdef RT_ARCH_AMD64
637 __asm__ ("cpuid"
638 : "=a" (xAX)
639 : "0" (uOperator)
640 : "rbx", "rcx", "rdx");
641# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
642 __asm__ ("push %%ebx\n\t"
643 "cpuid\n\t"
644 "pop %%ebx\n\t"
645 : "=a" (xAX)
646 : "0" (uOperator)
647 : "ecx", "edx");
648# else
649 __asm__ ("cpuid"
650 : "=a" (xAX)
651 : "0" (uOperator)
652 : "edx", "ecx", "ebx");
653# endif
654
655# elif RT_INLINE_ASM_USES_INTRIN
656 int aInfo[4];
657 __cpuid(aInfo, uOperator);
658 xAX = aInfo[0];
659
660# else
661 __asm
662 {
663 push ebx
664 mov eax, [uOperator]
665 cpuid
666 mov [xAX], eax
667 pop ebx
668 }
669# endif
670 return (uint32_t)xAX;
671}
672#endif
673
674
675/**
676 * Performs the cpuid instruction returning ebx.
677 *
678 * @param uOperator CPUID operation (eax).
679 * @returns EBX after cpuid operation.
680 */
681#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
682DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
683#else
684DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
685{
686 RTCCUINTREG xBX;
687# if RT_INLINE_ASM_GNU_STYLE
688# ifdef RT_ARCH_AMD64
689 RTCCUINTREG uSpill;
690 __asm__ ("cpuid"
691 : "=a" (uSpill),
692 "=b" (xBX)
693 : "0" (uOperator)
694 : "rdx", "rcx");
695# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
696 __asm__ ("push %%ebx\n\t"
697 "cpuid\n\t"
698 "mov %%ebx, %%edx\n\t"
699 "pop %%ebx\n\t"
700 : "=a" (uOperator),
701 "=d" (xBX)
702 : "0" (uOperator)
703 : "ecx");
704# else
705 __asm__ ("cpuid"
706 : "=a" (uOperator),
707 "=b" (xBX)
708 : "0" (uOperator)
709 : "edx", "ecx");
710# endif
711
712# elif RT_INLINE_ASM_USES_INTRIN
713 int aInfo[4];
714 __cpuid(aInfo, uOperator);
715 xBX = aInfo[1];
716
717# else
718 __asm
719 {
720 push ebx
721 mov eax, [uOperator]
722 cpuid
723 mov [xBX], ebx
724 pop ebx
725 }
726# endif
727 return (uint32_t)xBX;
728}
729#endif
730
731
732/**
733 * Performs the cpuid instruction returning ecx.
734 *
735 * @param uOperator CPUID operation (eax).
736 * @returns ECX after cpuid operation.
737 */
738#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
739DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
740#else
741DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
742{
743 RTCCUINTREG xCX;
744# if RT_INLINE_ASM_GNU_STYLE
745# ifdef RT_ARCH_AMD64
746 RTCCUINTREG uSpill;
747 __asm__ ("cpuid"
748 : "=a" (uSpill),
749 "=c" (xCX)
750 : "0" (uOperator)
751 : "rbx", "rdx");
752# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
753 __asm__ ("push %%ebx\n\t"
754 "cpuid\n\t"
755 "pop %%ebx\n\t"
756 : "=a" (uOperator),
757 "=c" (xCX)
758 : "0" (uOperator)
759 : "edx");
760# else
761 __asm__ ("cpuid"
762 : "=a" (uOperator),
763 "=c" (xCX)
764 : "0" (uOperator)
765 : "ebx", "edx");
766
767# endif
768
769# elif RT_INLINE_ASM_USES_INTRIN
770 int aInfo[4];
771 __cpuid(aInfo, uOperator);
772 xCX = aInfo[2];
773
774# else
775 __asm
776 {
777 push ebx
778 mov eax, [uOperator]
779 cpuid
780 mov [xCX], ecx
781 pop ebx
782 }
783# endif
784 return (uint32_t)xCX;
785}
786#endif
787
788
789/**
790 * Performs the cpuid instruction returning edx.
791 *
792 * @param uOperator CPUID operation (eax).
793 * @returns EDX after cpuid operation.
794 */
795#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
796DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
797#else
798DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
799{
800 RTCCUINTREG xDX;
801# if RT_INLINE_ASM_GNU_STYLE
802# ifdef RT_ARCH_AMD64
803 RTCCUINTREG uSpill;
804 __asm__ ("cpuid"
805 : "=a" (uSpill),
806 "=d" (xDX)
807 : "0" (uOperator)
808 : "rbx", "rcx");
809# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
810 __asm__ ("push %%ebx\n\t"
811 "cpuid\n\t"
812 "pop %%ebx\n\t"
813 : "=a" (uOperator),
814 "=d" (xDX)
815 : "0" (uOperator)
816 : "ecx");
817# else
818 __asm__ ("cpuid"
819 : "=a" (uOperator),
820 "=d" (xDX)
821 : "0" (uOperator)
822 : "ebx", "ecx");
823# endif
824
825# elif RT_INLINE_ASM_USES_INTRIN
826 int aInfo[4];
827 __cpuid(aInfo, uOperator);
828 xDX = aInfo[3];
829
830# else
831 __asm
832 {
833 push ebx
834 mov eax, [uOperator]
835 cpuid
836 mov [xDX], edx
837 pop ebx
838 }
839# endif
840 return (uint32_t)xDX;
841}
842#endif
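
/*
 * Usage sketch: test a standard feature bit. SSE2 is bit 26 of the EDX output
 * of leaf 1 (X86_CPUID_FEATURE_EDX_SSE2 elsewhere in IPRT).
 */
#if 0 /* illustrative sketch, not compiled */
static bool rtExampleHasSse2(void)
{
    return RT_BOOL(ASMCpuId_EDX(1) & RT_BIT_32(26));
}
#endif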
843
844
845/**
846 * Checks if the current CPU supports CPUID.
847 *
848 * @returns true if CPUID is supported.
849 */
850DECLINLINE(bool) ASMHasCpuId(void)
851{
852#ifdef RT_ARCH_AMD64
853 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
854#else /* !RT_ARCH_AMD64 */
855 bool fRet = false;
856# if RT_INLINE_ASM_GNU_STYLE
857 uint32_t u1;
858 uint32_t u2;
859 __asm__ ("pushf\n\t"
860 "pop %1\n\t"
861 "mov %1, %2\n\t"
862 "xorl $0x200000, %1\n\t"
863 "push %1\n\t"
864 "popf\n\t"
865 "pushf\n\t"
866 "pop %1\n\t"
867 "cmpl %1, %2\n\t"
868 "setne %0\n\t"
869 "push %2\n\t"
870 "popf\n\t"
871 : "=m" (fRet), "=r" (u1), "=r" (u2));
872# else
873 __asm
874 {
875 pushfd
876 pop eax
877 mov ebx, eax
878 xor eax, 0200000h
879 push eax
880 popfd
881 pushfd
882 pop eax
883 cmp eax, ebx
884 setne fRet
885 push ebx
886 popfd
887 }
888# endif
889 return fRet;
890#endif /* !RT_ARCH_AMD64 */
891}
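
/*
 * Usage sketch: on 32-bit targets cpuid may be absent (late 486 era), so guard
 * the leaf 1 query with ASMHasCpuId and check the maximum standard leaf first.
 */
#if 0 /* illustrative sketch, not compiled */
static uint32_t rtExampleSafeLeaf1Eax(void)
{
    if (!ASMHasCpuId())
        return 0;
    if (ASMCpuId_EAX(0) < 1)                   /* leaf 0 EAX = highest std leaf */
        return 0;
    return ASMCpuId_EAX(1);
}
#endif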
892
893
894/**
895 * Gets the APIC ID of the current CPU.
896 *
897 * @returns the APIC ID.
898 */
899#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
900DECLASM(uint8_t) ASMGetApicId(void);
901#else
902DECLINLINE(uint8_t) ASMGetApicId(void)
903{
904 RTCCUINTREG xBX;
905# if RT_INLINE_ASM_GNU_STYLE
906# ifdef RT_ARCH_AMD64
907 RTCCUINTREG uSpill;
908 __asm__ __volatile__ ("cpuid"
909 : "=a" (uSpill),
910 "=b" (xBX)
911 : "0" (1)
912 : "rcx", "rdx");
913# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
914 RTCCUINTREG uSpill;
915 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
916 "cpuid\n\t"
917 "xchgl %%ebx,%1\n\t"
918 : "=a" (uSpill),
919 "=rm" (xBX)
920 : "0" (1)
921 : "ecx", "edx");
922# else
923 RTCCUINTREG uSpill;
924 __asm__ __volatile__ ("cpuid"
925 : "=a" (uSpill),
926 "=b" (xBX)
927 : "0" (1)
928 : "ecx", "edx");
929# endif
930
931# elif RT_INLINE_ASM_USES_INTRIN
932 int aInfo[4];
933 __cpuid(aInfo, 1);
934 xBX = aInfo[1];
935
936# else
937 __asm
938 {
939 push ebx
940 mov eax, 1
941 cpuid
942 mov [xBX], ebx
943 pop ebx
944 }
945# endif
946 return (uint8_t)(xBX >> 24);
947}
948#endif
949
950
951/**
952 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
953 *
954 * @returns true/false.
955 * @param uEBX EBX return from ASMCpuId(0)
956 * @param uECX ECX return from ASMCpuId(0)
957 * @param uEDX EDX return from ASMCpuId(0)
958 */
959DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
960{
961 return uEBX == UINT32_C(0x756e6547)
962 && uECX == UINT32_C(0x6c65746e)
963 && uEDX == UINT32_C(0x49656e69);
964}
965
966
967/**
968 * Tests if this is a genuine Intel CPU.
969 *
970 * @returns true/false.
971 * @remarks ASSUMES that cpuid is supported by the CPU.
972 */
973DECLINLINE(bool) ASMIsIntelCpu(void)
974{
975 uint32_t uEAX, uEBX, uECX, uEDX;
976 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
977 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
978}
979
980
981/**
982 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
983 *
984 * @returns true/false.
985 * @param uEBX EBX return from ASMCpuId(0)
986 * @param uECX ECX return from ASMCpuId(0)
987 * @param uEDX EDX return from ASMCpuId(0)
988 */
989DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
990{
991 return uEBX == UINT32_C(0x68747541)
992 && uECX == UINT32_C(0x444d4163)
993 && uEDX == UINT32_C(0x69746e65);
994}
995
996
997/**
998 * Tests if this is an authentic AMD CPU.
999 *
1000 * @returns true/false.
1001 * @remarks ASSUMES that cpuid is supported by the CPU.
1002 */
1003DECLINLINE(bool) ASMIsAmdCpu(void)
1004{
1005 uint32_t uEAX, uEBX, uECX, uEDX;
1006 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1007 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1008}
1009
1010
1011/**
1012 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1013 *
1014 * @returns Family.
1015 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1016 */
1017DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1018{
1019 return ((uEAX >> 8) & 0xf) == 0xf
1020 ? ((uEAX >> 20) & 0x7f) + 0xf
1021 : ((uEAX >> 8) & 0xf);
1022}
1023
1024
1025/**
1026 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1027 *
1028 * @returns Model.
1029 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1030 */
1031DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1032{
1033 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1034 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1035 : ((uEAX >> 4) & 0xf);
1036}
1037
1038
1039/**
1040 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1041 *
1042 * @returns Model.
1043 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1044 */
1045DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1046{
1047 return ((uEAX >> 8) & 0xf) == 0xf
1048 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1049 : ((uEAX >> 4) & 0xf);
1050}
1051
1052
1053/**
1054 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1055 *
1056 * @returns Model.
1057 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1058 * @param fIntel Whether it's an Intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1059 */
1060DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1061{
1062 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1063 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1064 : ((uEAX >> 4) & 0xf);
1065}
1066
1067
1068/**
1069 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1070 *
1071 * @returns Stepping.
1072 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1073 */
1074DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1075{
1076 return uEAX & 0xf;
1077}
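
/*
 * Usage sketch: decode the leaf 1 signature with the helpers above; the Intel
 * flag selects the right extended-model rule in ASMGetCpuModel.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleCpuSignature(uint32_t *puFamily, uint32_t *puModel, uint32_t *puStepping)
{
    uint32_t const uEAX = ASMCpuId_EAX(1);
    *puFamily   = ASMGetCpuFamily(uEAX);
    *puModel    = ASMGetCpuModel(uEAX, ASMIsIntelCpu());
    *puStepping = ASMGetCpuStepping(uEAX);
}
#endif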
1078
1079
1080/**
1081 * Get cr0.
1082 * @returns cr0.
1083 */
1084#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1085DECLASM(RTCCUINTREG) ASMGetCR0(void);
1086#else
1087DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
1088{
1089 RTCCUINTREG uCR0;
1090# if RT_INLINE_ASM_USES_INTRIN
1091 uCR0 = __readcr0();
1092
1093# elif RT_INLINE_ASM_GNU_STYLE
1094# ifdef RT_ARCH_AMD64
1095 __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
1096# else
1097 __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
1098# endif
1099# else
1100 __asm
1101 {
1102# ifdef RT_ARCH_AMD64
1103 mov rax, cr0
1104 mov [uCR0], rax
1105# else
1106 mov eax, cr0
1107 mov [uCR0], eax
1108# endif
1109 }
1110# endif
1111 return uCR0;
1112}
1113#endif
1114
1115
1116/**
1117 * Sets the CR0 register.
1118 * @param uCR0 The new CR0 value.
1119 */
1120#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1121DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
1122#else
1123DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
1124{
1125# if RT_INLINE_ASM_USES_INTRIN
1126 __writecr0(uCR0);
1127
1128# elif RT_INLINE_ASM_GNU_STYLE
1129# ifdef RT_ARCH_AMD64
1130 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1131# else
1132 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1133# endif
1134# else
1135 __asm
1136 {
1137# ifdef RT_ARCH_AMD64
1138 mov rax, [uCR0]
1139 mov cr0, rax
1140# else
1141 mov eax, [uCR0]
1142 mov cr0, eax
1143# endif
1144 }
1145# endif
1146}
1147#endif
1148
1149
1150/**
1151 * Get cr2.
1152 * @returns cr2.
1153 */
1154#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1155DECLASM(RTCCUINTREG) ASMGetCR2(void);
1156#else
1157DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
1158{
1159 RTCCUINTREG uCR2;
1160# if RT_INLINE_ASM_USES_INTRIN
1161 uCR2 = __readcr2();
1162
1163# elif RT_INLINE_ASM_GNU_STYLE
1164# ifdef RT_ARCH_AMD64
1165 __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
1166# else
1167 __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
1168# endif
1169# else
1170 __asm
1171 {
1172# ifdef RT_ARCH_AMD64
1173 mov rax, cr2
1174 mov [uCR2], rax
1175# else
1176 mov eax, cr2
1177 mov [uCR2], eax
1178# endif
1179 }
1180# endif
1181 return uCR2;
1182}
1183#endif
1184
1185
1186/**
1187 * Sets the CR2 register.
1188 * @param uCR2 The new CR2 value.
1189 */
1190#if RT_INLINE_ASM_EXTERNAL
1191DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
1192#else
1193DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
1194{
1195# if RT_INLINE_ASM_GNU_STYLE
1196# ifdef RT_ARCH_AMD64
1197 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1198# else
1199 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1200# endif
1201# else
1202 __asm
1203 {
1204# ifdef RT_ARCH_AMD64
1205 mov rax, [uCR2]
1206 mov cr2, rax
1207# else
1208 mov eax, [uCR2]
1209 mov cr2, eax
1210# endif
1211 }
1212# endif
1213}
1214#endif
1215
1216
1217/**
1218 * Get cr3.
1219 * @returns cr3.
1220 */
1221#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1222DECLASM(RTCCUINTREG) ASMGetCR3(void);
1223#else
1224DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
1225{
1226 RTCCUINTREG uCR3;
1227# if RT_INLINE_ASM_USES_INTRIN
1228 uCR3 = __readcr3();
1229
1230# elif RT_INLINE_ASM_GNU_STYLE
1231# ifdef RT_ARCH_AMD64
1232 __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
1233# else
1234 __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
1235# endif
1236# else
1237 __asm
1238 {
1239# ifdef RT_ARCH_AMD64
1240 mov rax, cr3
1241 mov [uCR3], rax
1242# else
1243 mov eax, cr3
1244 mov [uCR3], eax
1245# endif
1246 }
1247# endif
1248 return uCR3;
1249}
1250#endif
1251
1252
1253/**
1254 * Sets the CR3 register.
1255 *
1256 * @param uCR3 New CR3 value.
1257 */
1258#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1259DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
1260#else
1261DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
1262{
1263# if RT_INLINE_ASM_USES_INTRIN
1264 __writecr3(uCR3);
1265
1266# elif RT_INLINE_ASM_GNU_STYLE
1267# ifdef RT_ARCH_AMD64
1268 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1269# else
1270 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1271# endif
1272# else
1273 __asm
1274 {
1275# ifdef RT_ARCH_AMD64
1276 mov rax, [uCR3]
1277 mov cr3, rax
1278# else
1279 mov eax, [uCR3]
1280 mov cr3, eax
1281# endif
1282 }
1283# endif
1284}
1285#endif
1286
1287
1288/**
1289 * Reloads the CR3 register.
1290 */
1291#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1292DECLASM(void) ASMReloadCR3(void);
1293#else
1294DECLINLINE(void) ASMReloadCR3(void)
1295{
1296# if RT_INLINE_ASM_USES_INTRIN
1297 __writecr3(__readcr3());
1298
1299# elif RT_INLINE_ASM_GNU_STYLE
1300 RTCCUINTREG u;
1301# ifdef RT_ARCH_AMD64
1302 __asm__ __volatile__("movq %%cr3, %0\n\t"
1303 "movq %0, %%cr3\n\t"
1304 : "=r" (u));
1305# else
1306 __asm__ __volatile__("movl %%cr3, %0\n\t"
1307 "movl %0, %%cr3\n\t"
1308 : "=r" (u));
1309# endif
1310# else
1311 __asm
1312 {
1313# ifdef RT_ARCH_AMD64
1314 mov rax, cr3
1315 mov cr3, rax
1316# else
1317 mov eax, cr3
1318 mov cr3, eax
1319# endif
1320 }
1321# endif
1322}
1323#endif
1324
1325
1326/**
1327 * Get cr4.
1328 * @returns cr4.
1329 */
1330#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1331DECLASM(RTCCUINTREG) ASMGetCR4(void);
1332#else
1333DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
1334{
1335 RTCCUINTREG uCR4;
1336# if RT_INLINE_ASM_USES_INTRIN
1337 uCR4 = __readcr4();
1338
1339# elif RT_INLINE_ASM_GNU_STYLE
1340# ifdef RT_ARCH_AMD64
1341 __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
1342# else
1343 __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
1344# endif
1345# else
1346 __asm
1347 {
1348# ifdef RT_ARCH_AMD64
1349 mov rax, cr4
1350 mov [uCR4], rax
1351# else
1352 push eax /* just in case */
1353 /*mov eax, cr4*/
1354 _emit 0x0f
1355 _emit 0x20
1356 _emit 0xe0
1357 mov [uCR4], eax
1358 pop eax
1359# endif
1360 }
1361# endif
1362 return uCR4;
1363}
1364#endif
1365
1366
1367/**
1368 * Sets the CR4 register.
1369 *
1370 * @param uCR4 New CR4 value.
1371 */
1372#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1373DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
1374#else
1375DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
1376{
1377# if RT_INLINE_ASM_USES_INTRIN
1378 __writecr4(uCR4);
1379
1380# elif RT_INLINE_ASM_GNU_STYLE
1381# ifdef RT_ARCH_AMD64
1382 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1383# else
1384 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1385# endif
1386# else
1387 __asm
1388 {
1389# ifdef RT_ARCH_AMD64
1390 mov rax, [uCR4]
1391 mov cr4, rax
1392# else
1393 mov eax, [uCR4]
1394 _emit 0x0F
1395 _emit 0x22
1396 _emit 0xE0 /* mov cr4, eax */
1397# endif
1398 }
1399# endif
1400}
1401#endif
1402
1403
1404/**
1405 * Get cr8.
1406 * @returns cr8.
1407 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1408 */
1409#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1410DECLASM(RTCCUINTREG) ASMGetCR8(void);
1411#else
1412DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
1413{
1414# ifdef RT_ARCH_AMD64
1415 RTCCUINTREG uCR8;
1416# if RT_INLINE_ASM_USES_INTRIN
1417 uCR8 = __readcr8();
1418
1419# elif RT_INLINE_ASM_GNU_STYLE
1420 __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
1421# else
1422 __asm
1423 {
1424 mov rax, cr8
1425 mov [uCR8], rax
1426 }
1427# endif
1428 return uCR8;
1429# else /* !RT_ARCH_AMD64 */
1430 return 0;
1431# endif /* !RT_ARCH_AMD64 */
1432}
1433#endif
1434
1435
1436/**
1437 * Enables interrupts (EFLAGS.IF).
1438 */
1439#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1440DECLASM(void) ASMIntEnable(void);
1441#else
1442DECLINLINE(void) ASMIntEnable(void)
1443{
1444# if RT_INLINE_ASM_GNU_STYLE
1445 __asm("sti\n");
1446# elif RT_INLINE_ASM_USES_INTRIN
1447 _enable();
1448# else
1449 __asm sti
1450# endif
1451}
1452#endif
1453
1454
1455/**
1456 * Disables interrupts (!EFLAGS.IF).
1457 */
1458#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1459DECLASM(void) ASMIntDisable(void);
1460#else
1461DECLINLINE(void) ASMIntDisable(void)
1462{
1463# if RT_INLINE_ASM_GNU_STYLE
1464 __asm("cli\n");
1465# elif RT_INLINE_ASM_USES_INTRIN
1466 _disable();
1467# else
1468 __asm cli
1469# endif
1470}
1471#endif
1472
1473
1474/**
1475 * Disables interrupts and returns previous xFLAGS.
1476 */
1477#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1478DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1479#else
1480DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1481{
1482 RTCCUINTREG xFlags;
1483# if RT_INLINE_ASM_GNU_STYLE
1484# ifdef RT_ARCH_AMD64
1485 __asm__ __volatile__("pushfq\n\t"
1486 "cli\n\t"
1487 "popq %0\n\t"
1488 : "=r" (xFlags));
1489# else
1490 __asm__ __volatile__("pushfl\n\t"
1491 "cli\n\t"
1492 "popl %0\n\t"
1493 : "=r" (xFlags));
1494# endif
1495# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
1496 xFlags = ASMGetFlags();
1497 _disable();
1498# else
1499 __asm {
1500 pushfd
1501 cli
1502 pop [xFlags]
1503 }
1504# endif
1505 return xFlags;
1506}
1507#endif
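
/*
 * Usage sketch: the canonical save/disable/restore pattern for a short
 * interrupt-free critical section; restoring the saved flags re-enables
 * interrupts only if they were enabled on entry.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleCriticalWork(void)
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
    /* ... touch data shared with an interrupt handler ... */
    ASMSetFlags(fSavedFlags);
}
#endif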
1508
1509
1510/**
1511 * Are interrupts enabled?
1512 *
1513 * @returns true / false.
1514 */
1515DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
1516{
1517 RTCCUINTREG uFlags = ASMGetFlags();
1518 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
1519}
1520
1521
1522/**
1523 * Halts the CPU until interrupted.
1524 */
1525#if RT_INLINE_ASM_EXTERNAL
1526DECLASM(void) ASMHalt(void);
1527#else
1528DECLINLINE(void) ASMHalt(void)
1529{
1530# if RT_INLINE_ASM_GNU_STYLE
1531 __asm__ __volatile__("hlt\n\t");
1532# else
1533 __asm {
1534 hlt
1535 }
1536# endif
1537}
1538#endif
1539
1540
1541/**
1542 * Reads a machine specific register.
1543 *
1544 * @returns Register content.
1545 * @param uRegister Register to read.
1546 */
1547#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1548DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
1549#else
1550DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
1551{
1552 RTUINT64U u;
1553# if RT_INLINE_ASM_GNU_STYLE
1554 __asm__ __volatile__("rdmsr\n\t"
1555 : "=a" (u.s.Lo),
1556 "=d" (u.s.Hi)
1557 : "c" (uRegister));
1558
1559# elif RT_INLINE_ASM_USES_INTRIN
1560 u.u = __readmsr(uRegister);
1561
1562# else
1563 __asm
1564 {
1565 mov ecx, [uRegister]
1566 rdmsr
1567 mov [u.s.Lo], eax
1568 mov [u.s.Hi], edx
1569 }
1570# endif
1571
1572 return u.u;
1573}
1574#endif
1575
1576
1577/**
1578 * Writes a machine specific register.
1579 *
1581 * @param uRegister Register to write to.
1582 * @param u64Val Value to write.
1583 */
1584#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1585DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
1586#else
1587DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
1588{
1589 RTUINT64U u;
1590
1591 u.u = u64Val;
1592# if RT_INLINE_ASM_GNU_STYLE
1593 __asm__ __volatile__("wrmsr\n\t"
1594 ::"a" (u.s.Lo),
1595 "d" (u.s.Hi),
1596 "c" (uRegister));
1597
1598# elif RT_INLINE_ASM_USES_INTRIN
1599 __writemsr(uRegister, u.u);
1600
1601# else
1602 __asm
1603 {
1604 mov ecx, [uRegister]
1605 mov edx, [u.s.Hi]
1606 mov eax, [u.s.Lo]
1607 wrmsr
1608 }
1609# endif
1610}
1611#endif
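
/*
 * Usage sketch: a read-modify-write of an MSR using the accessors above. The
 * MSR number and mask are caller supplied; wrmsr raises #GP on invalid
 * registers, so only use this on MSRs known to exist.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleMsrSetBits(uint32_t uMsr, uint64_t fOrMask)
{
    uint64_t const uOld = ASMRdMsr(uMsr);
    ASMWrMsr(uMsr, uOld | fOrMask);
}
#endif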
1612
1613
1614/**
1615 * Reads low part of a machine specific register.
1616 *
1617 * @returns Register content.
1618 * @param uRegister Register to read.
1619 */
1620#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1621DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
1622#else
1623DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
1624{
1625 uint32_t u32;
1626# if RT_INLINE_ASM_GNU_STYLE
1627 __asm__ __volatile__("rdmsr\n\t"
1628 : "=a" (u32)
1629 : "c" (uRegister)
1630 : "edx");
1631
1632# elif RT_INLINE_ASM_USES_INTRIN
1633 u32 = (uint32_t)__readmsr(uRegister);
1634
1635# else
1636 __asm
1637 {
1638 mov ecx, [uRegister]
1639 rdmsr
1640 mov [u32], eax
1641 }
1642# endif
1643
1644 return u32;
1645}
1646#endif
1647
1648
1649/**
1650 * Reads high part of a machine specific register.
1651 *
1652 * @returns Register content.
1653 * @param uRegister Register to read.
1654 */
1655#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1656DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
1657#else
1658DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
1659{
1660 uint32_t u32;
1661# if RT_INLINE_ASM_GNU_STYLE
1662 __asm__ __volatile__("rdmsr\n\t"
1663 : "=d" (u32)
1664 : "c" (uRegister)
1665 : "eax");
1666
1667# elif RT_INLINE_ASM_USES_INTRIN
1668 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
1669
1670# else
1671 __asm
1672 {
1673 mov ecx, [uRegister]
1674 rdmsr
1675 mov [u32], edx
1676 }
1677# endif
1678
1679 return u32;
1680}
1681#endif
1682
1683
1684/**
1685 * Gets dr0.
1686 *
1687 * @returns dr0.
1688 */
1689#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1690DECLASM(RTCCUINTREG) ASMGetDR0(void);
1691#else
1692DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
1693{
1694 RTCCUINTREG uDR0;
1695# if RT_INLINE_ASM_USES_INTRIN
1696 uDR0 = __readdr(0);
1697# elif RT_INLINE_ASM_GNU_STYLE
1698# ifdef RT_ARCH_AMD64
1699 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
1700# else
1701 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
1702# endif
1703# else
1704 __asm
1705 {
1706# ifdef RT_ARCH_AMD64
1707 mov rax, dr0
1708 mov [uDR0], rax
1709# else
1710 mov eax, dr0
1711 mov [uDR0], eax
1712# endif
1713 }
1714# endif
1715 return uDR0;
1716}
1717#endif
1718
1719
1720/**
1721 * Gets dr1.
1722 *
1723 * @returns dr1.
1724 */
1725#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1726DECLASM(RTCCUINTREG) ASMGetDR1(void);
1727#else
1728DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
1729{
1730 RTCCUINTREG uDR1;
1731# if RT_INLINE_ASM_USES_INTRIN
1732 uDR1 = __readdr(1);
1733# elif RT_INLINE_ASM_GNU_STYLE
1734# ifdef RT_ARCH_AMD64
1735 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
1736# else
1737 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
1738# endif
1739# else
1740 __asm
1741 {
1742# ifdef RT_ARCH_AMD64
1743 mov rax, dr1
1744 mov [uDR1], rax
1745# else
1746 mov eax, dr1
1747 mov [uDR1], eax
1748# endif
1749 }
1750# endif
1751 return uDR1;
1752}
1753#endif
1754
1755
1756/**
1757 * Gets dr2.
1758 *
1759 * @returns dr2.
1760 */
1761#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1762DECLASM(RTCCUINTREG) ASMGetDR2(void);
1763#else
1764DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
1765{
1766 RTCCUINTREG uDR2;
1767# if RT_INLINE_ASM_USES_INTRIN
1768 uDR2 = __readdr(2);
1769# elif RT_INLINE_ASM_GNU_STYLE
1770# ifdef RT_ARCH_AMD64
1771 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
1772# else
1773 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
1774# endif
1775# else
1776 __asm
1777 {
1778# ifdef RT_ARCH_AMD64
1779 mov rax, dr2
1780 mov [uDR2], rax
1781# else
1782 mov eax, dr2
1783 mov [uDR2], eax
1784# endif
1785 }
1786# endif
1787 return uDR2;
1788}
1789#endif
1790
1791
1792/**
1793 * Gets dr3.
1794 *
1795 * @returns dr3.
1796 */
1797#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1798DECLASM(RTCCUINTREG) ASMGetDR3(void);
1799#else
1800DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
1801{
1802 RTCCUINTREG uDR3;
1803# if RT_INLINE_ASM_USES_INTRIN
1804 uDR3 = __readdr(3);
1805# elif RT_INLINE_ASM_GNU_STYLE
1806# ifdef RT_ARCH_AMD64
1807 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
1808# else
1809 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
1810# endif
1811# else
1812 __asm
1813 {
1814# ifdef RT_ARCH_AMD64
1815 mov rax, dr3
1816 mov [uDR3], rax
1817# else
1818 mov eax, dr3
1819 mov [uDR3], eax
1820# endif
1821 }
1822# endif
1823 return uDR3;
1824}
1825#endif
1826
1827
1828/**
1829 * Gets dr6.
1830 *
1831 * @returns dr6.
1832 */
1833#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1834DECLASM(RTCCUINTREG) ASMGetDR6(void);
1835#else
1836DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
1837{
1838 RTCCUINTREG uDR6;
1839# if RT_INLINE_ASM_USES_INTRIN
1840 uDR6 = __readdr(6);
1841# elif RT_INLINE_ASM_GNU_STYLE
1842# ifdef RT_ARCH_AMD64
1843 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
1844# else
1845 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
1846# endif
1847# else
1848 __asm
1849 {
1850# ifdef RT_ARCH_AMD64
1851 mov rax, dr6
1852 mov [uDR6], rax
1853# else
1854 mov eax, dr6
1855 mov [uDR6], eax
1856# endif
1857 }
1858# endif
1859 return uDR6;
1860}
1861#endif
1862
1863
1864/**
1865 * Reads and clears DR6.
1866 *
1867 * @returns DR6.
1868 */
1869#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1870DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
1871#else
1872DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
1873{
1874 RTCCUINTREG uDR6;
1875# if RT_INLINE_ASM_USES_INTRIN
1876 uDR6 = __readdr(6);
1877 __writedr(6, 0xffff0ff0U); /* Bits 31-16 and 11-4 are 1's, bits 63-32, 15-12 and 3-0 are zero. */
1878# elif RT_INLINE_ASM_GNU_STYLE
1879 RTCCUINTREG uNewValue = 0xffff0ff0U; /* Bits 31-16 and 11-4 are 1's, bits 63-32, 15-12 and 3-0 are zero. */
1880# ifdef RT_ARCH_AMD64
1881 __asm__ __volatile__("movq %%dr6, %0\n\t"
1882 "movq %1, %%dr6\n\t"
1883 : "=r" (uDR6)
1884 : "r" (uNewValue));
1885# else
1886 __asm__ __volatile__("movl %%dr6, %0\n\t"
1887 "movl %1, %%dr6\n\t"
1888 : "=r" (uDR6)
1889 : "r" (uNewValue));
1890# endif
1891# else
1892 __asm
1893 {
1894# ifdef RT_ARCH_AMD64
1895 mov rax, dr6
1896 mov [uDR6], rax
1897 mov rcx, rax
1898 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's, bits 63-32, 15-12 and 3-0 are zero. */
1899 mov dr6, rcx
1900# else
1901 mov eax, dr6
1902 mov [uDR6], eax
1903 mov ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's, bits 15-12 and 3-0 are zero. */
1904 mov dr6, ecx
1905# endif
1906 }
1907# endif
1908 return uDR6;
1909}
1910#endif
1911
1912
1913/**
1914 * Gets dr7.
1915 *
1916 * @returns dr7.
1917 */
1918#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1919DECLASM(RTCCUINTREG) ASMGetDR7(void);
1920#else
1921DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
1922{
1923 RTCCUINTREG uDR7;
1924# if RT_INLINE_ASM_USES_INTRIN
1925 uDR7 = __readdr(7);
1926# elif RT_INLINE_ASM_GNU_STYLE
1927# ifdef RT_ARCH_AMD64
1928 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
1929# else
1930 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
1931# endif
1932# else
1933 __asm
1934 {
1935# ifdef RT_ARCH_AMD64
1936 mov rax, dr7
1937 mov [uDR7], rax
1938# else
1939 mov eax, dr7
1940 mov [uDR7], eax
1941# endif
1942 }
1943# endif
1944 return uDR7;
1945}
1946#endif
1947
1948
1949/**
1950 * Sets dr0.
1951 *
1952 * @param uDRVal Debug register value to write
1953 */
1954#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1955DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
1956#else
1957DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
1958{
1959# if RT_INLINE_ASM_USES_INTRIN
1960 __writedr(0, uDRVal);
1961# elif RT_INLINE_ASM_GNU_STYLE
1962# ifdef RT_ARCH_AMD64
1963 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
1964# else
1965 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
1966# endif
1967# else
1968 __asm
1969 {
1970# ifdef RT_ARCH_AMD64
1971 mov rax, [uDRVal]
1972 mov dr0, rax
1973# else
1974 mov eax, [uDRVal]
1975 mov dr0, eax
1976# endif
1977 }
1978# endif
1979}
1980#endif
1981
1982
1983/**
1984 * Sets dr1.
1985 *
1986 * @param uDRVal Debug register value to write
1987 */
1988#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1989DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
1990#else
1991DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
1992{
1993# if RT_INLINE_ASM_USES_INTRIN
1994 __writedr(1, uDRVal);
1995# elif RT_INLINE_ASM_GNU_STYLE
1996# ifdef RT_ARCH_AMD64
1997 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
1998# else
1999 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2000# endif
2001# else
2002 __asm
2003 {
2004# ifdef RT_ARCH_AMD64
2005 mov rax, [uDRVal]
2006 mov dr1, rax
2007# else
2008 mov eax, [uDRVal]
2009 mov dr1, eax
2010# endif
2011 }
2012# endif
2013}
2014#endif
2015
2016
2017/**
2018 * Sets dr2.
2019 *
2020 * @param uDRVal Debug register value to write
2021 */
2022#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2023DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
2024#else
2025DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
2026{
2027# if RT_INLINE_ASM_USES_INTRIN
2028 __writedr(2, uDRVal);
2029# elif RT_INLINE_ASM_GNU_STYLE
2030# ifdef RT_ARCH_AMD64
2031 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2032# else
2033 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2034# endif
2035# else
2036 __asm
2037 {
2038# ifdef RT_ARCH_AMD64
2039 mov rax, [uDRVal]
2040 mov dr2, rax
2041# else
2042 mov eax, [uDRVal]
2043 mov dr2, eax
2044# endif
2045 }
2046# endif
2047}
2048#endif
2049
2050
2051/**
2052 * Sets dr3.
2053 *
2054 * @param uDRVal Debug register value to write
2055 */
2056#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2057DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
2058#else
2059DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
2060{
2061# if RT_INLINE_ASM_USES_INTRIN
2062 __writedr(3, uDRVal);
2063# elif RT_INLINE_ASM_GNU_STYLE
2064# ifdef RT_ARCH_AMD64
2065 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2066# else
2067 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2068# endif
2069# else
2070 __asm
2071 {
2072# ifdef RT_ARCH_AMD64
2073 mov rax, [uDRVal]
2074 mov dr3, rax
2075# else
2076 mov eax, [uDRVal]
2077 mov dr3, eax
2078# endif
2079 }
2080# endif
2081}
2082#endif
2083
2084
2085/**
2086 * Sets dr6.
2087 *
2088 * @param uDRVal Debug register value to write
2089 */
2090#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2091DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
2092#else
2093DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
2094{
2095# if RT_INLINE_ASM_USES_INTRIN
2096 __writedr(6, uDRVal);
2097# elif RT_INLINE_ASM_GNU_STYLE
2098# ifdef RT_ARCH_AMD64
2099 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2100# else
2101 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2102# endif
2103# else
2104 __asm
2105 {
2106# ifdef RT_ARCH_AMD64
2107 mov rax, [uDRVal]
2108 mov dr6, rax
2109# else
2110 mov eax, [uDRVal]
2111 mov dr6, eax
2112# endif
2113 }
2114# endif
2115}
2116#endif
2117
2118
2119/**
2120 * Sets dr7.
2121 *
2122 * @param uDRVal Debug register value to write
2123 */
2124#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2125DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
2126#else
2127DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
2128{
2129# if RT_INLINE_ASM_USES_INTRIN
2130 __writedr(7, uDRVal);
2131# elif RT_INLINE_ASM_GNU_STYLE
2132# ifdef RT_ARCH_AMD64
2133 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2134# else
2135 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2136# endif
2137# else
2138 __asm
2139 {
2140# ifdef RT_ARCH_AMD64
2141 mov rax, [uDRVal]
2142 mov dr7, rax
2143# else
2144 mov eax, [uDRVal]
2145 mov dr7, eax
2146# endif
2147 }
2148# endif
2149}
2150#endif
2151
2152
2153/**
2154 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2155 *
2156 * @param Port I/O port to write to.
2157 * @param u8 8-bit integer to write.
2158 */
2159#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2160DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2161#else
2162DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2163{
2164# if RT_INLINE_ASM_GNU_STYLE
2165 __asm__ __volatile__("outb %b1, %w0\n\t"
2166 :: "Nd" (Port),
2167 "a" (u8));
2168
2169# elif RT_INLINE_ASM_USES_INTRIN
2170 __outbyte(Port, u8);
2171
2172# else
2173 __asm
2174 {
2175 mov dx, [Port]
2176 mov al, [u8]
2177 out dx, al
2178 }
2179# endif
2180}
2181#endif
2182
2183
2184/**
2185 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2186 *
2187 * @returns 8-bit integer.
2188 * @param Port I/O port to read from.
2189 */
2190#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2191DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2192#else
2193DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2194{
2195 uint8_t u8;
2196# if RT_INLINE_ASM_GNU_STYLE
2197 __asm__ __volatile__("inb %w1, %b0\n\t"
2198 : "=a" (u8)
2199 : "Nd" (Port));
2200
2201# elif RT_INLINE_ASM_USES_INTRIN
2202 u8 = __inbyte(Port);
2203
2204# else
2205 __asm
2206 {
2207 mov dx, [Port]
2208 in al, dx
2209 mov [u8], al
2210 }
2211# endif
2212 return u8;
2213}
2214#endif
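
/*
 * Usage sketch: poll the legacy keyboard controller with the byte-sized port
 * accessors (status port 0x64, data port 0x60; bit 0 of the status byte means
 * the output buffer is full).
 */
#if 0 /* illustrative sketch, not compiled */
static bool rtExampleReadKbcByte(uint8_t *pb)
{
    if (!(ASMInU8(0x64) & 1))                  /* anything in the output buffer? */
        return false;
    *pb = ASMInU8(0x60);
    return true;
}
#endif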
2215
2216
2217/**
2218 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2219 *
2220 * @param Port I/O port to write to.
2221 * @param u16 16-bit integer to write.
2222 */
2223#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2224DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2225#else
2226DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2227{
2228# if RT_INLINE_ASM_GNU_STYLE
2229 __asm__ __volatile__("outw %w1, %w0\n\t"
2230 :: "Nd" (Port),
2231 "a" (u16));
2232
2233# elif RT_INLINE_ASM_USES_INTRIN
2234 __outword(Port, u16);
2235
2236# else
2237 __asm
2238 {
2239 mov dx, [Port]
2240 mov ax, [u16]
2241 out dx, ax
2242 }
2243# endif
2244}
2245#endif
2246
2247
2248/**
2249 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2250 *
2251 * @returns 16-bit integer.
2252 * @param Port I/O port to read from.
2253 */
2254#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2255DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2256#else
2257DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2258{
2259 uint16_t u16;
2260# if RT_INLINE_ASM_GNU_STYLE
2261 __asm__ __volatile__("inw %w1, %w0\n\t"
2262 : "=a" (u16)
2263 : "Nd" (Port));
2264
2265# elif RT_INLINE_ASM_USES_INTRIN
2266 u16 = __inword(Port);
2267
2268# else
2269 __asm
2270 {
2271 mov dx, [Port]
2272 in ax, dx
2273 mov [u16], ax
2274 }
2275# endif
2276 return u16;
2277}
2278#endif
2279
2280
2281/**
2282 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2283 *
2284 * @param Port I/O port to write to.
2285 * @param u32 32-bit integer to write.
2286 */
2287#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2288DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2289#else
2290DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2291{
2292# if RT_INLINE_ASM_GNU_STYLE
2293 __asm__ __volatile__("outl %1, %w0\n\t"
2294 :: "Nd" (Port),
2295 "a" (u32));
2296
2297# elif RT_INLINE_ASM_USES_INTRIN
2298 __outdword(Port, u32);
2299
2300# else
2301 __asm
2302 {
2303 mov dx, [Port]
2304 mov eax, [u32]
2305 out dx, eax
2306 }
2307# endif
2308}
2309#endif
2310
2311
2312/**
2313 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2314 *
2315 * @returns 32-bit integer.
2316 * @param Port I/O port to read from.
2317 */
2318#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2319DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2320#else
2321DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2322{
2323 uint32_t u32;
2324# if RT_INLINE_ASM_GNU_STYLE
2325 __asm__ __volatile__("inl %w1, %0\n\t"
2326 : "=a" (u32)
2327 : "Nd" (Port));
2328
2329# elif RT_INLINE_ASM_USES_INTRIN
2330 u32 = __indword(Port);
2331
2332# else
2333 __asm
2334 {
2335 mov dx, [Port]
2336 in eax, dx
2337 mov [u32], eax
2338 }
2339# endif
2340 return u32;
2341}
2342#endif
2343
2344
2345/**
2346 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2347 *
2348 * @param Port I/O port to write to.
2349 * @param pau8 Pointer to the string buffer.
2350 * @param c The number of items to write.
2351 */
2352#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2353DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2354#else
2355DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2356{
2357# if RT_INLINE_ASM_GNU_STYLE
2358 __asm__ __volatile__("rep; outsb\n\t"
2359 : "+S" (pau8),
2360 "+c" (c)
2361 : "d" (Port));
2362
2363# elif RT_INLINE_ASM_USES_INTRIN
2364 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2365
2366# else
2367 __asm
2368 {
2369 mov dx, [Port]
2370 mov ecx, [c]
2371 mov eax, [pau8]
2372 xchg esi, eax
2373 rep outsb
2374 xchg esi, eax
2375 }
2376# endif
2377}
2378#endif
2379
2380
2381/**
2382 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2383 *
2384 * @param Port I/O port to read from.
2385 * @param pau8 Pointer to the string buffer (output).
2386 * @param c The number of items to read.
2387 */
2388#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2389DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2390#else
2391DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2392{
2393# if RT_INLINE_ASM_GNU_STYLE
2394 __asm__ __volatile__("rep; insb\n\t"
2395 : "+D" (pau8),
2396 "+c" (c)
2397 : "d" (Port));
2398
2399# elif RT_INLINE_ASM_USES_INTRIN
2400 __inbytestring(Port, pau8, (unsigned long)c);
2401
2402# else
2403 __asm
2404 {
2405 mov dx, [Port]
2406 mov ecx, [c]
2407 mov eax, [pau8]
2408 xchg edi, eax
2409 rep insb
2410 xchg edi, eax
2411 }
2412# endif
2413}
2414#endif
2415
2416
2417/**
2418 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2419 *
2420 * @param Port I/O port to write to.
2421 * @param pau16 Pointer to the string buffer.
2422 * @param c The number of items to write.
2423 */
2424#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2425DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2426#else
2427DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2428{
2429# if RT_INLINE_ASM_GNU_STYLE
2430 __asm__ __volatile__("rep; outsw\n\t"
2431 : "+S" (pau16),
2432 "+c" (c)
2433 : "d" (Port));
2434
2435# elif RT_INLINE_ASM_USES_INTRIN
2436 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2437
2438# else
2439 __asm
2440 {
2441 mov dx, [Port]
2442 mov ecx, [c]
2443 mov eax, [pau16]
2444 xchg esi, eax
2445 rep outsw
2446 xchg esi, eax
2447 }
2448# endif
2449}
2450#endif
2451
2452
2453/**
2454 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2455 *
2456 * @param Port I/O port to read from.
2457 * @param pau16 Pointer to the string buffer (output).
2458 * @param c The number of items to read.
2459 */
2460#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2461DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2462#else
2463DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2464{
2465# if RT_INLINE_ASM_GNU_STYLE
2466 __asm__ __volatile__("rep; insw\n\t"
2467 : "+D" (pau16),
2468 "+c" (c)
2469 : "d" (Port));
2470
2471# elif RT_INLINE_ASM_USES_INTRIN
2472 __inwordstring(Port, pau16, (unsigned long)c);
2473
2474# else
2475 __asm
2476 {
2477 mov dx, [Port]
2478 mov ecx, [c]
2479 mov eax, [pau16]
2480 xchg edi, eax
2481 rep insw
2482 xchg edi, eax
2483 }
2484# endif
2485}
2486#endif
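
/*
 * Usage sketch: read one 512-byte sector as 256 words from the classic primary
 * ATA channel data port (0x1f0) after the device has signalled DRQ; the
 * command setup is omitted here.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleAtaReadSector(uint16_t *pau16Sector)
{
    ASMInStrU16(0x1f0, pau16Sector, 256);
}
#endif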
2487
2488
2489/**
2490 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2491 *
2492 * @param Port I/O port to write to.
2493 * @param pau32 Pointer to the string buffer.
2494 * @param c The number of items to write.
2495 */
2496#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2497DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2498#else
2499DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2500{
2501# if RT_INLINE_ASM_GNU_STYLE
2502 __asm__ __volatile__("rep; outsl\n\t"
2503 : "+S" (pau32),
2504 "+c" (c)
2505 : "d" (Port));
2506
2507# elif RT_INLINE_ASM_USES_INTRIN
2508 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2509
2510# else
2511 __asm
2512 {
2513 mov dx, [Port]
2514 mov ecx, [c]
2515 mov eax, [pau32]
2516 xchg esi, eax
2517 rep outsd
2518 xchg esi, eax
2519 }
2520# endif
2521}
2522#endif
2523
2524
2525/**
2526 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2527 *
2528 * @param Port I/O port to read from.
2529 * @param pau32 Pointer to the string buffer (output).
2530 * @param c The number of items to read.
2531 */
2532#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2533DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2534#else
2535DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2536{
2537# if RT_INLINE_ASM_GNU_STYLE
2538 __asm__ __volatile__("rep; insl\n\t"
2539 : "+D" (pau32),
2540 "+c" (c)
2541 : "d" (Port));
2542
2543# elif RT_INLINE_ASM_USES_INTRIN
2544 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2545
2546# else
2547 __asm
2548 {
2549 mov dx, [Port]
2550 mov ecx, [c]
2551 mov eax, [pau32]
2552 xchg edi, eax
2553 rep insd
2554 xchg edi, eax
2555 }
2556# endif
2557}
2558#endif
2559
2560
2561/**
2562 * Invalidate page.
2563 *
2564 * @param pv Address of the page to invalidate.
2565 */
2566#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2567DECLASM(void) ASMInvalidatePage(void *pv);
2568#else
2569DECLINLINE(void) ASMInvalidatePage(void *pv)
2570{
2571# if RT_INLINE_ASM_USES_INTRIN
2572 __invlpg(pv);
2573
2574# elif RT_INLINE_ASM_GNU_STYLE
2575 __asm__ __volatile__("invlpg %0\n\t"
2576 : : "m" (*(uint8_t *)pv));
2577# else
2578 __asm
2579 {
2580# ifdef RT_ARCH_AMD64
2581 mov rax, [pv]
2582 invlpg [rax]
2583# else
2584 mov eax, [pv]
2585 invlpg [eax]
2586# endif
2587 }
2588# endif
2589}
2590#endif
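
/*
 * Usage sketch: after changing a present PTE, flush the stale TLB entry for
 * that linear address on the current CPU (other CPUs need a shootdown, which
 * is outside the scope of this header). The PTE layout is hypothetical.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExampleUpdatePte(uintptr_t uPtr, uintptr_t *pPte, uintptr_t uNewPte)
{
    *pPte = uNewPte;                           /* hypothetical PTE update */
    ASMInvalidatePage((void *)uPtr);
}
#endif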
2591
2592
2593/**
2594 * Write back the internal caches and invalidate them.
2595 */
2596#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2597DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2598#else
2599DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2600{
2601# if RT_INLINE_ASM_USES_INTRIN
2602 __wbinvd();
2603
2604# elif RT_INLINE_ASM_GNU_STYLE
2605 __asm__ __volatile__("wbinvd");
2606# else
2607 __asm
2608 {
2609 wbinvd
2610 }
2611# endif
2612}
2613#endif
2614
2615
2616/**
2617 * Invalidate internal and (perhaps) external caches without first
2618 * flushing dirty cache lines. Use with extreme care.
2619 */
2620#if RT_INLINE_ASM_EXTERNAL
2621DECLASM(void) ASMInvalidateInternalCaches(void);
2622#else
2623DECLINLINE(void) ASMInvalidateInternalCaches(void)
2624{
2625# if RT_INLINE_ASM_GNU_STYLE
2626 __asm__ __volatile__("invd");
2627# else
2628 __asm
2629 {
2630 invd
2631 }
2632# endif
2633}
2634#endif
2635
2636
2637/**
2638 * Memory load/store fence, waits for any pending writes and reads to complete.
2639 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2640 */
2641DECLINLINE(void) ASMMemoryFenceSSE2(void)
2642{
2643#if RT_INLINE_ASM_GNU_STYLE
2644 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
2645#elif RT_INLINE_ASM_USES_INTRIN
2646 _mm_mfence();
2647#else
2648 __asm
2649 {
2650 _emit 0x0f
2651 _emit 0xae
2652 _emit 0xf0
2653 }
2654#endif
2655}
2656
2657
2658/**
2659 * Memory store fence, waits for any writes to complete.
2660 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
2661 */
2662DECLINLINE(void) ASMWriteFenceSSE(void)
2663{
2664#if RT_INLINE_ASM_GNU_STYLE
2665 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
2666#elif RT_INLINE_ASM_USES_INTRIN
2667 _mm_sfence();
2668#else
2669 __asm
2670 {
2671 _emit 0x0f
2672 _emit 0xae
2673 _emit 0xf8
2674 }
2675#endif
2676}
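
/*
 * Usage sketch: order a payload store before its "ready" flag store with the
 * SSE store fence so a consumer that sees the flag also sees the payload.
 * Plain x86 stores are already ordered; the fence matters for e.g.
 * non-temporal stores or write-combining memory.
 */
#if 0 /* illustrative sketch, not compiled */
static void rtExamplePublish(uint32_t volatile *pu32Data, uint32_t volatile *pfReady)
{
    *pu32Data = 42;                            /* the payload */
    ASMWriteFenceSSE();                        /* payload reaches memory first */
    *pfReady  = 1;                             /* then the flag */
}
#endif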
2677
2678
2679/**
2680 * Memory load fence, waits for any pending reads to complete.
2681 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2682 */
2683DECLINLINE(void) ASMReadFenceSSE2(void)
2684{
2685#if RT_INLINE_ASM_GNU_STYLE
2686 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
2687#elif RT_INLINE_ASM_USES_INTRIN
2688 _mm_lfence();
2689#else
2690 __asm
2691 {
2692 _emit 0x0f
2693 _emit 0xae
2694 _emit 0xe8
2695 }
2696#endif
2697}
2698
2699/** @} */
2700#endif
2701