VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@ 45809

Last change on this file since 45809 was 44528, checked in by vboxsync, 12 years ago

header (C) fixes

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 59.0 KB
1/** @file
2 * IPRT - AMD64 and x86 Specific Assembly Functions.
3 */
4
5/*
6 * Copyright (C) 2006-2013 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef ___iprt_asm_amd64_x86_h
27#define ___iprt_asm_amd64_x86_h
28
29#include <iprt/types.h>
30#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
31# error "Not on AMD64 or x86"
32#endif
33
34#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
35# include <intrin.h>
36 /* Emit the intrinsics at all optimization levels. */
37# pragma intrinsic(_ReadWriteBarrier)
38# pragma intrinsic(__cpuid)
39# pragma intrinsic(_enable)
40# pragma intrinsic(_disable)
41# pragma intrinsic(__rdtsc)
42# pragma intrinsic(__readmsr)
43# pragma intrinsic(__writemsr)
44# pragma intrinsic(__outbyte)
45# pragma intrinsic(__outbytestring)
46# pragma intrinsic(__outword)
47# pragma intrinsic(__outwordstring)
48# pragma intrinsic(__outdword)
49# pragma intrinsic(__outdwordstring)
50# pragma intrinsic(__inbyte)
51# pragma intrinsic(__inbytestring)
52# pragma intrinsic(__inword)
53# pragma intrinsic(__inwordstring)
54# pragma intrinsic(__indword)
55# pragma intrinsic(__indwordstring)
56# pragma intrinsic(__invlpg)
57# pragma intrinsic(__wbinvd)
58# pragma intrinsic(__readcr0)
59# pragma intrinsic(__readcr2)
60# pragma intrinsic(__readcr3)
61# pragma intrinsic(__readcr4)
62# pragma intrinsic(__writecr0)
63# pragma intrinsic(__writecr3)
64# pragma intrinsic(__writecr4)
65# pragma intrinsic(__readdr)
66# pragma intrinsic(__writedr)
67# ifdef RT_ARCH_AMD64
68# pragma intrinsic(__readcr8)
69# pragma intrinsic(__writecr8)
70# endif
71#endif
72
73
74
75/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
76 * @ingroup grp_rt_asm
77 * @{
78 */
79
80/** @todo find a more proper place for this structure? */
81#pragma pack(1)
82/** IDTR */
83typedef struct RTIDTR
84{
85 /** Size of the IDT. */
86 uint16_t cbIdt;
87 /** Address of the IDT. */
88 uintptr_t pIdt;
89} RTIDTR, *PRTIDTR;
90#pragma pack()
91
92#pragma pack(1)
93/** GDTR */
94typedef struct RTGDTR
95{
96 /** Size of the GDT. */
97 uint16_t cbGdt;
98 /** Address of the GDT. */
99 uintptr_t pGdt;
100} RTGDTR, *PRTGDTR;
101#pragma pack()
102
103
104/**
105 * Gets the content of the IDTR CPU register.
106 * @param pIdtr Where to store the IDTR contents.
107 */
108#if RT_INLINE_ASM_EXTERNAL
109DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
110#else
111DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
112{
113# if RT_INLINE_ASM_GNU_STYLE
114 __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
115# else
116 __asm
117 {
118# ifdef RT_ARCH_AMD64
119 mov rax, [pIdtr]
120 sidt [rax]
121# else
122 mov eax, [pIdtr]
123 sidt [eax]
124# endif
125 }
126# endif
127}
128#endif
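
/* A minimal usage sketch (disabled, illustrative only): capture the IDTR and
 * derive the gate count.  The divisors are mode assumptions: 16 bytes per
 * gate in long mode, 8 in 32-bit protected mode; the helper name is made up. */
#if 0
static unsigned rtSketchCountIdtGates(void)
{
    RTIDTR Idtr;
    ASMGetIDTR(&Idtr); /* cbIdt is the limit (size - 1), pIdt the linear base. */
# ifdef RT_ARCH_AMD64
    return ((unsigned)Idtr.cbIdt + 1) / 16;
# else
    return ((unsigned)Idtr.cbIdt + 1) / 8;
# endif
}
#endif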
129
130
131/**
132 * Sets the content of the IDTR CPU register.
133 * @param pIdtr Where to load the IDTR contents from.
134 */
135#if RT_INLINE_ASM_EXTERNAL
136DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
137#else
138DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
139{
140# if RT_INLINE_ASM_GNU_STYLE
141 __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
142# else
143 __asm
144 {
145# ifdef RT_ARCH_AMD64
146 mov rax, [pIdtr]
147 lidt [rax]
148# else
149 mov eax, [pIdtr]
150 lidt [eax]
151# endif
152 }
153# endif
154}
155#endif
156
157
158/**
159 * Gets the content of the GDTR CPU register.
160 * @param pGdtr Where to store the GDTR contents.
161 */
162#if RT_INLINE_ASM_EXTERNAL
163DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
164#else
165DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
166{
167# if RT_INLINE_ASM_GNU_STYLE
168 __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
169# else
170 __asm
171 {
172# ifdef RT_ARCH_AMD64
173 mov rax, [pGdtr]
174 sgdt [rax]
175# else
176 mov eax, [pGdtr]
177 sgdt [eax]
178# endif
179 }
180# endif
181}
182#endif
183
184/**
185 * Get the cs register.
186 * @returns cs.
187 */
188#if RT_INLINE_ASM_EXTERNAL
189DECLASM(RTSEL) ASMGetCS(void);
190#else
191DECLINLINE(RTSEL) ASMGetCS(void)
192{
193 RTSEL SelCS;
194# if RT_INLINE_ASM_GNU_STYLE
195 __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
196# else
197 __asm
198 {
199 mov ax, cs
200 mov [SelCS], ax
201 }
202# endif
203 return SelCS;
204}
205#endif
206
207
208/**
209 * Get the DS register.
210 * @returns DS.
211 */
212#if RT_INLINE_ASM_EXTERNAL
213DECLASM(RTSEL) ASMGetDS(void);
214#else
215DECLINLINE(RTSEL) ASMGetDS(void)
216{
217 RTSEL SelDS;
218# if RT_INLINE_ASM_GNU_STYLE
219 __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
220# else
221 __asm
222 {
223 mov ax, ds
224 mov [SelDS], ax
225 }
226# endif
227 return SelDS;
228}
229#endif
230
231
232/**
233 * Get the ES register.
234 * @returns ES.
235 */
236#if RT_INLINE_ASM_EXTERNAL
237DECLASM(RTSEL) ASMGetES(void);
238#else
239DECLINLINE(RTSEL) ASMGetES(void)
240{
241 RTSEL SelES;
242# if RT_INLINE_ASM_GNU_STYLE
243 __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
244# else
245 __asm
246 {
247 mov ax, es
248 mov [SelES], ax
249 }
250# endif
251 return SelES;
252}
253#endif
254
255
256/**
257 * Get the FS register.
258 * @returns FS.
259 */
260#if RT_INLINE_ASM_EXTERNAL
261DECLASM(RTSEL) ASMGetFS(void);
262#else
263DECLINLINE(RTSEL) ASMGetFS(void)
264{
265 RTSEL SelFS;
266# if RT_INLINE_ASM_GNU_STYLE
267 __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
268# else
269 __asm
270 {
271 mov ax, fs
272 mov [SelFS], ax
273 }
274# endif
275 return SelFS;
276}
277#endif
278
279
280/**
281 * Get the GS register.
282 * @returns GS.
283 */
284#if RT_INLINE_ASM_EXTERNAL
285DECLASM(RTSEL) ASMGetGS(void);
286#else
287DECLINLINE(RTSEL) ASMGetGS(void)
288{
289 RTSEL SelGS;
290# if RT_INLINE_ASM_GNU_STYLE
291 __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
292# else
293 __asm
294 {
295 mov ax, gs
296 mov [SelGS], ax
297 }
298# endif
299 return SelGS;
300}
301#endif
302
303
304/**
305 * Get the SS register.
306 * @returns SS.
307 */
308#if RT_INLINE_ASM_EXTERNAL
309DECLASM(RTSEL) ASMGetSS(void);
310#else
311DECLINLINE(RTSEL) ASMGetSS(void)
312{
313 RTSEL SelSS;
314# if RT_INLINE_ASM_GNU_STYLE
315 __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
316# else
317 __asm
318 {
319 mov ax, ss
320 mov [SelSS], ax
321 }
322# endif
323 return SelSS;
324}
325#endif
326
327
328/**
329 * Get the TR register.
330 * @returns TR.
331 */
332#if RT_INLINE_ASM_EXTERNAL
333DECLASM(RTSEL) ASMGetTR(void);
334#else
335DECLINLINE(RTSEL) ASMGetTR(void)
336{
337 RTSEL SelTR;
338# if RT_INLINE_ASM_GNU_STYLE
339 __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
340# else
341 __asm
342 {
343 str ax
344 mov [SelTR], ax
345 }
346# endif
347 return SelTR;
348}
349#endif
350
351
352/**
353 * Get the [RE]FLAGS register.
354 * @returns [RE]FLAGS.
355 */
356#if RT_INLINE_ASM_EXTERNAL
357DECLASM(RTCCUINTREG) ASMGetFlags(void);
358#else
359DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
360{
361 RTCCUINTREG uFlags;
362# if RT_INLINE_ASM_GNU_STYLE
363# ifdef RT_ARCH_AMD64
364 __asm__ __volatile__("pushfq\n\t"
365 "popq %0\n\t"
366 : "=r" (uFlags));
367# else
368 __asm__ __volatile__("pushfl\n\t"
369 "popl %0\n\t"
370 : "=r" (uFlags));
371# endif
372# else
373 __asm
374 {
375# ifdef RT_ARCH_AMD64
376 pushfq
377 pop [uFlags]
378# else
379 pushfd
380 pop [uFlags]
381# endif
382 }
383# endif
384 return uFlags;
385}
386#endif
387
388
389/**
390 * Set the [RE]FLAGS register.
391 * @param uFlags The new [RE]FLAGS value.
392 */
393#if RT_INLINE_ASM_EXTERNAL
394DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
395#else
396DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
397{
398# if RT_INLINE_ASM_GNU_STYLE
399# ifdef RT_ARCH_AMD64
400 __asm__ __volatile__("pushq %0\n\t"
401 "popfq\n\t"
402 : : "g" (uFlags));
403# else
404 __asm__ __volatile__("pushl %0\n\t"
405 "popfl\n\t"
406 : : "g" (uFlags));
407# endif
408# else
409 __asm
410 {
411# ifdef RT_ARCH_AMD64
412 push [uFlags]
413 popfq
414# else
415 push [uFlags]
416 popfd
417# endif
418 }
419# endif
420}
421#endif
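
/* A minimal sketch (disabled) pairing the two accessors: snapshot the flags,
 * run code that may alter them, then restore the original state. */
#if 0
    RTCCUINTREG const fSavedFlags = ASMGetFlags();
    /* ... code that may change EFLAGS.IF or the arithmetic flags ... */
    ASMSetFlags(fSavedFlags);
#endif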
422
423
424/**
425 * Gets the content of the CPU timestamp counter register.
426 *
427 * @returns TSC.
428 */
429#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
430DECLASM(uint64_t) ASMReadTSC(void);
431#else
432DECLINLINE(uint64_t) ASMReadTSC(void)
433{
434 RTUINT64U u;
435# if RT_INLINE_ASM_GNU_STYLE
436 __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
437# else
438# if RT_INLINE_ASM_USES_INTRIN
439 u.u = __rdtsc();
440# else
441 __asm
442 {
443 rdtsc
444 mov [u.s.Lo], eax
445 mov [u.s.Hi], edx
446 }
447# endif
448# endif
449 return u.u;
450}
451#endif
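
/* A crude cycle-count sketch (disabled).  Assumes the caller stays on one
 * CPU; the TSC is per-CPU and may not tick at a constant rate on older
 * parts, so treat deltas as rough. */
#if 0
    uint64_t const uTscStart = ASMReadTSC();
    /* ... work to be measured ... */
    uint64_t const cTicks    = ASMReadTSC() - uTscStart;
#endif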
452
453
454/**
455 * Performs the cpuid instruction returning all registers.
456 *
457 * @param uOperator CPUID operation (eax).
458 * @param pvEAX Where to store eax.
459 * @param pvEBX Where to store ebx.
460 * @param pvECX Where to store ecx.
461 * @param pvEDX Where to store edx.
462 * @remark We're using void pointers to ease the use of special bitfield structures and such.
463 */
464#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
465DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
466#else
467DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
468{
469# if RT_INLINE_ASM_GNU_STYLE
470# ifdef RT_ARCH_AMD64
471 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
472 __asm__ __volatile__ ("cpuid\n\t"
473 : "=a" (uRAX),
474 "=b" (uRBX),
475 "=c" (uRCX),
476 "=d" (uRDX)
477 : "0" (uOperator), "2" (0));
478 *(uint32_t *)pvEAX = (uint32_t)uRAX;
479 *(uint32_t *)pvEBX = (uint32_t)uRBX;
480 *(uint32_t *)pvECX = (uint32_t)uRCX;
481 *(uint32_t *)pvEDX = (uint32_t)uRDX;
482# else
483 __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
484 "cpuid\n\t"
485 "xchgl %%ebx, %1\n\t"
486 : "=a" (*(uint32_t *)pvEAX),
487 "=r" (*(uint32_t *)pvEBX),
488 "=c" (*(uint32_t *)pvECX),
489 "=d" (*(uint32_t *)pvEDX)
490 : "0" (uOperator), "2" (0));
491# endif
492
493# elif RT_INLINE_ASM_USES_INTRIN
494 int aInfo[4];
495 __cpuid(aInfo, uOperator);
496 *(uint32_t *)pvEAX = aInfo[0];
497 *(uint32_t *)pvEBX = aInfo[1];
498 *(uint32_t *)pvECX = aInfo[2];
499 *(uint32_t *)pvEDX = aInfo[3];
500
501# else
502 uint32_t uEAX;
503 uint32_t uEBX;
504 uint32_t uECX;
505 uint32_t uEDX;
506 __asm
507 {
508 push ebx
509 mov eax, [uOperator]
510 cpuid
511 mov [uEAX], eax
512 mov [uEBX], ebx
513 mov [uECX], ecx
514 mov [uEDX], edx
515 pop ebx
516 }
517 *(uint32_t *)pvEAX = uEAX;
518 *(uint32_t *)pvEBX = uEBX;
519 *(uint32_t *)pvECX = uECX;
520 *(uint32_t *)pvEDX = uEDX;
521# endif
522}
523#endif
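
/* A sketch (disabled) fetching the 12-character vendor string from leaf 0;
 * the EBX, EDX, ECX ordering is architectural.  szVendor is a local name. */
#if 0
    uint32_t uMaxLeaf;
    char     szVendor[13];
    ASMCpuId(0, &uMaxLeaf, &szVendor[0], &szVendor[8], &szVendor[4]);
    szVendor[12] = '\0'; /* e.g. "GenuineIntel" or "AuthenticAMD" */
#endif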
524
525
526/**
527 * Performs the CPUID instruction with EAX and ECX input returning ALL output
528 * registers.
529 *
530 * @param uOperator CPUID operation (eax).
531 * @param uIdxECX ecx index
532 * @param pvEAX Where to store eax.
533 * @param pvEBX Where to store ebx.
534 * @param pvECX Where to store ecx.
535 * @param pvEDX Where to store edx.
536 * @remark We're using void pointers to ease the use of special bitfield structures and such.
537 */
538#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
539DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
540#else
541DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
542{
543# if RT_INLINE_ASM_GNU_STYLE
544# ifdef RT_ARCH_AMD64
545 RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
546 __asm__ ("cpuid\n\t"
547 : "=a" (uRAX),
548 "=b" (uRBX),
549 "=c" (uRCX),
550 "=d" (uRDX)
551 : "0" (uOperator),
552 "2" (uIdxECX));
553 *(uint32_t *)pvEAX = (uint32_t)uRAX;
554 *(uint32_t *)pvEBX = (uint32_t)uRBX;
555 *(uint32_t *)pvECX = (uint32_t)uRCX;
556 *(uint32_t *)pvEDX = (uint32_t)uRDX;
557# else
558 __asm__ ("xchgl %%ebx, %1\n\t"
559 "cpuid\n\t"
560 "xchgl %%ebx, %1\n\t"
561 : "=a" (*(uint32_t *)pvEAX),
562 "=r" (*(uint32_t *)pvEBX),
563 "=c" (*(uint32_t *)pvECX),
564 "=d" (*(uint32_t *)pvEDX)
565 : "0" (uOperator),
566 "2" (uIdxECX));
567# endif
568
569# elif RT_INLINE_ASM_USES_INTRIN
570 int aInfo[4];
571 __cpuidex(aInfo, uOperator, uIdxECX);
572 *(uint32_t *)pvEAX = aInfo[0];
573 *(uint32_t *)pvEBX = aInfo[1];
574 *(uint32_t *)pvECX = aInfo[2];
575 *(uint32_t *)pvEDX = aInfo[3];
576
577# else
578 uint32_t uEAX;
579 uint32_t uEBX;
580 uint32_t uECX;
581 uint32_t uEDX;
582 __asm
583 {
584 push ebx
585 mov eax, [uOperator]
586 mov ecx, [uIdxECX]
587 cpuid
588 mov [uEAX], eax
589 mov [uEBX], ebx
590 mov [uECX], ecx
591 mov [uEDX], edx
592 pop ebx
593 }
594 *(uint32_t *)pvEAX = uEAX;
595 *(uint32_t *)pvEBX = uEBX;
596 *(uint32_t *)pvECX = uECX;
597 *(uint32_t *)pvEDX = uEDX;
598# endif
599}
600#endif
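
/* A sketch (disabled) walking the sub-leaves of leaf 4 (Intel deterministic
 * cache parameters); enumeration ends when the cache-type field in EAX[4:0]
 * reads zero.  The 16 sub-leaf cap is an arbitrary safety bound. */
#if 0
    uint32_t iSubLeaf;
    for (iSubLeaf = 0; iSubLeaf < 16; iSubLeaf++)
    {
        uint32_t uEAX, uEBX, uECX, uEDX;
        ASMCpuId_Idx_ECX(4, iSubLeaf, &uEAX, &uEBX, &uECX, &uEDX);
        if ((uEAX & 0x1f) == 0)
            break; /* no more cache levels */
        /* ... decode the cache level and size from uEAX/uEBX/uECX ... */
    }
#endif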
601
602
603/**
604 * Performs the cpuid instruction returning ecx and edx.
605 *
606 * @param uOperator CPUID operation (eax).
607 * @param pvECX Where to store ecx.
608 * @param pvEDX Where to store edx.
609 * @remark We're using void pointers to ease the use of special bitfield structures and such.
610 */
611#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
612DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
613#else
614DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
615{
616 uint32_t uEBX;
617 ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
618}
619#endif
620
621
622/**
623 * Performs the cpuid instruction returning eax.
624 *
625 * @param uOperator CPUID operation (eax).
626 * @returns EAX after cpuid operation.
627 */
628#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
629DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
630#else
631DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
632{
633 RTCCUINTREG xAX;
634# if RT_INLINE_ASM_GNU_STYLE
635# ifdef RT_ARCH_AMD64
636 __asm__ ("cpuid"
637 : "=a" (xAX)
638 : "0" (uOperator)
639 : "rbx", "rcx", "rdx");
640# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
641 __asm__ ("push %%ebx\n\t"
642 "cpuid\n\t"
643 "pop %%ebx\n\t"
644 : "=a" (xAX)
645 : "0" (uOperator)
646 : "ecx", "edx");
647# else
648 __asm__ ("cpuid"
649 : "=a" (xAX)
650 : "0" (uOperator)
651 : "edx", "ecx", "ebx");
652# endif
653
654# elif RT_INLINE_ASM_USES_INTRIN
655 int aInfo[4];
656 __cpuid(aInfo, uOperator);
657 xAX = aInfo[0];
658
659# else
660 __asm
661 {
662 push ebx
663 mov eax, [uOperator]
664 cpuid
665 mov [xAX], eax
666 pop ebx
667 }
668# endif
669 return (uint32_t)xAX;
670}
671#endif
672
673
674/**
675 * Performs the cpuid instruction returning ebx.
676 *
677 * @param uOperator CPUID operation (eax).
678 * @returns EBX after cpuid operation.
679 */
680#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
681DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
682#else
683DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
684{
685 RTCCUINTREG xBX;
686# if RT_INLINE_ASM_GNU_STYLE
687# ifdef RT_ARCH_AMD64
688 RTCCUINTREG uSpill;
689 __asm__ ("cpuid"
690 : "=a" (uSpill),
691 "=b" (xBX)
692 : "0" (uOperator)
693 : "rdx", "rcx");
694# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
695 __asm__ ("push %%ebx\n\t"
696 "cpuid\n\t"
697 "mov %%ebx, %%edx\n\t"
698 "pop %%ebx\n\t"
699 : "=a" (uOperator),
700 "=d" (xBX)
701 : "0" (uOperator)
702 : "ecx");
703# else
704 __asm__ ("cpuid"
705 : "=a" (uOperator),
706 "=b" (xBX)
707 : "0" (uOperator)
708 : "edx", "ecx");
709# endif
710
711# elif RT_INLINE_ASM_USES_INTRIN
712 int aInfo[4];
713 __cpuid(aInfo, uOperator);
714 xBX = aInfo[1];
715
716# else
717 __asm
718 {
719 push ebx
720 mov eax, [uOperator]
721 cpuid
722 mov [xBX], ebx
723 pop ebx
724 }
725# endif
726 return (uint32_t)xBX;
727}
728#endif
729
730
731/**
732 * Performs the cpuid instruction returning ecx.
733 *
734 * @param uOperator CPUID operation (eax).
735 * @returns ECX after cpuid operation.
736 */
737#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
738DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
739#else
740DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
741{
742 RTCCUINTREG xCX;
743# if RT_INLINE_ASM_GNU_STYLE
744# ifdef RT_ARCH_AMD64
745 RTCCUINTREG uSpill;
746 __asm__ ("cpuid"
747 : "=a" (uSpill),
748 "=c" (xCX)
749 : "0" (uOperator)
750 : "rbx", "rdx");
751# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
752 __asm__ ("push %%ebx\n\t"
753 "cpuid\n\t"
754 "pop %%ebx\n\t"
755 : "=a" (uOperator),
756 "=c" (xCX)
757 : "0" (uOperator)
758 : "edx");
759# else
760 __asm__ ("cpuid"
761 : "=a" (uOperator),
762 "=c" (xCX)
763 : "0" (uOperator)
764 : "ebx", "edx");
765
766# endif
767
768# elif RT_INLINE_ASM_USES_INTRIN
769 int aInfo[4];
770 __cpuid(aInfo, uOperator);
771 xCX = aInfo[2];
772
773# else
774 __asm
775 {
776 push ebx
777 mov eax, [uOperator]
778 cpuid
779 mov [xCX], ecx
780 pop ebx
781 }
782# endif
783 return (uint32_t)xCX;
784}
785#endif
786
787
788/**
789 * Performs the cpuid instruction returning edx.
790 *
791 * @param uOperator CPUID operation (eax).
792 * @returns EDX after cpuid operation.
793 */
794#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
795DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
796#else
797DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
798{
799 RTCCUINTREG xDX;
800# if RT_INLINE_ASM_GNU_STYLE
801# ifdef RT_ARCH_AMD64
802 RTCCUINTREG uSpill;
803 __asm__ ("cpuid"
804 : "=a" (uSpill),
805 "=d" (xDX)
806 : "0" (uOperator)
807 : "rbx", "rcx");
808# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
809 __asm__ ("push %%ebx\n\t"
810 "cpuid\n\t"
811 "pop %%ebx\n\t"
812 : "=a" (uOperator),
813 "=d" (xDX)
814 : "0" (uOperator)
815 : "ecx");
816# else
817 __asm__ ("cpuid"
818 : "=a" (uOperator),
819 "=d" (xDX)
820 : "0" (uOperator)
821 : "ebx", "ecx");
822# endif
823
824# elif RT_INLINE_ASM_USES_INTRIN
825 int aInfo[4];
826 __cpuid(aInfo, uOperator);
827 xDX = aInfo[3];
828
829# else
830 __asm
831 {
832 push ebx
833 mov eax, [uOperator]
834 cpuid
835 mov [xDX], edx
836 pop ebx
837 }
838# endif
839 return (uint32_t)xDX;
840}
841#endif
842
843
844/**
845 * Checks if the current CPU supports CPUID.
846 *
847 * @returns true if CPUID is supported.
848 */
849DECLINLINE(bool) ASMHasCpuId(void)
850{
851#ifdef RT_ARCH_AMD64
852 return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
853#else /* !RT_ARCH_AMD64 */
854 bool fRet = false;
855# if RT_INLINE_ASM_GNU_STYLE
856 uint32_t u1;
857 uint32_t u2;
858 __asm__ ("pushf\n\t"
859 "pop %1\n\t"
860 "mov %1, %2\n\t"
861 "xorl $0x200000, %1\n\t"
862 "push %1\n\t"
863 "popf\n\t"
864 "pushf\n\t"
865 "pop %1\n\t"
866 "cmpl %1, %2\n\t"
867 "setne %0\n\t"
868 "push %2\n\t"
869 "popf\n\t"
870 : "=m" (fRet), "=r" (u1), "=r" (u2));
871# else
872 __asm
873 {
874 pushfd
875 pop eax
876 mov ebx, eax
877 xor eax, 0200000h
878 push eax
879 popfd
880 pushfd
881 pop eax
882 cmp eax, ebx
883 setne fRet
884 push ebx
885 popfd
886 }
887# endif
888 return fRet;
889#endif /* !RT_ARCH_AMD64 */
890}
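
/* The usual guard sketch (disabled) for 32-bit code that cannot assume
 * CPUID: check first, then bound all further leaf queries. */
#if 0
    if (ASMHasCpuId())
    {
        uint32_t const uMaxLeaf = ASMCpuId_EAX(0);
        /* ... only query leaves <= uMaxLeaf ... */
    }
#endif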
891
892
893/**
894 * Gets the APIC ID of the current CPU.
895 *
896 * @returns the APIC ID.
897 */
898#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
899DECLASM(uint8_t) ASMGetApicId(void);
900#else
901DECLINLINE(uint8_t) ASMGetApicId(void)
902{
903 RTCCUINTREG xBX;
904# if RT_INLINE_ASM_GNU_STYLE
905# ifdef RT_ARCH_AMD64
906 RTCCUINTREG uSpill;
907 __asm__ __volatile__ ("cpuid"
908 : "=a" (uSpill),
909 "=b" (xBX)
910 : "0" (1)
911 : "rcx", "rdx");
912# elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
913 RTCCUINTREG uSpill;
914 __asm__ __volatile__ ("mov %%ebx,%1\n\t"
915 "cpuid\n\t"
916 "xchgl %%ebx,%1\n\t"
917 : "=a" (uSpill),
918 "=rm" (xBX)
919 : "0" (1)
920 : "ecx", "edx");
921# else
922 RTCCUINTREG uSpill;
923 __asm__ __volatile__ ("cpuid"
924 : "=a" (uSpill),
925 "=b" (xBX)
926 : "0" (1)
927 : "ecx", "edx");
928# endif
929
930# elif RT_INLINE_ASM_USES_INTRIN
931 int aInfo[4];
932 __cpuid(aInfo, 1);
933 xBX = aInfo[1];
934
935# else
936 __asm
937 {
938 push ebx
939 mov eax, 1
940 cpuid
941 mov [xBX], ebx
942 pop ebx
943 }
944# endif
945 return (uint8_t)(xBX >> 24);
946}
947#endif
948
949
950/**
951 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
952 *
953 * @returns true/false.
954 * @param uEBX EBX return from ASMCpuId(0)
955 * @param uECX ECX return from ASMCpuId(0)
956 * @param uEDX EDX return from ASMCpuId(0)
957 */
958DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
959{
960 return uEBX == UINT32_C(0x756e6547)
961 && uECX == UINT32_C(0x6c65746e)
962 && uEDX == UINT32_C(0x49656e69);
963}
964
965
966/**
967 * Tests if this is a genuine Intel CPU.
968 *
969 * @returns true/false.
970 * @remarks ASSUMES that cpuid is supported by the CPU.
971 */
972DECLINLINE(bool) ASMIsIntelCpu(void)
973{
974 uint32_t uEAX, uEBX, uECX, uEDX;
975 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
976 return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
977}
978
979
980/**
981 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
982 *
983 * @returns true/false.
984 * @param uEBX EBX return from ASMCpuId(0)
985 * @param uECX ECX return from ASMCpuId(0)
986 * @param uEDX EDX return from ASMCpuId(0)
987 */
988DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
989{
990 return uEBX == UINT32_C(0x68747541)
991 && uECX == UINT32_C(0x444d4163)
992 && uEDX == UINT32_C(0x69746e65);
993}
994
995
996/**
997 * Tests if this is an authentic AMD CPU.
998 *
999 * @returns true/false.
1000 * @remarks ASSUMES that cpuid is supported by the CPU.
1001 */
1002DECLINLINE(bool) ASMIsAmdCpu(void)
1003{
1004 uint32_t uEAX, uEBX, uECX, uEDX;
1005 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1006 return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
1007}
1008
1009
1010/**
1011 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
1012 *
1013 * @returns true/false.
1014 * @param uEBX EBX return from ASMCpuId(0).
1015 * @param uECX ECX return from ASMCpuId(0).
1016 * @param uEDX EDX return from ASMCpuId(0).
1017 */
1018DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
1019{
1020 return uEBX == UINT32_C(0x746e6543)
1021 && uECX == UINT32_C(0x736c7561)
1022 && uEDX == UINT32_C(0x48727561);
1023}
1024
1025
1026/**
1027 * Tests if this is a centaur hauling VIA CPU.
1028 *
1029 * @returns true/false.
1030 * @remarks ASSUMES that cpuid is supported by the CPU.
1031 */
1032DECLINLINE(bool) ASMIsViaCentaurCpu(void)
1033{
1034 uint32_t uEAX, uEBX, uECX, uEDX;
1035 ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
1036 return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
1037}
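
/* A sketch (disabled) tying the vendor tests together; fIntel and fAmd are
 * local names. */
#if 0
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    bool const fIntel = ASMIsIntelCpuEx(uEBX, uECX, uEDX);
    bool const fAmd   = ASMIsAmdCpuEx(uEBX, uECX, uEDX);
#endif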
1038
1039
1040/**
1041 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
1042 *
1043 *
1044 * @returns true/false.
1045 * @param uEAX The EAX value of CPUID leaf 0x00000000.
1046 *
1047 * @note This only succeeds if there are at least two leaves in the range.
1048 * @remarks The upper range limit is just some half reasonable value we've
1049 * picked out of thin air.
1050 */
1051DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
1052{
1053 return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
1054}
1055
1056
1057/**
1058 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
1059 *
1060 *
1061 *
1062 * @returns true/false.
1063 * @param uEAX The EAX value of CPUID leaf 0x80000000.
1064 *
1065 * @note This only succeeds if there are at least two leaves in the range.
1066 * @remarks The upper range limit is just some half reasonable value we've
1067 * picked out of thin air.
1068 */
1069DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
1070{
1071 return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
1072}
1073
1074
1075/**
1076 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001)
1077 *
1078 * @returns Family.
1079 * @param uEAX EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
1080 */
1081DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
1082{
1083 return ((uEAX >> 8) & 0xf) == 0xf
1084 ? ((uEAX >> 20) & 0x7f) + 0xf
1085 : ((uEAX >> 8) & 0xf);
1086}
1087
1088
1089/**
1090 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
1091 *
1092 * @returns Model.
1093 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1094 */
1095DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
1096{
1097 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
1098 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1099 : ((uEAX >> 4) & 0xf);
1100}
1101
1102
1103/**
1104 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
1105 *
1106 * @returns Model.
1107 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1108 */
1109DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
1110{
1111 return ((uEAX >> 8) & 0xf) == 0xf
1112 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1113 : ((uEAX >> 4) & 0xf);
1114}
1115
1116
1117/**
1118 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001)
1119 *
1120 * @returns Model.
1121 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1122 * @param fIntel Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
1123 */
1124DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
1125{
1126 return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
1127 ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
1128 : ((uEAX >> 4) & 0xf);
1129}
1130
1131
1132/**
1133 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001)
1134 *
1135 * @returns Stepping.
1136 * @param uEAX EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
1137 */
1138DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
1139{
1140 return uEAX & 0xf;
1141}
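
/* A sketch (disabled) decoding leaf 1 EAX with the helpers above.  Note that
 * ASMGetCpuModel() needs the vendor because Intel extends the model field
 * for family 6 as well as family 15. */
#if 0
    uint32_t const uEAX      = ASMCpuId_EAX(1);
    uint32_t const uFamily   = ASMGetCpuFamily(uEAX);
    uint32_t const uModel    = ASMGetCpuModel(uEAX, ASMIsIntelCpu());
    uint32_t const uStepping = ASMGetCpuStepping(uEAX);
#endif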
1142
1143
1144/**
1145 * Get cr0.
1146 * @returns cr0.
1147 */
1148#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1149DECLASM(RTCCUINTREG) ASMGetCR0(void);
1150#else
1151DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
1152{
1153 RTCCUINTREG uCR0;
1154# if RT_INLINE_ASM_USES_INTRIN
1155 uCR0 = __readcr0();
1156
1157# elif RT_INLINE_ASM_GNU_STYLE
1158# ifdef RT_ARCH_AMD64
1159 __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
1160# else
1161 __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
1162# endif
1163# else
1164 __asm
1165 {
1166# ifdef RT_ARCH_AMD64
1167 mov rax, cr0
1168 mov [uCR0], rax
1169# else
1170 mov eax, cr0
1171 mov [uCR0], eax
1172# endif
1173 }
1174# endif
1175 return uCR0;
1176}
1177#endif
1178
1179
1180/**
1181 * Sets the CR0 register.
1182 * @param uCR0 The new CR0 value.
1183 */
1184#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1185DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
1186#else
1187DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
1188{
1189# if RT_INLINE_ASM_USES_INTRIN
1190 __writecr0(uCR0);
1191
1192# elif RT_INLINE_ASM_GNU_STYLE
1193# ifdef RT_ARCH_AMD64
1194 __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
1195# else
1196 __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
1197# endif
1198# else
1199 __asm
1200 {
1201# ifdef RT_ARCH_AMD64
1202 mov rax, [uCR0]
1203 mov cr0, rax
1204# else
1205 mov eax, [uCR0]
1206 mov cr0, eax
1207# endif
1208 }
1209# endif
1210}
1211#endif
1212
1213
1214/**
1215 * Get cr2.
1216 * @returns cr2.
1217 */
1218#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1219DECLASM(RTCCUINTREG) ASMGetCR2(void);
1220#else
1221DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
1222{
1223 RTCCUINTREG uCR2;
1224# if RT_INLINE_ASM_USES_INTRIN
1225 uCR2 = __readcr2();
1226
1227# elif RT_INLINE_ASM_GNU_STYLE
1228# ifdef RT_ARCH_AMD64
1229 __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
1230# else
1231 __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
1232# endif
1233# else
1234 __asm
1235 {
1236# ifdef RT_ARCH_AMD64
1237 mov rax, cr2
1238 mov [uCR2], rax
1239# else
1240 mov eax, cr2
1241 mov [uCR2], eax
1242# endif
1243 }
1244# endif
1245 return uCR2;
1246}
1247#endif
1248
1249
1250/**
1251 * Sets the CR2 register.
1252 * @param uCR2 The new CR2 value.
1253 */
1254#if RT_INLINE_ASM_EXTERNAL
1255DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
1256#else
1257DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
1258{
1259# if RT_INLINE_ASM_GNU_STYLE
1260# ifdef RT_ARCH_AMD64
1261 __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
1262# else
1263 __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
1264# endif
1265# else
1266 __asm
1267 {
1268# ifdef RT_ARCH_AMD64
1269 mov rax, [uCR2]
1270 mov cr2, rax
1271# else
1272 mov eax, [uCR2]
1273 mov cr2, eax
1274# endif
1275 }
1276# endif
1277}
1278#endif
1279
1280
1281/**
1282 * Get cr3.
1283 * @returns cr3.
1284 */
1285#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1286DECLASM(RTCCUINTREG) ASMGetCR3(void);
1287#else
1288DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
1289{
1290 RTCCUINTREG uCR3;
1291# if RT_INLINE_ASM_USES_INTRIN
1292 uCR3 = __readcr3();
1293
1294# elif RT_INLINE_ASM_GNU_STYLE
1295# ifdef RT_ARCH_AMD64
1296 __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
1297# else
1298 __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
1299# endif
1300# else
1301 __asm
1302 {
1303# ifdef RT_ARCH_AMD64
1304 mov rax, cr3
1305 mov [uCR3], rax
1306# else
1307 mov eax, cr3
1308 mov [uCR3], eax
1309# endif
1310 }
1311# endif
1312 return uCR3;
1313}
1314#endif
1315
1316
1317/**
1318 * Sets the CR3 register.
1319 *
1320 * @param uCR3 New CR3 value.
1321 */
1322#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1323DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
1324#else
1325DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
1326{
1327# if RT_INLINE_ASM_USES_INTRIN
1328 __writecr3(uCR3);
1329
1330# elif RT_INLINE_ASM_GNU_STYLE
1331# ifdef RT_ARCH_AMD64
1332 __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
1333# else
1334 __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
1335# endif
1336# else
1337 __asm
1338 {
1339# ifdef RT_ARCH_AMD64
1340 mov rax, [uCR3]
1341 mov cr3, rax
1342# else
1343 mov eax, [uCR3]
1344 mov cr3, eax
1345# endif
1346 }
1347# endif
1348}
1349#endif
1350
1351
1352/**
1353 * Reloads the CR3 register.
1354 */
1355#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1356DECLASM(void) ASMReloadCR3(void);
1357#else
1358DECLINLINE(void) ASMReloadCR3(void)
1359{
1360# if RT_INLINE_ASM_USES_INTRIN
1361 __writecr3(__readcr3());
1362
1363# elif RT_INLINE_ASM_GNU_STYLE
1364 RTCCUINTREG u;
1365# ifdef RT_ARCH_AMD64
1366 __asm__ __volatile__("movq %%cr3, %0\n\t"
1367 "movq %0, %%cr3\n\t"
1368 : "=r" (u));
1369# else
1370 __asm__ __volatile__("movl %%cr3, %0\n\t"
1371 "movl %0, %%cr3\n\t"
1372 : "=r" (u));
1373# endif
1374# else
1375 __asm
1376 {
1377# ifdef RT_ARCH_AMD64
1378 mov rax, cr3
1379 mov cr3, rax
1380# else
1381 mov eax, cr3
1382 mov cr3, eax
1383# endif
1384 }
1385# endif
1386}
1387#endif
1388
1389
1390/**
1391 * Get cr4.
1392 * @returns cr4.
1393 */
1394#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1395DECLASM(RTCCUINTREG) ASMGetCR4(void);
1396#else
1397DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
1398{
1399 RTCCUINTREG uCR4;
1400# if RT_INLINE_ASM_USES_INTRIN
1401 uCR4 = __readcr4();
1402
1403# elif RT_INLINE_ASM_GNU_STYLE
1404# ifdef RT_ARCH_AMD64
1405 __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
1406# else
1407 __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
1408# endif
1409# else
1410 __asm
1411 {
1412# ifdef RT_ARCH_AMD64
1413 mov rax, cr4
1414 mov [uCR4], rax
1415# else
1416 push eax /* just in case */
1417 /*mov eax, cr4*/
1418 _emit 0x0f
1419 _emit 0x20
1420 _emit 0xe0
1421 mov [uCR4], eax
1422 pop eax
1423# endif
1424 }
1425# endif
1426 return uCR4;
1427}
1428#endif
1429
1430
1431/**
1432 * Sets the CR4 register.
1433 *
1434 * @param uCR4 New CR4 value.
1435 */
1436#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1437DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
1438#else
1439DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
1440{
1441# if RT_INLINE_ASM_USES_INTRIN
1442 __writecr4(uCR4);
1443
1444# elif RT_INLINE_ASM_GNU_STYLE
1445# ifdef RT_ARCH_AMD64
1446 __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
1447# else
1448 __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
1449# endif
1450# else
1451 __asm
1452 {
1453# ifdef RT_ARCH_AMD64
1454 mov rax, [uCR4]
1455 mov cr4, rax
1456# else
1457 mov eax, [uCR4]
1458 _emit 0x0F
1459 _emit 0x22
1460 _emit 0xE0 /* mov cr4, eax */
1461# endif
1462 }
1463# endif
1464}
1465#endif
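
/* The usual read-modify-write sketch (disabled, ring-0 only) for control
 * registers; bit 9 (CR4.OSFXSR) is used purely as an example. */
#if 0
    RTCCUINTREG const uCr4 = ASMGetCR4();
    if (!(uCr4 & RT_BIT_32(9) /* OSFXSR */))
        ASMSetCR4(uCr4 | RT_BIT_32(9));
#endif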
1466
1467
1468/**
1469 * Get cr8.
1470 * @returns cr8.
1471 * @remark The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
1472 */
1473#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1474DECLASM(RTCCUINTREG) ASMGetCR8(void);
1475#else
1476DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
1477{
1478# ifdef RT_ARCH_AMD64
1479 RTCCUINTREG uCR8;
1480# if RT_INLINE_ASM_USES_INTRIN
1481 uCR8 = __readcr8();
1482
1483# elif RT_INLINE_ASM_GNU_STYLE
1484 __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
1485# else
1486 __asm
1487 {
1488 mov rax, cr8
1489 mov [uCR8], rax
1490 }
1491# endif
1492 return uCR8;
1493# else /* !RT_ARCH_AMD64 */
1494 return 0;
1495# endif /* !RT_ARCH_AMD64 */
1496}
1497#endif
1498
1499
1500/**
1501 * Enables interrupts (EFLAGS.IF).
1502 */
1503#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1504DECLASM(void) ASMIntEnable(void);
1505#else
1506DECLINLINE(void) ASMIntEnable(void)
1507{
1508# if RT_INLINE_ASM_GNU_STYLE
1509 __asm("sti\n");
1510# elif RT_INLINE_ASM_USES_INTRIN
1511 _enable();
1512# else
1513 __asm sti
1514# endif
1515}
1516#endif
1517
1518
1519/**
1520 * Disables interrupts (!EFLAGS.IF).
1521 */
1522#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1523DECLASM(void) ASMIntDisable(void);
1524#else
1525DECLINLINE(void) ASMIntDisable(void)
1526{
1527# if RT_INLINE_ASM_GNU_STYLE
1528 __asm("cli\n");
1529# elif RT_INLINE_ASM_USES_INTRIN
1530 _disable();
1531# else
1532 __asm cli
1533# endif
1534}
1535#endif
1536
1537
1538/**
1539 * Disables interrupts and returns previous xFLAGS.
1540 */
1541#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1542DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
1543#else
1544DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
1545{
1546 RTCCUINTREG xFlags;
1547# if RT_INLINE_ASM_GNU_STYLE
1548# ifdef RT_ARCH_AMD64
1549 __asm__ __volatile__("pushfq\n\t"
1550 "cli\n\t"
1551 "popq %0\n\t"
1552 : "=r" (xFlags));
1553# else
1554 __asm__ __volatile__("pushfl\n\t"
1555 "cli\n\t"
1556 "popl %0\n\t"
1557 : "=r" (xFlags));
1558# endif
1559# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
1560 xFlags = ASMGetFlags();
1561 _disable();
1562# else
1563 __asm {
1564 pushfd
1565 cli
1566 pop [xFlags]
1567 }
1568# endif
1569 return xFlags;
1570}
1571#endif
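
/* The canonical pairing sketch (disabled, ring-0): a short interrupt-free
 * section that restores the previous EFLAGS.IF state rather than forcing
 * interrupts back on. */
#if 0
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags();
    /* ... touch data shared with an interrupt handler ... */
    ASMSetFlags(fSavedFlags);
#endif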
1572
1573
1574/**
1575 * Are interrupts enabled?
1576 *
1577 * @returns true / false.
1578 */
1579DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
1580{
1581 RTCCUINTREG uFlags = ASMGetFlags();
1582 return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
1583}
1584
1585
1586/**
1587 * Halts the CPU until interrupted.
1588 */
1589#if RT_INLINE_ASM_EXTERNAL
1590DECLASM(void) ASMHalt(void);
1591#else
1592DECLINLINE(void) ASMHalt(void)
1593{
1594# if RT_INLINE_ASM_GNU_STYLE
1595 __asm__ __volatile__("hlt\n\t");
1596# else
1597 __asm {
1598 hlt
1599 }
1600# endif
1601}
1602#endif
1603
1604
1605/**
1606 * Reads a machine specific register.
1607 *
1608 * @returns Register content.
1609 * @param uRegister Register to read.
1610 */
1611#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1612DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
1613#else
1614DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
1615{
1616 RTUINT64U u;
1617# if RT_INLINE_ASM_GNU_STYLE
1618 __asm__ __volatile__("rdmsr\n\t"
1619 : "=a" (u.s.Lo),
1620 "=d" (u.s.Hi)
1621 : "c" (uRegister));
1622
1623# elif RT_INLINE_ASM_USES_INTRIN
1624 u.u = __readmsr(uRegister);
1625
1626# else
1627 __asm
1628 {
1629 mov ecx, [uRegister]
1630 rdmsr
1631 mov [u.s.Lo], eax
1632 mov [u.s.Hi], edx
1633 }
1634# endif
1635
1636 return u.u;
1637}
1638#endif
1639
1640
1641/**
1642 * Writes a machine specific register.
1643 *
1644 *
1645 * @param uRegister Register to write to.
1646 * @param u64Val Value to write.
1647 */
1648#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1649DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
1650#else
1651DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
1652{
1653 RTUINT64U u;
1654
1655 u.u = u64Val;
1656# if RT_INLINE_ASM_GNU_STYLE
1657 __asm__ __volatile__("wrmsr\n\t"
1658 ::"a" (u.s.Lo),
1659 "d" (u.s.Hi),
1660 "c" (uRegister));
1661
1662# elif RT_INLINE_ASM_USES_INTRIN
1663 __writemsr(uRegister, u.u);
1664
1665# else
1666 __asm
1667 {
1668 mov ecx, [uRegister]
1669 mov edx, [u.s.Hi]
1670 mov eax, [u.s.Lo]
1671 wrmsr
1672 }
1673# endif
1674}
1675#endif
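
/* A read-modify-write MSR sketch (disabled, ring-0; accessing an MSR the CPU
 * lacks raises \#GP).  IA32_APIC_BASE (0x1b) and its global-enable bit 11 are
 * used purely as an illustration. */
#if 0
    uint64_t u64ApicBase = ASMRdMsr(0x1b /* IA32_APIC_BASE */);
    u64ApicBase |= RT_BIT_64(11); /* APIC global enable */
    ASMWrMsr(0x1b, u64ApicBase);
#endif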
1676
1677
1678/**
1679 * Reads low part of a machine specific register.
1680 *
1681 * @returns Register content.
1682 * @param uRegister Register to read.
1683 */
1684#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1685DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
1686#else
1687DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
1688{
1689 uint32_t u32;
1690# if RT_INLINE_ASM_GNU_STYLE
1691 __asm__ __volatile__("rdmsr\n\t"
1692 : "=a" (u32)
1693 : "c" (uRegister)
1694 : "edx");
1695
1696# elif RT_INLINE_ASM_USES_INTRIN
1697 u32 = (uint32_t)__readmsr(uRegister);
1698
1699#else
1700 __asm
1701 {
1702 mov ecx, [uRegister]
1703 rdmsr
1704 mov [u32], eax
1705 }
1706# endif
1707
1708 return u32;
1709}
1710#endif
1711
1712
1713/**
1714 * Reads high part of a machine specific register.
1715 *
1716 * @returns Register content.
1717 * @param uRegister Register to read.
1718 */
1719#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1720DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
1721#else
1722DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
1723{
1724 uint32_t u32;
1725# if RT_INLINE_ASM_GNU_STYLE
1726 __asm__ __volatile__("rdmsr\n\t"
1727 : "=d" (u32)
1728 : "c" (uRegister)
1729 : "eax");
1730
1731# elif RT_INLINE_ASM_USES_INTRIN
1732 u32 = (uint32_t)(__readmsr(uRegister) >> 32);
1733
1734# else
1735 __asm
1736 {
1737 mov ecx, [uRegister]
1738 rdmsr
1739 mov [u32], edx
1740 }
1741# endif
1742
1743 return u32;
1744}
1745#endif
1746
1747
1748/**
1749 * Gets dr0.
1750 *
1751 * @returns dr0.
1752 */
1753#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1754DECLASM(RTCCUINTREG) ASMGetDR0(void);
1755#else
1756DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
1757{
1758 RTCCUINTREG uDR0;
1759# if RT_INLINE_ASM_USES_INTRIN
1760 uDR0 = __readdr(0);
1761# elif RT_INLINE_ASM_GNU_STYLE
1762# ifdef RT_ARCH_AMD64
1763 __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
1764# else
1765 __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
1766# endif
1767# else
1768 __asm
1769 {
1770# ifdef RT_ARCH_AMD64
1771 mov rax, dr0
1772 mov [uDR0], rax
1773# else
1774 mov eax, dr0
1775 mov [uDR0], eax
1776# endif
1777 }
1778# endif
1779 return uDR0;
1780}
1781#endif
1782
1783
1784/**
1785 * Gets dr1.
1786 *
1787 * @returns dr1.
1788 */
1789#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1790DECLASM(RTCCUINTREG) ASMGetDR1(void);
1791#else
1792DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
1793{
1794 RTCCUINTREG uDR1;
1795# if RT_INLINE_ASM_USES_INTRIN
1796 uDR1 = __readdr(1);
1797# elif RT_INLINE_ASM_GNU_STYLE
1798# ifdef RT_ARCH_AMD64
1799 __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
1800# else
1801 __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
1802# endif
1803# else
1804 __asm
1805 {
1806# ifdef RT_ARCH_AMD64
1807 mov rax, dr1
1808 mov [uDR1], rax
1809# else
1810 mov eax, dr1
1811 mov [uDR1], eax
1812# endif
1813 }
1814# endif
1815 return uDR1;
1816}
1817#endif
1818
1819
1820/**
1821 * Gets dr2.
1822 *
1823 * @returns dr2.
1824 */
1825#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1826DECLASM(RTCCUINTREG) ASMGetDR2(void);
1827#else
1828DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
1829{
1830 RTCCUINTREG uDR2;
1831# if RT_INLINE_ASM_USES_INTRIN
1832 uDR2 = __readdr(2);
1833# elif RT_INLINE_ASM_GNU_STYLE
1834# ifdef RT_ARCH_AMD64
1835 __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
1836# else
1837 __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
1838# endif
1839# else
1840 __asm
1841 {
1842# ifdef RT_ARCH_AMD64
1843 mov rax, dr2
1844 mov [uDR2], rax
1845# else
1846 mov eax, dr2
1847 mov [uDR2], eax
1848# endif
1849 }
1850# endif
1851 return uDR2;
1852}
1853#endif
1854
1855
1856/**
1857 * Gets dr3.
1858 *
1859 * @returns dr3.
1860 */
1861#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1862DECLASM(RTCCUINTREG) ASMGetDR3(void);
1863#else
1864DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
1865{
1866 RTCCUINTREG uDR3;
1867# if RT_INLINE_ASM_USES_INTRIN
1868 uDR3 = __readdr(3);
1869# elif RT_INLINE_ASM_GNU_STYLE
1870# ifdef RT_ARCH_AMD64
1871 __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
1872# else
1873 __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
1874# endif
1875# else
1876 __asm
1877 {
1878# ifdef RT_ARCH_AMD64
1879 mov rax, dr3
1880 mov [uDR3], rax
1881# else
1882 mov eax, dr3
1883 mov [uDR3], eax
1884# endif
1885 }
1886# endif
1887 return uDR3;
1888}
1889#endif
1890
1891
1892/**
1893 * Gets dr6.
1894 *
1895 * @returns dr6.
1896 */
1897#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1898DECLASM(RTCCUINTREG) ASMGetDR6(void);
1899#else
1900DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
1901{
1902 RTCCUINTREG uDR6;
1903# if RT_INLINE_ASM_USES_INTRIN
1904 uDR6 = __readdr(6);
1905# elif RT_INLINE_ASM_GNU_STYLE
1906# ifdef RT_ARCH_AMD64
1907 __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
1908# else
1909 __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
1910# endif
1911# else
1912 __asm
1913 {
1914# ifdef RT_ARCH_AMD64
1915 mov rax, dr6
1916 mov [uDR6], rax
1917# else
1918 mov eax, dr6
1919 mov [uDR6], eax
1920# endif
1921 }
1922# endif
1923 return uDR6;
1924}
1925#endif
1926
1927
1928/**
1929 * Reads and clears DR6.
1930 *
1931 * @returns DR6.
1932 */
1933#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1934DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
1935#else
1936DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
1937{
1938 RTCCUINTREG uDR6;
1939# if RT_INLINE_ASM_USES_INTRIN
1940 uDR6 = __readdr(6);
1941 __writedr(6, 0xffff0ff0U); /* 31-16 and 4-11 are 1's, 15-12, 3-0 and 63-32 are zero. */
1942# elif RT_INLINE_ASM_GNU_STYLE
1943 RTCCUINTREG uNewValue = 0xffff0ff0U; /* 31-16 and 4-11 are 1's, 15-12, 3-0 and 63-32 are zero. */
1944# ifdef RT_ARCH_AMD64
1945 __asm__ __volatile__("movq %%dr6, %0\n\t"
1946 "movq %1, %%dr6\n\t"
1947 : "=r" (uDR6)
1948 : "r" (uNewValue));
1949# else
1950 __asm__ __volatile__("movl %%dr6, %0\n\t"
1951 "movl %1, %%dr6\n\t"
1952 : "=r" (uDR6)
1953 : "r" (uNewValue));
1954# endif
1955# else
1956 __asm
1957 {
1958# ifdef RT_ARCH_AMD64
1959 mov rax, dr6
1960 mov [uDR6], rax
1961 mov rcx, rax
1962 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 15-12, 3-0 and 63-32 are zero. */
1963 mov dr6, rcx
1964# else
1965 mov eax, dr6
1966 mov [uDR6], eax
1967 mov ecx, 0ffff0ff0h; /* 31-16 and 4-11 are 1's, 15-12 and 3-0 are zero. */
1968 mov dr6, ecx
1969# endif
1970 }
1971# endif
1972 return uDR6;
1973}
1974#endif
1975
1976
1977/**
1978 * Gets dr7.
1979 *
1980 * @returns dr7.
1981 */
1982#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
1983DECLASM(RTCCUINTREG) ASMGetDR7(void);
1984#else
1985DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
1986{
1987 RTCCUINTREG uDR7;
1988# if RT_INLINE_ASM_USES_INTRIN
1989 uDR7 = __readdr(7);
1990# elif RT_INLINE_ASM_GNU_STYLE
1991# ifdef RT_ARCH_AMD64
1992 __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
1993# else
1994 __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
1995# endif
1996# else
1997 __asm
1998 {
1999# ifdef RT_ARCH_AMD64
2000 mov rax, dr7
2001 mov [uDR7], rax
2002# else
2003 mov eax, dr7
2004 mov [uDR7], eax
2005# endif
2006 }
2007# endif
2008 return uDR7;
2009}
2010#endif
2011
2012
2013/**
2014 * Sets dr0.
2015 *
2016 * @param uDRVal Debug register value to write
2017 */
2018#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2019DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
2020#else
2021DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
2022{
2023# if RT_INLINE_ASM_USES_INTRIN
2024 __writedr(0, uDRVal);
2025# elif RT_INLINE_ASM_GNU_STYLE
2026# ifdef RT_ARCH_AMD64
2027 __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
2028# else
2029 __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
2030# endif
2031# else
2032 __asm
2033 {
2034# ifdef RT_ARCH_AMD64
2035 mov rax, [uDRVal]
2036 mov dr0, rax
2037# else
2038 mov eax, [uDRVal]
2039 mov dr0, eax
2040# endif
2041 }
2042# endif
2043}
2044#endif
2045
2046
2047/**
2048 * Sets dr1.
2049 *
2050 * @param uDRVal Debug register value to write
2051 */
2052#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2053DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
2054#else
2055DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
2056{
2057# if RT_INLINE_ASM_USES_INTRIN
2058 __writedr(1, uDRVal);
2059# elif RT_INLINE_ASM_GNU_STYLE
2060# ifdef RT_ARCH_AMD64
2061 __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
2062# else
2063 __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
2064# endif
2065# else
2066 __asm
2067 {
2068# ifdef RT_ARCH_AMD64
2069 mov rax, [uDRVal]
2070 mov dr1, rax
2071# else
2072 mov eax, [uDRVal]
2073 mov dr1, eax
2074# endif
2075 }
2076# endif
2077}
2078#endif
2079
2080
2081/**
2082 * Sets dr2.
2083 *
2084 * @param uDRVal Debug register value to write
2085 */
2086#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2087DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
2088#else
2089DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
2090{
2091# if RT_INLINE_ASM_USES_INTRIN
2092 __writedr(2, uDRVal);
2093# elif RT_INLINE_ASM_GNU_STYLE
2094# ifdef RT_ARCH_AMD64
2095 __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
2096# else
2097 __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
2098# endif
2099# else
2100 __asm
2101 {
2102# ifdef RT_ARCH_AMD64
2103 mov rax, [uDRVal]
2104 mov dr2, rax
2105# else
2106 mov eax, [uDRVal]
2107 mov dr2, eax
2108# endif
2109 }
2110# endif
2111}
2112#endif
2113
2114
2115/**
2116 * Sets dr3.
2117 *
2118 * @param uDRVal Debug register value to write
2119 */
2120#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2121DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
2122#else
2123DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
2124{
2125# if RT_INLINE_ASM_USES_INTRIN
2126 __writedr(3, uDRVal);
2127# elif RT_INLINE_ASM_GNU_STYLE
2128# ifdef RT_ARCH_AMD64
2129 __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
2130# else
2131 __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
2132# endif
2133# else
2134 __asm
2135 {
2136# ifdef RT_ARCH_AMD64
2137 mov rax, [uDRVal]
2138 mov dr3, rax
2139# else
2140 mov eax, [uDRVal]
2141 mov dr3, eax
2142# endif
2143 }
2144# endif
2145}
2146#endif
2147
2148
2149/**
2150 * Sets dr6.
2151 *
2152 * @param uDRVal Debug register value to write
2153 */
2154#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2155DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
2156#else
2157DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
2158{
2159# if RT_INLINE_ASM_USES_INTRIN
2160 __writedr(6, uDRVal);
2161# elif RT_INLINE_ASM_GNU_STYLE
2162# ifdef RT_ARCH_AMD64
2163 __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
2164# else
2165 __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
2166# endif
2167# else
2168 __asm
2169 {
2170# ifdef RT_ARCH_AMD64
2171 mov rax, [uDRVal]
2172 mov dr6, rax
2173# else
2174 mov eax, [uDRVal]
2175 mov dr6, eax
2176# endif
2177 }
2178# endif
2179}
2180#endif
2181
2182
2183/**
2184 * Sets dr7.
2185 *
2186 * @param uDRVal Debug register value to write
2187 */
2188#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2189DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
2190#else
2191DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
2192{
2193# if RT_INLINE_ASM_USES_INTRIN
2194 __writedr(7, uDRVal);
2195# elif RT_INLINE_ASM_GNU_STYLE
2196# ifdef RT_ARCH_AMD64
2197 __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
2198# else
2199 __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
2200# endif
2201# else
2202 __asm
2203 {
2204# ifdef RT_ARCH_AMD64
2205 mov rax, [uDRVal]
2206 mov dr7, rax
2207# else
2208 mov eax, [uDRVal]
2209 mov dr7, eax
2210# endif
2211 }
2212# endif
2213}
2214#endif
2215
2216
2217/**
2218 * Writes an 8-bit unsigned integer to an I/O port, ordered.
2219 *
2220 * @param Port I/O port to write to.
2221 * @param u8 8-bit integer to write.
2222 */
2223#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2224DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
2225#else
2226DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
2227{
2228# if RT_INLINE_ASM_GNU_STYLE
2229 __asm__ __volatile__("outb %b1, %w0\n\t"
2230 :: "Nd" (Port),
2231 "a" (u8));
2232
2233# elif RT_INLINE_ASM_USES_INTRIN
2234 __outbyte(Port, u8);
2235
2236# else
2237 __asm
2238 {
2239 mov dx, [Port]
2240 mov al, [u8]
2241 out dx, al
2242 }
2243# endif
2244}
2245#endif
2246
2247
2248/**
2249 * Reads an 8-bit unsigned integer from an I/O port, ordered.
2250 *
2251 * @returns 8-bit integer.
2252 * @param Port I/O port to read from.
2253 */
2254#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2255DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
2256#else
2257DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
2258{
2259 uint8_t u8;
2260# if RT_INLINE_ASM_GNU_STYLE
2261 __asm__ __volatile__("inb %w1, %b0\n\t"
2262 : "=a" (u8)
2263 : "Nd" (Port));
2264
2265# elif RT_INLINE_ASM_USES_INTRIN
2266 u8 = __inbyte(Port);
2267
2268# else
2269 __asm
2270 {
2271 mov dx, [Port]
2272 in al, dx
2273 mov [u8], al
2274 }
2275# endif
2276 return u8;
2277}
2278#endif
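
/* A classic byte-port sketch (disabled, needs I/O privilege): read CMOS
 * status register A via the index port 0x70 and data port 0x71. */
#if 0
    uint8_t bStatusA;
    ASMOutU8(0x70, 0x0a);     /* select CMOS register 0x0a */
    bStatusA = ASMInU8(0x71); /* bit 7: RTC update in progress */
#endif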
2279
2280
2281/**
2282 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2283 *
2284 * @param Port I/O port to write to.
2285 * @param u16 16-bit integer to write.
2286 */
2287#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2288DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2289#else
2290DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2291{
2292# if RT_INLINE_ASM_GNU_STYLE
2293 __asm__ __volatile__("outw %w1, %w0\n\t"
2294 :: "Nd" (Port),
2295 "a" (u16));
2296
2297# elif RT_INLINE_ASM_USES_INTRIN
2298 __outword(Port, u16);
2299
2300# else
2301 __asm
2302 {
2303 mov dx, [Port]
2304 mov ax, [u16]
2305 out dx, ax
2306 }
2307# endif
2308}
2309#endif
2310
2311
2312/**
2313 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2314 *
2315 * @returns 16-bit integer.
2316 * @param Port I/O port to read from.
2317 */
2318#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2319DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2320#else
2321DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2322{
2323 uint16_t u16;
2324# if RT_INLINE_ASM_GNU_STYLE
2325 __asm__ __volatile__("inw %w1, %w0\n\t"
2326 : "=a" (u16)
2327 : "Nd" (Port));
2328
2329# elif RT_INLINE_ASM_USES_INTRIN
2330 u16 = __inword(Port);
2331
2332# else
2333 __asm
2334 {
2335 mov dx, [Port]
2336 in ax, dx
2337 mov [u16], ax
2338 }
2339# endif
2340 return u16;
2341}
2342#endif
2343
2344
2345/**
2346 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2347 *
2348 * @param Port I/O port to write to.
2349 * @param u32 32-bit integer to write.
2350 */
2351#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2352DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2353#else
2354DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2355{
2356# if RT_INLINE_ASM_GNU_STYLE
2357 __asm__ __volatile__("outl %1, %w0\n\t"
2358 :: "Nd" (Port),
2359 "a" (u32));
2360
2361# elif RT_INLINE_ASM_USES_INTRIN
2362 __outdword(Port, u32);
2363
2364# else
2365 __asm
2366 {
2367 mov dx, [Port]
2368 mov eax, [u32]
2369 out dx, eax
2370 }
2371# endif
2372}
2373#endif
2374
2375
2376/**
2377 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2378 *
2379 * @returns 32-bit integer.
2380 * @param Port I/O port to read from.
2381 */
2382#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2383DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2384#else
2385DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2386{
2387 uint32_t u32;
2388# if RT_INLINE_ASM_GNU_STYLE
2389 __asm__ __volatile__("inl %w1, %0\n\t"
2390 : "=a" (u32)
2391 : "Nd" (Port));
2392
2393# elif RT_INLINE_ASM_USES_INTRIN
2394 u32 = __indword(Port);
2395
2396# else
2397 __asm
2398 {
2399 mov dx, [Port]
2400 in eax, dx
2401 mov [u32], eax
2402 }
2403# endif
2404 return u32;
2405}
2406#endif
2407
2408
2409/**
2410 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2411 *
2412 * @param Port I/O port to write to.
2413 * @param pau8 Pointer to the string buffer.
2414 * @param c The number of items to write.
2415 */
2416#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2417DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2418#else
2419DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2420{
2421# if RT_INLINE_ASM_GNU_STYLE
2422 __asm__ __volatile__("rep; outsb\n\t"
2423 : "+S" (pau8),
2424 "+c" (c)
2425 : "d" (Port));
2426
2427# elif RT_INLINE_ASM_USES_INTRIN
2428 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2429
2430# else
2431 __asm
2432 {
2433 mov dx, [Port]
2434 mov ecx, [c]
2435 mov eax, [pau8]
2436 xchg esi, eax
2437 rep outsb
2438 xchg esi, eax
2439 }
2440# endif
2441}
2442#endif
2443
2444
2445/**
2446 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2447 *
2448 * @param Port I/O port to read from.
2449 * @param pau8 Pointer to the string buffer (output).
2450 * @param c The number of items to read.
2451 */
2452#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2453DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2454#else
2455DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2456{
2457# if RT_INLINE_ASM_GNU_STYLE
2458 __asm__ __volatile__("rep; insb\n\t"
2459 : "+D" (pau8),
2460 "+c" (c)
2461 : "d" (Port));
2462
2463# elif RT_INLINE_ASM_USES_INTRIN
2464 __inbytestring(Port, pau8, (unsigned long)c);
2465
2466# else
2467 __asm
2468 {
2469 mov dx, [Port]
2470 mov ecx, [c]
2471 mov eax, [pau8]
2472 xchg edi, eax
2473 rep insb
2474 xchg edi, eax
2475 }
2476# endif
2477}
2478#endif
2479
2480
2481/**
2482 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2483 *
2484 * @param Port I/O port to write to.
2485 * @param pau16 Pointer to the string buffer.
2486 * @param c The number of items to write.
2487 */
2488#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2489DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2490#else
2491DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2492{
2493# if RT_INLINE_ASM_GNU_STYLE
2494 __asm__ __volatile__("rep; outsw\n\t"
2495 : "+S" (pau16),
2496 "+c" (c)
2497 : "d" (Port));
2498
2499# elif RT_INLINE_ASM_USES_INTRIN
2500 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2501
2502# else
2503 __asm
2504 {
2505 mov dx, [Port]
2506 mov ecx, [c]
2507 mov eax, [pau16]
2508 xchg esi, eax
2509 rep outsw
2510 xchg esi, eax
2511 }
2512# endif
2513}
2514#endif
2515
2516
2517/**
2518 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2519 *
2520 * @param Port I/O port to read from.
2521 * @param pau16 Pointer to the string buffer (output).
2522 * @param c The number of items to read.
2523 */
2524#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2525DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2526#else
2527DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2528{
2529# if RT_INLINE_ASM_GNU_STYLE
2530 __asm__ __volatile__("rep; insw\n\t"
2531 : "+D" (pau16),
2532 "+c" (c)
2533 : "d" (Port));
2534
2535# elif RT_INLINE_ASM_USES_INTRIN
2536 __inwordstring(Port, pau16, (unsigned long)c);
2537
2538# else
2539 __asm
2540 {
2541 mov dx, [Port]
2542 mov ecx, [c]
2543 mov eax, [pau16]
2544 xchg edi, eax
2545 rep insw
2546 xchg edi, eax
2547 }
2548# endif
2549}
2550#endif
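
/* A string I/O sketch (disabled): move one 512-byte ATA PIO sector from the
 * primary channel data port (0x1f0), assuming the device has raised DRQ. */
#if 0
    uint16_t au16Sector[256];
    ASMInStrU16(0x1f0, au16Sector, RT_ELEMENTS(au16Sector));
#endif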
2551
2552
2553/**
2554 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2555 *
2556 * @param Port I/O port to write to.
2557 * @param pau32 Pointer to the string buffer.
2558 * @param c The number of items to write.
2559 */
2560#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2561DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2562#else
2563DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2564{
2565# if RT_INLINE_ASM_GNU_STYLE
2566 __asm__ __volatile__("rep; outsl\n\t"
2567 : "+S" (pau32),
2568 "+c" (c)
2569 : "d" (Port));
2570
2571# elif RT_INLINE_ASM_USES_INTRIN
2572 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2573
2574# else
2575 __asm
2576 {
2577 mov dx, [Port]
2578 mov ecx, [c]
2579 mov eax, [pau32]
2580 xchg esi, eax
2581 rep outsd
2582 xchg esi, eax
2583 }
2584# endif
2585}
2586#endif
2587
2588
2589/**
2590 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2591 *
2592 * @param Port I/O port to read from.
2593 * @param pau32 Pointer to the string buffer (output).
2594 * @param c The number of items to read.
2595 */
2596#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2597DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2598#else
2599DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2600{
2601# if RT_INLINE_ASM_GNU_STYLE
2602 __asm__ __volatile__("rep; insl\n\t"
2603 : "+D" (pau32),
2604 "+c" (c)
2605 : "d" (Port));
2606
2607# elif RT_INLINE_ASM_USES_INTRIN
2608 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2609
2610# else
2611 __asm
2612 {
2613 mov dx, [Port]
2614 mov ecx, [c]
2615 mov eax, [pau32]
2616 xchg edi, eax
2617 rep insd
2618 xchg edi, eax
2619 }
2620# endif
2621}
2622#endif
2623
2624
2625/**
2626 * Invalidate page.
2627 *
2628 * @param pv Address of the page to invalidate.
2629 */
2630#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2631DECLASM(void) ASMInvalidatePage(void *pv);
2632#else
2633DECLINLINE(void) ASMInvalidatePage(void *pv)
2634{
2635# if RT_INLINE_ASM_USES_INTRIN
2636 __invlpg(pv);
2637
2638# elif RT_INLINE_ASM_GNU_STYLE
2639 __asm__ __volatile__("invlpg %0\n\t"
2640 : : "m" (*(uint8_t *)pv));
2641# else
2642 __asm
2643 {
2644# ifdef RT_ARCH_AMD64
2645 mov rax, [pv]
2646 invlpg [rax]
2647# else
2648 mov eax, [pv]
2649 invlpg [eax]
2650# endif
2651 }
2652# endif
2653}
2654#endif
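
/* A sketch (disabled, ring-0): after rewriting a PTE, flush only the stale
 * TLB entry for that linear address instead of reloading CR3.  pvPage is a
 * hypothetical name. */
#if 0
    /* ... update the page table entry mapping pvPage ... */
    ASMInvalidatePage(pvPage);
#endif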
2655
2656
2657/**
2658 * Write back the internal caches and invalidate them.
2659 */
2660#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2661DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2662#else
2663DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2664{
2665# if RT_INLINE_ASM_USES_INTRIN
2666 __wbinvd();
2667
2668# elif RT_INLINE_ASM_GNU_STYLE
2669 __asm__ __volatile__("wbinvd");
2670# else
2671 __asm
2672 {
2673 wbinvd
2674 }
2675# endif
2676}
2677#endif
2678
2679
2680/**
2681 * Invalidate internal and (perhaps) external caches without first
2682 * flushing dirty cache lines. Use with extreme care.
2683 */
2684#if RT_INLINE_ASM_EXTERNAL
2685DECLASM(void) ASMInvalidateInternalCaches(void);
2686#else
2687DECLINLINE(void) ASMInvalidateInternalCaches(void)
2688{
2689# if RT_INLINE_ASM_GNU_STYLE
2690 __asm__ __volatile__("invd");
2691# else
2692 __asm
2693 {
2694 invd
2695 }
2696# endif
2697}
2698#endif
2699
2700
2701/**
2702 * Memory load/store fence, waits for any pending writes and reads to complete.
2703 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2704 */
2705DECLINLINE(void) ASMMemoryFenceSSE2(void)
2706{
2707#if RT_INLINE_ASM_GNU_STYLE
2708 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
2709#elif RT_INLINE_ASM_USES_INTRIN
2710 _mm_mfence();
2711#else
2712 __asm
2713 {
2714 _emit 0x0f
2715 _emit 0xae
2716 _emit 0xf0
2717 }
2718#endif
2719}
2720
2721
2722/**
2723 * Memory store fence, waits for any writes to complete.
2724 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit set.
2725 */
2726DECLINLINE(void) ASMWriteFenceSSE(void)
2727{
2728#if RT_INLINE_ASM_GNU_STYLE
2729 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
2730#elif RT_INLINE_ASM_USES_INTRIN
2731 _mm_sfence();
2732#else
2733 __asm
2734 {
2735 _emit 0x0f
2736 _emit 0xae
2737 _emit 0xf8
2738 }
2739#endif
2740}
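
/* A publish-pattern sketch (disabled): sfence between producing data with
 * non-temporal stores (which bypass normal x86 store ordering) and setting
 * the ready flag.  g_fBufferReady is a hypothetical flag. */
#if 0
    /* ... non-temporal stores filling the shared buffer ... */
    ASMWriteFenceSSE();
    g_fBufferReady = true;
#endif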
2741
2742
2743/**
2744 * Memory load fence, waits for any pending reads to complete.
2745 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit set.
2746 */
2747DECLINLINE(void) ASMReadFenceSSE2(void)
2748{
2749#if RT_INLINE_ASM_GNU_STYLE
2750 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
2751#elif RT_INLINE_ASM_USES_INTRIN
2752 _mm_lfence();
2753#else
2754 __asm
2755 {
2756 _emit 0x0f
2757 _emit 0xae
2758 _emit 0xe8
2759 }
2760#endif
2761}
2762
2763/** @} */
2764#endif
2765