VirtualBox

source: vbox/trunk/include/iprt/asm-amd64-x86.h@49083

Last change on this file since 49083 was 49001, checked in by vboxsync, 12 years ago

asm-amd64-x86.h: Fix ASMGetSegAttr input operand size.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 60.8 KB
/** @file
 * IPRT - AMD64 and x86 Specific Assembly Functions.
 */

/*
 * Copyright (C) 2006-2013 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */

#ifndef ___iprt_asm_amd64_x86_h
#define ___iprt_asm_amd64_x86_h

#include <iprt/types.h>
#if !defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86)
# error "Not on AMD64 or x86"
#endif

#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
# include <intrin.h>
  /* Emit the intrinsics at all optimization levels. */
# pragma intrinsic(_ReadWriteBarrier)
# pragma intrinsic(__cpuid)
# pragma intrinsic(_enable)
# pragma intrinsic(_disable)
# pragma intrinsic(__rdtsc)
# pragma intrinsic(__readmsr)
# pragma intrinsic(__writemsr)
# pragma intrinsic(__outbyte)
# pragma intrinsic(__outbytestring)
# pragma intrinsic(__outword)
# pragma intrinsic(__outwordstring)
# pragma intrinsic(__outdword)
# pragma intrinsic(__outdwordstring)
# pragma intrinsic(__inbyte)
# pragma intrinsic(__inbytestring)
# pragma intrinsic(__inword)
# pragma intrinsic(__inwordstring)
# pragma intrinsic(__indword)
# pragma intrinsic(__indwordstring)
# pragma intrinsic(__invlpg)
# pragma intrinsic(__wbinvd)
# pragma intrinsic(__readcr0)
# pragma intrinsic(__readcr2)
# pragma intrinsic(__readcr3)
# pragma intrinsic(__readcr4)
# pragma intrinsic(__writecr0)
# pragma intrinsic(__writecr3)
# pragma intrinsic(__writecr4)
# pragma intrinsic(__readdr)
# pragma intrinsic(__writedr)
# ifdef RT_ARCH_AMD64
#  pragma intrinsic(__readcr8)
#  pragma intrinsic(__writecr8)
# endif
# if RT_INLINE_ASM_USES_INTRIN >= 15
#  pragma intrinsic(__readeflags)
#  pragma intrinsic(__writeeflags)
# endif
#endif



/** @defgroup grp_rt_asm_amd64_x86 AMD64 and x86 Specific ASM Routines
 * @ingroup grp_rt_asm
 * @{
 */

/** @todo find a more proper place for this structure? */
#pragma pack(1)
/** IDTR */
typedef struct RTIDTR
{
    /** Size of the IDT. */
    uint16_t    cbIdt;
    /** Address of the IDT. */
    uintptr_t   pIdt;
} RTIDTR, *PRTIDTR;
#pragma pack()

#pragma pack(1)
/** GDTR */
typedef struct RTGDTR
{
    /** Size of the GDT. */
    uint16_t    cbGdt;
    /** Address of the GDT. */
    uintptr_t   pGdt;
} RTGDTR, *PRTGDTR;
#pragma pack()


/**
 * Gets the content of the IDTR CPU register.
 * @param   pIdtr   Where to store the IDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMGetIDTR(PRTIDTR pIdtr);
#else
DECLINLINE(void) ASMGetIDTR(PRTIDTR pIdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sidt %0" : "=m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        sidt    [rax]
#  else
        mov     eax, [pIdtr]
        sidt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Sets the content of the IDTR CPU register.
 * @param   pIdtr   Where to load the IDTR contents from.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMSetIDTR(const RTIDTR *pIdtr);
#else
DECLINLINE(void) ASMSetIDTR(const RTIDTR *pIdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lidt %0" : : "m" (*pIdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pIdtr]
        lidt    [rax]
#  else
        mov     eax, [pIdtr]
        lidt    [eax]
#  endif
    }
# endif
}
#endif


/**
 * Gets the content of the GDTR CPU register.
 * @param   pGdtr   Where to store the GDTR contents.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMGetGDTR(PRTGDTR pGdtr);
#else
DECLINLINE(void) ASMGetGDTR(PRTGDTR pGdtr)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sgdt %0" : "=m" (*pGdtr));
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [pGdtr]
        sgdt    [rax]
#  else
        mov     eax, [pGdtr]
        sgdt    [eax]
#  endif
    }
# endif
}
#endif
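
/*
 * Usage sketch (illustrative, not part of the original header): capturing the
 * descriptor-table registers for diagnostics. The example function name and
 * the pfnLog callback are hypothetical; cbIdt/pIdt and cbGdt/pGdt are the
 * structure fields defined above.
 */
#if 0 /* usage sketch, not compiled */
static void exampleDumpDescriptorTables(void (*pfnLog)(const char *pszFormat, ...))
{
    RTIDTR Idtr;
    RTGDTR Gdtr;
    ASMGetIDTR(&Idtr);
    ASMGetGDTR(&Gdtr);
    /* The stored limit is one byte less than the table size. */
    pfnLog("IDT: base=%p limit=%#x\n", (void *)Idtr.pIdt, Idtr.cbIdt);
    pfnLog("GDT: base=%p limit=%#x\n", (void *)Gdtr.pGdt, Gdtr.cbGdt);
}
#endif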

/**
 * Get the CS register.
 * @returns CS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetCS(void);
#else
DECLINLINE(RTSEL) ASMGetCS(void)
{
    RTSEL SelCS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%cs, %0\n\t" : "=r" (SelCS));
# else
    __asm
    {
        mov     ax, cs
        mov     [SelCS], ax
    }
# endif
    return SelCS;
}
#endif


/**
 * Get the DS register.
 * @returns DS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetDS(void);
#else
DECLINLINE(RTSEL) ASMGetDS(void)
{
    RTSEL SelDS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ds, %0\n\t" : "=r" (SelDS));
# else
    __asm
    {
        mov     ax, ds
        mov     [SelDS], ax
    }
# endif
    return SelDS;
}
#endif


/**
 * Get the ES register.
 * @returns ES.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetES(void);
#else
DECLINLINE(RTSEL) ASMGetES(void)
{
    RTSEL SelES;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%es, %0\n\t" : "=r" (SelES));
# else
    __asm
    {
        mov     ax, es
        mov     [SelES], ax
    }
# endif
    return SelES;
}
#endif


/**
 * Get the FS register.
 * @returns FS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetFS(void);
#else
DECLINLINE(RTSEL) ASMGetFS(void)
{
    RTSEL SelFS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%fs, %0\n\t" : "=r" (SelFS));
# else
    __asm
    {
        mov     ax, fs
        mov     [SelFS], ax
    }
# endif
    return SelFS;
}
#endif


/**
 * Get the GS register.
 * @returns GS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetGS(void);
#else
DECLINLINE(RTSEL) ASMGetGS(void)
{
    RTSEL SelGS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%gs, %0\n\t" : "=r" (SelGS));
# else
    __asm
    {
        mov     ax, gs
        mov     [SelGS], ax
    }
# endif
    return SelGS;
}
#endif


/**
 * Get the SS register.
 * @returns SS.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetSS(void);
#else
DECLINLINE(RTSEL) ASMGetSS(void)
{
    RTSEL SelSS;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movw %%ss, %0\n\t" : "=r" (SelSS));
# else
    __asm
    {
        mov     ax, ss
        mov     [SelSS], ax
    }
# endif
    return SelSS;
}
#endif


/**
 * Get the TR register.
 * @returns TR.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetTR(void);
#else
DECLINLINE(RTSEL) ASMGetTR(void)
{
    RTSEL SelTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("str %w0\n\t" : "=r" (SelTR));
# else
    __asm
    {
        str     ax
        mov     [SelTR], ax
    }
# endif
    return SelTR;
}
#endif


/**
 * Get the LDTR register.
 * @returns LDTR.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(RTSEL) ASMGetLDTR(void);
#else
DECLINLINE(RTSEL) ASMGetLDTR(void)
{
    RTSEL SelLDTR;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("sldt %w0\n\t" : "=r" (SelLDTR));
# else
    __asm
    {
        sldt    ax
        mov     [SelLDTR], ax
    }
# endif
    return SelLDTR;
}
#endif

/**
 * Get the access rights for the segment selector.
 *
 * @returns The access rights on success or ~0U on failure.
 * @param   uSel    The selector value.
 *
 * @remarks Using ~0U for failure is chosen because valid access rights always
 *          have bits 0:7 as 0 (on both Intel & AMD).
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(uint32_t) ASMGetSegAttr(uint32_t uSel);
#else
DECLINLINE(uint32_t) ASMGetSegAttr(uint32_t uSel)
{
    uint32_t uAttr;
    /* LAR only reads the low 16 bits of the source operand, but a 32-bit
       destination register (eax) is required to get the full 32-bit access rights. */
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("lar %1, %%eax\n\t"
                         "jz done%=\n\t"
                         "movl $0xffffffff, %%eax\n\t"
                         "done%=:\n\t"
                         "movl %%eax, %0\n\t"
                         : "=r" (uAttr)
                         : "r" (uSel)
                         : "cc", "%eax");
# else
    __asm
    {
        lar     eax, [uSel]
        jz      done
        mov     eax, 0ffffffffh
    done:
        mov     [uAttr], eax
    }
# endif
    return uAttr;
}
#endif
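
/*
 * Usage sketch (illustrative, not part of the original header): because valid
 * results always have bits 0:7 clear, ~0U is an unambiguous failure marker.
 * Testing bit 15 for the present flag assumes the usual LAR layout, where the
 * descriptor access byte lands in bits 8:15 of the result; the example
 * function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static bool exampleIsSelectorUsable(uint32_t uSel)
{
    uint32_t const uAttr = ASMGetSegAttr(uSel);
    if (uAttr == UINT32_MAX)
        return false;                           /* LAR failed: null or invalid selector. */
    return (uAttr & UINT32_C(0x8000)) != 0;     /* P bit of the access byte. */
}
#endif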


/**
 * Get the [RE]FLAGS register.
 * @returns [RE]FLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
DECLASM(RTCCUINTREG) ASMGetFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetFlags(void)
{
    RTCCUINTREG uFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "popq %0\n\t"
                         : "=r" (uFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "popl %0\n\t"
                         : "=r" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= 15
    uFlags = __readeflags();
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        pushfq
        pop     [uFlags]
#  else
        pushfd
        pop     [uFlags]
#  endif
    }
# endif
    return uFlags;
}
#endif


/**
 * Set the [RE]FLAGS register.
 * @param   uFlags  The new [RE]FLAGS value.
 */
#if RT_INLINE_ASM_EXTERNAL && RT_INLINE_ASM_USES_INTRIN < 15
DECLASM(void) ASMSetFlags(RTCCUINTREG uFlags);
#else
DECLINLINE(void) ASMSetFlags(RTCCUINTREG uFlags)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushq %0\n\t"
                         "popfq\n\t"
                         : : "g" (uFlags));
#  else
    __asm__ __volatile__("pushl %0\n\t"
                         "popfl\n\t"
                         : : "g" (uFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN >= 15
    __writeeflags(uFlags);
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        push    [uFlags]
        popfq
#  else
        push    [uFlags]
        popfd
#  endif
    }
# endif
}
#endif


/**
 * Gets the content of the CPU timestamp counter register.
 *
 * @returns TSC.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint64_t) ASMReadTSC(void);
#else
DECLINLINE(uint64_t) ASMReadTSC(void)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdtsc\n\t" : "=a" (u.s.Lo), "=d" (u.s.Hi));
# else
#  if RT_INLINE_ASM_USES_INTRIN
    u.u = __rdtsc();
#  else
    __asm
    {
        rdtsc
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
#  endif
# endif
    return u.u;
}
#endif
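
/*
 * Usage sketch (illustrative, not part of the original header): a crude cycle
 * measurement. Plain RDTSC is not serializing, so on out-of-order CPUs the
 * delta is only approximate unless fenced; the example function name and the
 * pfnWork callback are hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static uint64_t exampleCycleCount(void (*pfnWork)(void))
{
    uint64_t const uStart = ASMReadTSC();
    pfnWork();
    return ASMReadTSC() - uStart;   /* Approximate cycles spent in pfnWork. */
}
#endif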


/**
 * Performs the cpuid instruction returning all registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId(uint32_t uOperator, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ __volatile__ ("cpuid\n\t"
                          : "=a" (uRAX),
                            "=b" (uRBX),
                            "=c" (uRCX),
                            "=d" (uRDX)
                          : "0" (uOperator), "2" (0));
    *(uint32_t *)pvEAX = (uint32_t)uRAX;
    *(uint32_t *)pvEBX = (uint32_t)uRBX;
    *(uint32_t *)pvECX = (uint32_t)uRCX;
    *(uint32_t *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ __volatile__ ("xchgl %%ebx, %1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx, %1\n\t"
                          : "=a" (*(uint32_t *)pvEAX),
                            "=r" (*(uint32_t *)pvEBX),
                            "=c" (*(uint32_t *)pvECX),
                            "=d" (*(uint32_t *)pvEDX)
                          : "0" (uOperator), "2" (0));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    *(uint32_t *)pvEAX = aInfo[0];
    *(uint32_t *)pvEBX = aInfo[1];
    *(uint32_t *)pvECX = aInfo[2];
    *(uint32_t *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t *)pvEAX = uEAX;
    *(uint32_t *)pvEBX = uEBX;
    *(uint32_t *)pvECX = uECX;
    *(uint32_t *)pvEDX = uEDX;
# endif
}
#endif
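
/*
 * Usage sketch (illustrative, not part of the original header): assembling the
 * 12-character vendor string from leaf 0, which the CPU returns in the
 * EBX:EDX:ECX order also relied upon by the ASMIs*CpuEx checks further down.
 * Assumes <string.h> for memcpy; the example function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static void exampleGetCpuVendor(char szVendor[13])
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    memcpy(&szVendor[0], &uEBX, 4);
    memcpy(&szVendor[4], &uEDX, 4);
    memcpy(&szVendor[8], &uECX, 4);
    szVendor[12] = '\0';            /* e.g. "GenuineIntel" or "AuthenticAMD". */
}
#endif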


/**
 * Performs the CPUID instruction with EAX and ECX input returning ALL output
 * registers.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   uIdxECX     The ECX index.
 * @param   pvEAX       Where to store eax.
 * @param   pvEBX       Where to store ebx.
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL || RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId_Idx_ECX(uint32_t uOperator, uint32_t uIdxECX, void *pvEAX, void *pvEBX, void *pvECX, void *pvEDX)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uRAX, uRBX, uRCX, uRDX;
    __asm__ ("cpuid\n\t"
             : "=a" (uRAX),
               "=b" (uRBX),
               "=c" (uRCX),
               "=d" (uRDX)
             : "0" (uOperator),
               "2" (uIdxECX));
    *(uint32_t *)pvEAX = (uint32_t)uRAX;
    *(uint32_t *)pvEBX = (uint32_t)uRBX;
    *(uint32_t *)pvECX = (uint32_t)uRCX;
    *(uint32_t *)pvEDX = (uint32_t)uRDX;
#  else
    __asm__ ("xchgl %%ebx, %1\n\t"
             "cpuid\n\t"
             "xchgl %%ebx, %1\n\t"
             : "=a" (*(uint32_t *)pvEAX),
               "=r" (*(uint32_t *)pvEBX),
               "=c" (*(uint32_t *)pvECX),
               "=d" (*(uint32_t *)pvEDX)
             : "0" (uOperator),
               "2" (uIdxECX));
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuidex(aInfo, uOperator, uIdxECX);
    *(uint32_t *)pvEAX = aInfo[0];
    *(uint32_t *)pvEBX = aInfo[1];
    *(uint32_t *)pvECX = aInfo[2];
    *(uint32_t *)pvEDX = aInfo[3];

# else
    uint32_t uEAX;
    uint32_t uEBX;
    uint32_t uECX;
    uint32_t uEDX;
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        mov     ecx, [uIdxECX]
        cpuid
        mov     [uEAX], eax
        mov     [uEBX], ebx
        mov     [uECX], ecx
        mov     [uEDX], edx
        pop     ebx
    }
    *(uint32_t *)pvEAX = uEAX;
    *(uint32_t *)pvEBX = uEBX;
    *(uint32_t *)pvECX = uECX;
    *(uint32_t *)pvEDX = uEDX;
# endif
}
#endif


/**
 * Performs the cpuid instruction returning ecx and edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @param   pvECX       Where to store ecx.
 * @param   pvEDX       Where to store edx.
 * @remark  We're using void pointers to ease the use of special bitfield structures and such.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX);
#else
DECLINLINE(void) ASMCpuId_ECX_EDX(uint32_t uOperator, void *pvECX, void *pvEDX)
{
    uint32_t uEBX;
    ASMCpuId(uOperator, &uOperator, &uEBX, pvECX, pvEDX);
}
#endif


/**
 * Performs the cpuid instruction returning eax.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EAX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EAX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EAX(uint32_t uOperator)
{
    RTCCUINTREG xAX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "rbx", "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (xAX)
             : "0" (uOperator)
             : "ecx", "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (xAX)
             : "0" (uOperator)
             : "edx", "ecx", "ebx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xAX = aInfo[0];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xAX], eax
        pop     ebx
    }
# endif
    return (uint32_t)xAX;
}
#endif


/**
 * Performs the cpuid instruction returning ebx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EBX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EBX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EBX(uint32_t uOperator)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=b" (xBX)
             : "0" (uOperator)
             : "rdx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "mov %%ebx, %%edx\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xBX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=b" (xBX)
             : "0" (uOperator)
             : "edx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint32_t)xBX;
}
#endif


/**
 * Performs the cpuid instruction returning ecx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns ECX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_ECX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_ECX(uint32_t uOperator)
{
    RTCCUINTREG xCX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=c" (xCX)
             : "0" (uOperator)
             : "rbx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "edx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=c" (xCX)
             : "0" (uOperator)
             : "ebx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xCX = aInfo[2];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xCX], ecx
        pop     ebx
    }
# endif
    return (uint32_t)xCX;
}
#endif


/**
 * Performs the cpuid instruction returning edx.
 *
 * @param   uOperator   CPUID operation (eax).
 * @returns EDX after cpuid operation.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMCpuId_EDX(uint32_t uOperator);
#else
DECLINLINE(uint32_t) ASMCpuId_EDX(uint32_t uOperator)
{
    RTCCUINTREG xDX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ ("cpuid"
             : "=a" (uSpill),
               "=d" (xDX)
             : "0" (uOperator)
             : "rbx", "rcx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    __asm__ ("push %%ebx\n\t"
             "cpuid\n\t"
             "pop %%ebx\n\t"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ecx");
#  else
    __asm__ ("cpuid"
             : "=a" (uOperator),
               "=d" (xDX)
             : "0" (uOperator)
             : "ebx", "ecx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, uOperator);
    xDX = aInfo[3];

# else
    __asm
    {
        push    ebx
        mov     eax, [uOperator]
        cpuid
        mov     [xDX], edx
        pop     ebx
    }
# endif
    return (uint32_t)xDX;
}
#endif

/**
 * Checks if the current CPU supports CPUID.
 *
 * @returns true if CPUID is supported.
 */
DECLINLINE(bool) ASMHasCpuId(void)
{
#ifdef RT_ARCH_AMD64
    return true; /* ASSUME that all amd64 compatible CPUs have cpuid. */
#else /* !RT_ARCH_AMD64 */
    bool fRet = false;
# if RT_INLINE_ASM_GNU_STYLE
    uint32_t u1;
    uint32_t u2;
    __asm__ ("pushf\n\t"
             "pop %1\n\t"
             "mov %1, %2\n\t"
             "xorl $0x200000, %1\n\t"
             "push %1\n\t"
             "popf\n\t"
             "pushf\n\t"
             "pop %1\n\t"
             "cmpl %1, %2\n\t"
             "setne %0\n\t"
             "push %2\n\t"
             "popf\n\t"
             : "=m" (fRet), "=r" (u1), "=r" (u2));
# else
    __asm
    {
        pushfd
        pop     eax
        mov     ebx, eax
        xor     eax, 0200000h
        push    eax
        popfd
        pushfd
        pop     eax
        cmp     eax, ebx
        setne   fRet
        push    ebx
        popfd
    }
# endif
    return fRet;
#endif /* !RT_ARCH_AMD64 */
}
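
/*
 * Usage sketch (illustrative, not part of the original header): on 32-bit x86
 * the EFLAGS.ID probe above should gate any CPUID use, since 386/early-486
 * class parts lack the instruction; on AMD64 the check is constant true. The
 * example function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static uint32_t exampleMaxStdLeaf(void)
{
    if (!ASMHasCpuId())
        return 0;                   /* No CPUID at all; report no leaves. */
    return ASMCpuId_EAX(0);         /* EAX of leaf 0 = highest standard leaf. */
}
#endif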


/**
 * Gets the APIC ID of the current CPU.
 *
 * @returns the APIC ID.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint8_t) ASMGetApicId(void);
#else
DECLINLINE(uint8_t) ASMGetApicId(void)
{
    RTCCUINTREG xBX;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "rcx", "rdx");
#  elif (defined(PIC) || defined(__PIC__)) && defined(__i386__)
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("mov %%ebx,%1\n\t"
                          "cpuid\n\t"
                          "xchgl %%ebx,%1\n\t"
                          : "=a" (uSpill),
                            "=rm" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  else
    RTCCUINTREG uSpill;
    __asm__ __volatile__ ("cpuid"
                          : "=a" (uSpill),
                            "=b" (xBX)
                          : "0" (1)
                          : "ecx", "edx");
#  endif

# elif RT_INLINE_ASM_USES_INTRIN
    int aInfo[4];
    __cpuid(aInfo, 1);
    xBX = aInfo[1];

# else
    __asm
    {
        push    ebx
        mov     eax, 1
        cpuid
        mov     [xBX], ebx
        pop     ebx
    }
# endif
    return (uint8_t)(xBX >> 24);
}
#endif


/**
 * Tests if it is a genuine Intel CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0).
 * @param   uECX    ECX return from ASMCpuId(0).
 * @param   uEDX    EDX return from ASMCpuId(0).
 */
DECLINLINE(bool) ASMIsIntelCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x756e6547)
        && uECX == UINT32_C(0x6c65746e)
        && uEDX == UINT32_C(0x49656e69);
}


/**
 * Tests if this is a genuine Intel CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsIntelCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsIntelCpuEx(uEBX, uECX, uEDX);
}


/**
 * Tests if it is an authentic AMD CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0).
 * @param   uECX    ECX return from ASMCpuId(0).
 * @param   uEDX    EDX return from ASMCpuId(0).
 */
DECLINLINE(bool) ASMIsAmdCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x68747541)
        && uECX == UINT32_C(0x444d4163)
        && uEDX == UINT32_C(0x69746e65);
}


/**
 * Tests if this is an authentic AMD CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsAmdCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsAmdCpuEx(uEBX, uECX, uEDX);
}


/**
 * Tests if it is a centaur hauling VIA CPU based on the ASMCpuId(0) output.
 *
 * @returns true/false.
 * @param   uEBX    EBX return from ASMCpuId(0).
 * @param   uECX    ECX return from ASMCpuId(0).
 * @param   uEDX    EDX return from ASMCpuId(0).
 */
DECLINLINE(bool) ASMIsViaCentaurCpuEx(uint32_t uEBX, uint32_t uECX, uint32_t uEDX)
{
    return uEBX == UINT32_C(0x746e6543)
        && uECX == UINT32_C(0x736c7561)
        && uEDX == UINT32_C(0x48727561);
}


/**
 * Tests if this is a centaur hauling VIA CPU.
 *
 * @returns true/false.
 * @remarks ASSUMES that cpuid is supported by the CPU.
 */
DECLINLINE(bool) ASMIsViaCentaurCpu(void)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    return ASMIsViaCentaurCpuEx(uEBX, uECX, uEDX);
}


/**
 * Checks whether ASMCpuId_EAX(0x00000000) indicates a valid range.
 *
 * @returns true/false.
 * @param   uEAX    The EAX value of CPUID leaf 0x00000000.
 *
 * @note    This only succeeds if there are at least two leaves in the range.
 * @remarks The upper range limit is just some half reasonable value we've
 *          picked out of thin air.
 */
DECLINLINE(bool) ASMIsValidStdRange(uint32_t uEAX)
{
    return uEAX >= UINT32_C(0x00000001) && uEAX <= UINT32_C(0x000fffff);
}


/**
 * Checks whether ASMCpuId_EAX(0x80000000) indicates a valid range.
 *
 * @returns true/false.
 * @param   uEAX    The EAX value of CPUID leaf 0x80000000.
 *
 * @note    This only succeeds if there are at least two leaves in the range.
 * @remarks The upper range limit is just some half reasonable value we've
 *          picked out of thin air.
 */
DECLINLINE(bool) ASMIsValidExtRange(uint32_t uEAX)
{
    return uEAX >= UINT32_C(0x80000001) && uEAX <= UINT32_C(0x800fffff);
}


/**
 * Extracts the CPU family from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Family.
 * @param   uEAX    EAX return from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuFamily(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf
         ? ((uEAX >> 20) & 0x7f) + 0xf
         : ((uEAX >> 8) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), Intel variant.
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuModelIntel(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6) /* family! */
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001), AMD variant.
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuModelAMD(uint32_t uEAX)
{
    return ((uEAX >> 8) & 0xf) == 0xf
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU model from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Model.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 * @param   fIntel  Whether it's an intel CPU. Use ASMIsIntelCpuEx() or ASMIsIntelCpu().
 */
DECLINLINE(uint32_t) ASMGetCpuModel(uint32_t uEAX, bool fIntel)
{
    return ((uEAX >> 8) & 0xf) == 0xf || (((uEAX >> 8) & 0xf) == 0x6 && fIntel) /* family! */
         ? ((uEAX >> 4) & 0xf) | ((uEAX >> 12) & 0xf0)
         : ((uEAX >> 4) & 0xf);
}


/**
 * Extracts the CPU stepping from ASMCpuId(1) or ASMCpuId(0x80000001).
 *
 * @returns Stepping.
 * @param   uEAX    EAX from ASMCpuId(1) or ASMCpuId(0x80000001).
 */
DECLINLINE(uint32_t) ASMGetCpuStepping(uint32_t uEAX)
{
    return uEAX & 0xf;
}
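
/*
 * Usage sketch (illustrative, not part of the original header): decoding
 * family/model/stepping from leaf 1 with the helpers above, including the
 * extended-model handling that differs between Intel and AMD. The example
 * function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static void exampleDecodeFms(uint32_t *puFamily, uint32_t *puModel, uint32_t *puStepping)
{
    uint32_t uEAX, uEBX, uECX, uEDX;
    ASMCpuId(0, &uEAX, &uEBX, &uECX, &uEDX);
    bool const fIntel = ASMIsIntelCpuEx(uEBX, uECX, uEDX);
    uEAX = ASMCpuId_EAX(1);
    *puFamily   = ASMGetCpuFamily(uEAX);
    *puModel    = ASMGetCpuModel(uEAX, fIntel);
    *puStepping = ASMGetCpuStepping(uEAX);
}
#endif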


/**
 * Get cr0.
 * @returns cr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR0(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR0(void)
{
    RTCCUINTREG uCR0;
# if RT_INLINE_ASM_USES_INTRIN
    uCR0 = __readcr0();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr0, %0\n\t" : "=r" (uCR0));
#  else
    __asm__ __volatile__("movl %%cr0, %0\n\t" : "=r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr0
        mov     [uCR0], rax
#  else
        mov     eax, cr0
        mov     [uCR0], eax
#  endif
    }
# endif
    return uCR0;
}
#endif


/**
 * Sets the CR0 register.
 * @param   uCR0    The new CR0 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR0(RTCCUINTREG uCR0);
#else
DECLINLINE(void) ASMSetCR0(RTCCUINTREG uCR0)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr0(uCR0);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr0\n\t" :: "r" (uCR0));
#  else
    __asm__ __volatile__("movl %0, %%cr0\n\t" :: "r" (uCR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR0]
        mov     cr0, rax
#  else
        mov     eax, [uCR0]
        mov     cr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr2.
 * @returns cr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR2(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR2(void)
{
    RTCCUINTREG uCR2;
# if RT_INLINE_ASM_USES_INTRIN
    uCR2 = __readcr2();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr2, %0\n\t" : "=r" (uCR2));
#  else
    __asm__ __volatile__("movl %%cr2, %0\n\t" : "=r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr2
        mov     [uCR2], rax
#  else
        mov     eax, cr2
        mov     [uCR2], eax
#  endif
    }
# endif
    return uCR2;
}
#endif


/**
 * Sets the CR2 register.
 * @param   uCR2    The new CR2 value.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMSetCR2(RTCCUINTREG uCR2);
#else
DECLINLINE(void) ASMSetCR2(RTCCUINTREG uCR2)
{
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr2\n\t" :: "r" (uCR2));
#  else
    __asm__ __volatile__("movl %0, %%cr2\n\t" :: "r" (uCR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR2]
        mov     cr2, rax
#  else
        mov     eax, [uCR2]
        mov     cr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr3.
 * @returns cr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR3(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR3(void)
{
    RTCCUINTREG uCR3;
# if RT_INLINE_ASM_USES_INTRIN
    uCR3 = __readcr3();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t" : "=r" (uCR3));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t" : "=r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     [uCR3], rax
#  else
        mov     eax, cr3
        mov     [uCR3], eax
#  endif
    }
# endif
    return uCR3;
}
#endif


/**
 * Sets the CR3 register.
 *
 * @param   uCR3    New CR3 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR3(RTCCUINTREG uCR3);
#else
DECLINLINE(void) ASMSetCR3(RTCCUINTREG uCR3)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(uCR3);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr3\n\t" : : "r" (uCR3));
#  else
    __asm__ __volatile__("movl %0, %%cr3\n\t" : : "r" (uCR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR3]
        mov     cr3, rax
#  else
        mov     eax, [uCR3]
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Reloads the CR3 register.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMReloadCR3(void);
#else
DECLINLINE(void) ASMReloadCR3(void)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr3(__readcr3());

# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG u;
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr3, %0\n\t"
                         "movq %0, %%cr3\n\t"
                         : "=r" (u));
#  else
    __asm__ __volatile__("movl %%cr3, %0\n\t"
                         "movl %0, %%cr3\n\t"
                         : "=r" (u));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr3
        mov     cr3, rax
#  else
        mov     eax, cr3
        mov     cr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Get cr4.
 * @returns cr4.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR4(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR4(void)
{
    RTCCUINTREG uCR4;
# if RT_INLINE_ASM_USES_INTRIN
    uCR4 = __readcr4();

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%cr4, %0\n\t" : "=r" (uCR4));
#  else
    __asm__ __volatile__("movl %%cr4, %0\n\t" : "=r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, cr4
        mov     [uCR4], rax
#  else
        push    eax /* just in case */
        /*mov   eax, cr4*/
        _emit   0x0f
        _emit   0x20
        _emit   0xe0
        mov     [uCR4], eax
        pop     eax
#  endif
    }
# endif
    return uCR4;
}
#endif


/**
 * Sets the CR4 register.
 *
 * @param   uCR4    New CR4 value.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetCR4(RTCCUINTREG uCR4);
#else
DECLINLINE(void) ASMSetCR4(RTCCUINTREG uCR4)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writecr4(uCR4);

# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%cr4\n\t" : : "r" (uCR4));
#  else
    __asm__ __volatile__("movl %0, %%cr4\n\t" : : "r" (uCR4));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uCR4]
        mov     cr4, rax
#  else
        mov     eax, [uCR4]
        _emit   0x0F
        _emit   0x22
        _emit   0xE0 /* mov cr4, eax */
#  endif
    }
# endif
}
#endif


/**
 * Get cr8.
 * @returns cr8.
 * @remark  The lock prefix hack for access from non-64-bit modes is NOT used and 0 is returned.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetCR8(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetCR8(void)
{
# ifdef RT_ARCH_AMD64
    RTCCUINTREG uCR8;
#  if RT_INLINE_ASM_USES_INTRIN
    uCR8 = __readcr8();

#  elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movq %%cr8, %0\n\t" : "=r" (uCR8));
#  else
    __asm
    {
        mov     rax, cr8
        mov     [uCR8], rax
    }
#  endif
    return uCR8;
# else /* !RT_ARCH_AMD64 */
    return 0;
# endif /* !RT_ARCH_AMD64 */
}
#endif


/**
 * Enables interrupts (EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMIntEnable(void);
#else
DECLINLINE(void) ASMIntEnable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("sti\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _enable();
# else
    __asm sti
# endif
}
#endif


/**
 * Disables interrupts (!EFLAGS.IF).
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMIntDisable(void);
#else
DECLINLINE(void) ASMIntDisable(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm("cli\n");
# elif RT_INLINE_ASM_USES_INTRIN
    _disable();
# else
    __asm cli
# endif
}
#endif


/**
 * Disables interrupts and returns previous xFLAGS.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMIntDisableFlags(void);
#else
DECLINLINE(RTCCUINTREG) ASMIntDisableFlags(void)
{
    RTCCUINTREG xFlags;
# if RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("pushfq\n\t"
                         "cli\n\t"
                         "popq %0\n\t"
                         : "=r" (xFlags));
#  else
    __asm__ __volatile__("pushfl\n\t"
                         "cli\n\t"
                         "popl %0\n\t"
                         : "=r" (xFlags));
#  endif
# elif RT_INLINE_ASM_USES_INTRIN && !defined(RT_ARCH_X86)
    xFlags = ASMGetFlags();
    _disable();
# else
    __asm {
        pushfd
        cli
        pop     [xFlags]
    }
# endif
    return xFlags;
}
#endif
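
/*
 * Usage sketch (illustrative, not part of the original header): the intended
 * pairing of ASMIntDisableFlags with ASMSetFlags, which restores the caller's
 * interrupt state instead of unconditionally re-enabling with ASMIntEnable.
 * The example function name and pfnWork callback are hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static void exampleCriticalSection(void (*pfnWork)(void))
{
    RTCCUINTREG const fSavedFlags = ASMIntDisableFlags(); /* cli + previous flags. */
    pfnWork();                                            /* Runs with IF clear. */
    ASMSetFlags(fSavedFlags);                             /* IF back as it was. */
}
#endif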


/**
 * Are interrupts enabled?
 *
 * @returns true / false.
 */
DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
{
    RTCCUINTREG uFlags = ASMGetFlags();
    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
}


/**
 * Halts the CPU until interrupted.
 */
#if RT_INLINE_ASM_EXTERNAL
DECLASM(void) ASMHalt(void);
#else
DECLINLINE(void) ASMHalt(void)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("hlt\n\t");
# else
    __asm {
        hlt
    }
# endif
}
#endif


/**
 * Reads a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint64_t) ASMRdMsr(uint32_t uRegister);
#else
DECLINLINE(uint64_t) ASMRdMsr(uint32_t uRegister)
{
    RTUINT64U u;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u.s.Lo),
                           "=d" (u.s.Hi)
                         : "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    u.u = __readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u.s.Lo], eax
        mov     [u.s.Hi], edx
    }
# endif

    return u.u;
}
#endif


/**
 * Writes a machine specific register.
 *
 * @param   uRegister   Register to write to.
 * @param   u64Val      Value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val);
#else
DECLINLINE(void) ASMWrMsr(uint32_t uRegister, uint64_t u64Val)
{
    RTUINT64U u;

    u.u = u64Val;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("wrmsr\n\t"
                         ::"a" (u.s.Lo),
                           "d" (u.s.Hi),
                           "c" (uRegister));

# elif RT_INLINE_ASM_USES_INTRIN
    __writemsr(uRegister, u.u);

# else
    __asm
    {
        mov     ecx, [uRegister]
        mov     edx, [u.s.Hi]
        mov     eax, [u.s.Lo]
        wrmsr
    }
# endif
}
#endif
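
/*
 * Usage sketch (illustrative, not part of the original header): RDMSR/WRMSR
 * are privileged instructions, so these helpers only work from ring-0 code.
 * MSR 0x1b (IA32_APIC_BASE) is used here purely as a familiar example; the
 * example function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static bool exampleIsBootstrapProcessor(void)
{
    uint64_t const uApicBase = ASMRdMsr(0x1b);  /* IA32_APIC_BASE. */
    return (uApicBase & RT_BIT_64(8)) != 0;     /* BSP flag (bit 8). */
}
#endif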


/**
 * Reads the low part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMRdMsr_Low(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_Low(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=a" (u32)
                         : "c" (uRegister)
                         : "edx");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)__readmsr(uRegister);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], eax
    }
# endif

    return u32;
}
#endif


/**
 * Reads the high part of a machine specific register.
 *
 * @returns Register content.
 * @param   uRegister   Register to read.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint32_t) ASMRdMsr_High(uint32_t uRegister);
#else
DECLINLINE(uint32_t) ASMRdMsr_High(uint32_t uRegister)
{
    uint32_t u32;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rdmsr\n\t"
                         : "=d" (u32)
                         : "c" (uRegister)
                         : "eax");

# elif RT_INLINE_ASM_USES_INTRIN
    u32 = (uint32_t)(__readmsr(uRegister) >> 32);

# else
    __asm
    {
        mov     ecx, [uRegister]
        rdmsr
        mov     [u32], edx
    }
# endif

    return u32;
}
#endif


/**
 * Gets dr0.
 *
 * @returns dr0.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR0(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR0(void)
{
    RTCCUINTREG uDR0;
# if RT_INLINE_ASM_USES_INTRIN
    uDR0 = __readdr(0);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr0, %0\n\t" : "=r" (uDR0));
#  else
    __asm__ __volatile__("movl %%dr0, %0\n\t" : "=r" (uDR0));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr0
        mov     [uDR0], rax
#  else
        mov     eax, dr0
        mov     [uDR0], eax
#  endif
    }
# endif
    return uDR0;
}
#endif


/**
 * Gets dr1.
 *
 * @returns dr1.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR1(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR1(void)
{
    RTCCUINTREG uDR1;
# if RT_INLINE_ASM_USES_INTRIN
    uDR1 = __readdr(1);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr1, %0\n\t" : "=r" (uDR1));
#  else
    __asm__ __volatile__("movl %%dr1, %0\n\t" : "=r" (uDR1));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr1
        mov     [uDR1], rax
#  else
        mov     eax, dr1
        mov     [uDR1], eax
#  endif
    }
# endif
    return uDR1;
}
#endif


/**
 * Gets dr2.
 *
 * @returns dr2.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR2(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR2(void)
{
    RTCCUINTREG uDR2;
# if RT_INLINE_ASM_USES_INTRIN
    uDR2 = __readdr(2);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr2, %0\n\t" : "=r" (uDR2));
#  else
    __asm__ __volatile__("movl %%dr2, %0\n\t" : "=r" (uDR2));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr2
        mov     [uDR2], rax
#  else
        mov     eax, dr2
        mov     [uDR2], eax
#  endif
    }
# endif
    return uDR2;
}
#endif


/**
 * Gets dr3.
 *
 * @returns dr3.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR3(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR3(void)
{
    RTCCUINTREG uDR3;
# if RT_INLINE_ASM_USES_INTRIN
    uDR3 = __readdr(3);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr3, %0\n\t" : "=r" (uDR3));
#  else
    __asm__ __volatile__("movl %%dr3, %0\n\t" : "=r" (uDR3));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr3
        mov     [uDR3], rax
#  else
        mov     eax, dr3
        mov     [uDR3], eax
#  endif
    }
# endif
    return uDR3;
}
#endif


/**
 * Gets dr6.
 *
 * @returns dr6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR6(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR6(void)
{
    RTCCUINTREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t" : "=r" (uDR6));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t" : "=r" (uDR6));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
#  else
        mov     eax, dr6
        mov     [uDR6], eax
#  endif
    }
# endif
    return uDR6;
}
#endif


/**
 * Reads and clears DR6.
 *
 * @returns DR6.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetAndClearDR6(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetAndClearDR6(void)
{
    RTCCUINTREG uDR6;
# if RT_INLINE_ASM_USES_INTRIN
    uDR6 = __readdr(6);
    __writedr(6, 0xffff0ff0U); /* Bits 31-16 and 11-4 are 1's; bits 15-12, 3-0 and 63-32 are 0. */
# elif RT_INLINE_ASM_GNU_STYLE
    RTCCUINTREG uNewValue = 0xffff0ff0U; /* Bits 31-16 and 11-4 are 1's; bits 15-12, 3-0 and 63-32 are 0. */
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr6, %0\n\t"
                         "movq %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  else
    __asm__ __volatile__("movl %%dr6, %0\n\t"
                         "movl %1, %%dr6\n\t"
                         : "=r" (uDR6)
                         : "r" (uNewValue));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr6
        mov     [uDR6], rax
        mov     rcx, rax
        mov     ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's; bits 15-12, 3-0 and 63-32 are 0. */
        mov     dr6, rcx
#  else
        mov     eax, dr6
        mov     [uDR6], eax
        mov     ecx, 0ffff0ff0h; /* Bits 31-16 and 11-4 are 1's; bits 15-12 and 3-0 are 0. */
        mov     dr6, ecx
#  endif
    }
# endif
    return uDR6;
}
#endif
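
/*
 * Usage sketch (illustrative, not part of the original header): a fragment of
 * a #DB handler. DR6 status bits are sticky, so reading and re-arming the
 * register to 0xffff0ff0 in one step avoids stale B0-B3 bits confusing the
 * next exception. The example function name and pfnHitBp callback are
 * hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static void exampleHandleDebugException(void (*pfnHitBp)(unsigned iBp))
{
    RTCCUINTREG const uDR6 = ASMGetAndClearDR6();
    unsigned iBp;
    for (iBp = 0; iBp < 4; iBp++)
        if (uDR6 & RT_BIT_32(iBp))      /* B0..B3: which breakpoint fired. */
            pfnHitBp(iBp);
}
#endif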


/**
 * Gets dr7.
 *
 * @returns dr7.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(RTCCUINTREG) ASMGetDR7(void);
#else
DECLINLINE(RTCCUINTREG) ASMGetDR7(void)
{
    RTCCUINTREG uDR7;
# if RT_INLINE_ASM_USES_INTRIN
    uDR7 = __readdr(7);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %%dr7, %0\n\t" : "=r" (uDR7));
#  else
    __asm__ __volatile__("movl %%dr7, %0\n\t" : "=r" (uDR7));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, dr7
        mov     [uDR7], rax
#  else
        mov     eax, dr7
        mov     [uDR7], eax
#  endif
    }
# endif
    return uDR7;
}
#endif


/**
 * Sets dr0.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR0(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR0(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(0, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr0\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr0\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr0, rax
#  else
        mov     eax, [uDRVal]
        mov     dr0, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr1.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR1(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR1(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(1, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr1\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr1\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr1, rax
#  else
        mov     eax, [uDRVal]
        mov     dr1, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr2.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR2(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR2(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(2, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr2\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr2\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr2, rax
#  else
        mov     eax, [uDRVal]
        mov     dr2, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr3.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR3(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR3(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(3, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr3\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr3\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr3, rax
#  else
        mov     eax, [uDRVal]
        mov     dr3, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr6.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR6(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR6(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(6, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr6\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr6\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr6, rax
#  else
        mov     eax, [uDRVal]
        mov     dr6, eax
#  endif
    }
# endif
}
#endif


/**
 * Sets dr7.
 *
 * @param   uDRVal  Debug register value to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMSetDR7(RTCCUINTREG uDRVal);
#else
DECLINLINE(void) ASMSetDR7(RTCCUINTREG uDRVal)
{
# if RT_INLINE_ASM_USES_INTRIN
    __writedr(7, uDRVal);
# elif RT_INLINE_ASM_GNU_STYLE
#  ifdef RT_ARCH_AMD64
    __asm__ __volatile__("movq %0, %%dr7\n\t" : : "r" (uDRVal));
#  else
    __asm__ __volatile__("movl %0, %%dr7\n\t" : : "r" (uDRVal));
#  endif
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rax, [uDRVal]
        mov     dr7, rax
#  else
        mov     eax, [uDRVal]
        mov     dr7, eax
#  endif
    }
# endif
}
#endif


/**
 * Writes an 8-bit unsigned integer to an I/O port, ordered.
 *
 * @param   Port    I/O port to write to.
 * @param   u8      8-bit integer to write.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(void) ASMOutU8(RTIOPORT Port, uint8_t u8);
#else
DECLINLINE(void) ASMOutU8(RTIOPORT Port, uint8_t u8)
{
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("outb %b1, %w0\n\t"
                         :: "Nd" (Port),
                            "a" (u8));

# elif RT_INLINE_ASM_USES_INTRIN
    __outbyte(Port, u8);

# else
    __asm
    {
        mov     dx, [Port]
        mov     al, [u8]
        out     dx, al
    }
# endif
}
#endif


/**
 * Reads an 8-bit unsigned integer from an I/O port, ordered.
 *
 * @returns 8-bit integer.
 * @param   Port    I/O port to read from.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(uint8_t) ASMInU8(RTIOPORT Port);
#else
DECLINLINE(uint8_t) ASMInU8(RTIOPORT Port)
{
    uint8_t u8;
# if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("inb %w1, %b0\n\t"
                         : "=a" (u8)
                         : "Nd" (Port));

# elif RT_INLINE_ASM_USES_INTRIN
    u8 = __inbyte(Port);

# else
    __asm
    {
        mov     dx, [Port]
        in      al, dx
        mov     [u8], al
    }
# endif
    return u8;
}
#endif
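
/*
 * Usage sketch (illustrative, not part of the original header): the classic
 * POST/debug-port write. Port 0x80 is traditionally unused on PCs, which is
 * also why writes to it are a popular small I/O delay; ring-0 or a suitable
 * IOPL is required. The example function name is hypothetical.
 */
#if 0 /* usage sketch, not compiled */
static void examplePostCode(uint8_t bCode)
{
    ASMOutU8(0x80, bCode);          /* Visible on a POST-code debug card. */
}
#endif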
2352
2353
2354/**
2355 * Writes a 16-bit unsigned integer to an I/O port, ordered.
2356 *
2357 * @param Port I/O port to write to.
2358 * @param u16 16-bit integer to write.
2359 */
2360#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2361DECLASM(void) ASMOutU16(RTIOPORT Port, uint16_t u16);
2362#else
2363DECLINLINE(void) ASMOutU16(RTIOPORT Port, uint16_t u16)
2364{
2365# if RT_INLINE_ASM_GNU_STYLE
2366 __asm__ __volatile__("outw %w1, %w0\n\t"
2367 :: "Nd" (Port),
2368 "a" (u16));
2369
2370# elif RT_INLINE_ASM_USES_INTRIN
2371 __outword(Port, u16);
2372
2373# else
2374 __asm
2375 {
2376 mov dx, [Port]
2377 mov ax, [u16]
2378 out dx, ax
2379 }
2380# endif
2381}
2382#endif
2383
2384
2385/**
2386 * Reads a 16-bit unsigned integer from an I/O port, ordered.
2387 *
2388 * @returns 16-bit integer.
2389 * @param Port I/O port to read from.
2390 */
2391#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2392DECLASM(uint16_t) ASMInU16(RTIOPORT Port);
2393#else
2394DECLINLINE(uint16_t) ASMInU16(RTIOPORT Port)
2395{
2396 uint16_t u16;
2397# if RT_INLINE_ASM_GNU_STYLE
2398 __asm__ __volatile__("inw %w1, %w0\n\t"
2399 : "=a" (u16)
2400 : "Nd" (Port));
2401
2402# elif RT_INLINE_ASM_USES_INTRIN
2403 u16 = __inword(Port);
2404
2405# else
2406 __asm
2407 {
2408 mov dx, [Port]
2409 in ax, dx
2410 mov [u16], ax
2411 }
2412# endif
2413 return u16;
2414}
2415#endif
2416
2417
2418/**
2419 * Writes a 32-bit unsigned integer to an I/O port, ordered.
2420 *
2421 * @param Port I/O port to write to.
2422 * @param u32 32-bit integer to write.
2423 */
2424#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2425DECLASM(void) ASMOutU32(RTIOPORT Port, uint32_t u32);
2426#else
2427DECLINLINE(void) ASMOutU32(RTIOPORT Port, uint32_t u32)
2428{
2429# if RT_INLINE_ASM_GNU_STYLE
2430 __asm__ __volatile__("outl %1, %w0\n\t"
2431 :: "Nd" (Port),
2432 "a" (u32));
2433
2434# elif RT_INLINE_ASM_USES_INTRIN
2435 __outdword(Port, u32);
2436
2437# else
2438 __asm
2439 {
2440 mov dx, [Port]
2441 mov eax, [u32]
2442 out dx, eax
2443 }
2444# endif
2445}
2446#endif
2447
2448
2449/**
2450 * Reads a 32-bit unsigned integer from an I/O port, ordered.
2451 *
2452 * @returns 32-bit integer.
2453 * @param Port I/O port to read from.
2454 */
2455#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2456DECLASM(uint32_t) ASMInU32(RTIOPORT Port);
2457#else
2458DECLINLINE(uint32_t) ASMInU32(RTIOPORT Port)
2459{
2460 uint32_t u32;
2461# if RT_INLINE_ASM_GNU_STYLE
2462 __asm__ __volatile__("inl %w1, %0\n\t"
2463 : "=a" (u32)
2464 : "Nd" (Port));
2465
2466# elif RT_INLINE_ASM_USES_INTRIN
2467 u32 = __indword(Port);
2468
2469# else
2470 __asm
2471 {
2472 mov dx, [Port]
2473 in eax, dx
2474 mov [u32], eax
2475 }
2476# endif
2477 return u32;
2478}
2479#endif
2480
2481
2482/**
2483 * Writes a string of 8-bit unsigned integer items to an I/O port, ordered.
2484 *
2485 * @param Port I/O port to write to.
2486 * @param pau8 Pointer to the string buffer.
2487 * @param c The number of items to write.
2488 */
2489#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2490DECLASM(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c);
2491#else
2492DECLINLINE(void) ASMOutStrU8(RTIOPORT Port, uint8_t const *pau8, size_t c)
2493{
2494# if RT_INLINE_ASM_GNU_STYLE
2495 __asm__ __volatile__("rep; outsb\n\t"
2496 : "+S" (pau8),
2497 "+c" (c)
2498 : "d" (Port));
2499
2500# elif RT_INLINE_ASM_USES_INTRIN
2501 __outbytestring(Port, (unsigned char *)pau8, (unsigned long)c);
2502
2503# else
2504 __asm
2505 {
2506 mov dx, [Port]
2507 mov ecx, [c]
2508 mov eax, [pau8]
2509 xchg esi, eax
2510 rep outsb
2511 xchg esi, eax
2512 }
2513# endif
2514}
2515#endif
2516
2517
2518/**
2519 * Reads a string of 8-bit unsigned integer items from an I/O port, ordered.
2520 *
2521 * @param Port I/O port to read from.
2522 * @param pau8 Pointer to the string buffer (output).
2523 * @param c The number of items to read.
2524 */
2525#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2526DECLASM(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c);
2527#else
2528DECLINLINE(void) ASMInStrU8(RTIOPORT Port, uint8_t *pau8, size_t c)
2529{
2530# if RT_INLINE_ASM_GNU_STYLE
2531 __asm__ __volatile__("rep; insb\n\t"
2532 : "+D" (pau8),
2533 "+c" (c)
2534 : "d" (Port));
2535
2536# elif RT_INLINE_ASM_USES_INTRIN
2537 __inbytestring(Port, pau8, (unsigned long)c);
2538
2539# else
2540 __asm
2541 {
2542 mov dx, [Port]
2543 mov ecx, [c]
2544 mov eax, [pau8]
2545 xchg edi, eax
2546 rep insb
2547 xchg edi, eax
2548 }
2549# endif
2550}
2551#endif
2552
2553
2554/**
2555 * Writes a string of 16-bit unsigned integer items to an I/O port, ordered.
2556 *
2557 * @param Port I/O port to write to.
2558 * @param pau16 Pointer to the string buffer.
2559 * @param c The number of items to write.
2560 */
2561#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2562DECLASM(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c);
2563#else
2564DECLINLINE(void) ASMOutStrU16(RTIOPORT Port, uint16_t const *pau16, size_t c)
2565{
2566# if RT_INLINE_ASM_GNU_STYLE
2567 __asm__ __volatile__("rep; outsw\n\t"
2568 : "+S" (pau16),
2569 "+c" (c)
2570 : "d" (Port));
2571
2572# elif RT_INLINE_ASM_USES_INTRIN
2573 __outwordstring(Port, (unsigned short *)pau16, (unsigned long)c);
2574
2575# else
2576 __asm
2577 {
2578 mov dx, [Port]
2579 mov ecx, [c]
2580 mov eax, [pau16]
2581 xchg esi, eax
2582 rep outsw
2583 xchg esi, eax
2584 }
2585# endif
2586}
2587#endif
2588
2589
2590/**
2591 * Reads a string of 16-bit unsigned integer items from an I/O port, ordered.
2592 *
2593 * @param Port I/O port to read from.
2594 * @param pau16 Pointer to the string buffer (output).
2595 * @param c The number of items to read.
2596 */
2597#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2598DECLASM(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c);
2599#else
2600DECLINLINE(void) ASMInStrU16(RTIOPORT Port, uint16_t *pau16, size_t c)
2601{
2602# if RT_INLINE_ASM_GNU_STYLE
2603 __asm__ __volatile__("rep; insw\n\t"
2604 : "+D" (pau16),
2605 "+c" (c)
2606 : "d" (Port));
2607
2608# elif RT_INLINE_ASM_USES_INTRIN
2609 __inwordstring(Port, pau16, (unsigned long)c);
2610
2611# else
2612 __asm
2613 {
2614 mov dx, [Port]
2615 mov ecx, [c]
2616 mov eax, [pau16]
2617 xchg edi, eax
2618 rep insw
2619 xchg edi, eax
2620 }
2621# endif
2622}
2623#endif
2624
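/* Usage sketch: a legacy ATA PIO data transfer. Assuming the device on the
 * primary channel has already raised DRQ, one 512-byte sector is 256 data
 * words read from the data port at 0x1f0. The port number is standard legacy
 * PC hardware, but this is a sketch, not a complete ATA driver:
 *
 *      uint16_t au16Sector[256];
 *      ASMInStrU16(0x1f0, au16Sector, RT_ELEMENTS(au16Sector));
 */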
2625
2626/**
2627 * Writes a string of 32-bit unsigned integer items to an I/O port, ordered.
2628 *
2629 * @param Port I/O port to write to.
2630 * @param pau32 Pointer to the string buffer.
2631 * @param c The number of items to write.
2632 */
2633#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2634DECLASM(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c);
2635#else
2636DECLINLINE(void) ASMOutStrU32(RTIOPORT Port, uint32_t const *pau32, size_t c)
2637{
2638# if RT_INLINE_ASM_GNU_STYLE
2639 __asm__ __volatile__("rep; outsl\n\t"
2640 : "+S" (pau32),
2641 "+c" (c)
2642 : "d" (Port));
2643
2644# elif RT_INLINE_ASM_USES_INTRIN
2645 __outdwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2646
2647# else
2648 __asm
2649 {
2650 mov dx, [Port]
2651 mov ecx, [c]
2652 mov eax, [pau32]
2653 xchg esi, eax
2654 rep outsd
2655 xchg esi, eax
2656 }
2657# endif
2658}
2659#endif
2660
2661
2662/**
2663 * Reads a string of 32-bit unsigned integer items from an I/O port, ordered.
2664 *
2665 * @param Port I/O port to read from.
2666 * @param pau32 Pointer to the string buffer (output).
2667 * @param c The number of items to read.
2668 */
2669#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2670DECLASM(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c);
2671#else
2672DECLINLINE(void) ASMInStrU32(RTIOPORT Port, uint32_t *pau32, size_t c)
2673{
2674# if RT_INLINE_ASM_GNU_STYLE
2675 __asm__ __volatile__("rep; insl\n\t"
2676 : "+D" (pau32),
2677 "+c" (c)
2678 : "d" (Port));
2679
2680# elif RT_INLINE_ASM_USES_INTRIN
2681 __indwordstring(Port, (unsigned long *)pau32, (unsigned long)c);
2682
2683# else
2684 __asm
2685 {
2686 mov dx, [Port]
2687 mov ecx, [c]
2688 mov eax, [pau32]
2689 xchg edi, eax
2690 rep insd
2691 xchg edi, eax
2692 }
2693# endif
2694}
2695#endif
2696
2697
2698/**
2699 * Invalidates the TLB entry for the page containing the given address (INVLPG).
2700 *
2701 * @param pv Address of the page to invalidate.
2702 */
2703#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2704DECLASM(void) ASMInvalidatePage(void *pv);
2705#else
2706DECLINLINE(void) ASMInvalidatePage(void *pv)
2707{
2708# if RT_INLINE_ASM_USES_INTRIN
2709 __invlpg(pv);
2710
2711# elif RT_INLINE_ASM_GNU_STYLE
2712 __asm__ __volatile__("invlpg %0\n\t"
2713 : : "m" (*(uint8_t *)pv));
2714# else
2715 __asm
2716 {
2717# ifdef RT_ARCH_AMD64
2718 mov rax, [pv]
2719 invlpg [rax]
2720# else
2721 mov eax, [pv]
2722 invlpg [eax]
2723# endif
2724 }
2725# endif
2726}
2727#endif
2728
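/* Usage sketch (pPte, uNewPte and pvPage are hypothetical names): INVLPG is
 * typically issued right after a page table entry changes, so the CPU stops
 * serving translations from the stale TLB entry:
 *
 *      pPte->u = uNewPte;                  - update the PTE
 *      ASMInvalidatePage(pvPage);          - flush the TLB entry for that linear address
 */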
2729
2730/**
2731 * Writes back the internal caches and invalidates them (WBINVD).
2732 */
2733#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
2734DECLASM(void) ASMWriteBackAndInvalidateCaches(void);
2735#else
2736DECLINLINE(void) ASMWriteBackAndInvalidateCaches(void)
2737{
2738# if RT_INLINE_ASM_USES_INTRIN
2739 __wbinvd();
2740
2741# elif RT_INLINE_ASM_GNU_STYLE
2742 __asm__ __volatile__("wbinvd");
2743# else
2744 __asm
2745 {
2746 wbinvd
2747 }
2748# endif
2749}
2750#endif
2751
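/* Usage sketch: WBINVD is typically used around operations that change the
 * cacheability of memory, e.g. reprogramming the MTRRs. The sequence below is
 * a simplified illustration (uNewDefType is hypothetical and the full MTRR
 * update protocol involves more steps), using ASMWrMsr from this file and the
 * MSR_IA32_MTRR_DEF_TYPE constant as named in iprt/x86.h:
 *
 *      ASMWriteBackAndInvalidateCaches();
 *      ASMWrMsr(MSR_IA32_MTRR_DEF_TYPE, uNewDefType);
 */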
2752
2753/**
2754 * Invalidates the internal and (depending on the CPU) external caches without
2755 * first writing back dirty cache lines (INVD); modified data is lost. Use with extreme care.
2756 */
2757#if RT_INLINE_ASM_EXTERNAL
2758DECLASM(void) ASMInvalidateInternalCaches(void);
2759#else
2760DECLINLINE(void) ASMInvalidateInternalCaches(void)
2761{
2762# if RT_INLINE_ASM_GNU_STYLE
2763 __asm__ __volatile__("invd");
2764# else
2765 __asm
2766 {
2767 invd
2768 }
2769# endif
2770}
2771#endif
2772
2773
2774/**
2775 * Memory load/store fence, waits for any pending writes and reads to complete.
2776 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
2777 */
2778DECLINLINE(void) ASMMemoryFenceSSE2(void)
2779{
2780#if RT_INLINE_ASM_GNU_STYLE
2781 __asm__ __volatile__ (".byte 0x0f,0xae,0xf0\n\t");
2782#elif RT_INLINE_ASM_USES_INTRIN
2783 _mm_mfence();
2784#else
2785 __asm
2786 {
2787 _emit 0x0f
2788 _emit 0xae
2789 _emit 0xf0
2790 }
2791#endif
2792}
2793
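/* Usage sketch (pShared and its members are hypothetical): the producer side
 * of a data/flag handshake. The MFENCE ensures the payload store is globally
 * visible before the ready flag is set:
 *
 *      pShared->u32Payload = u32Value;
 *      ASMMemoryFenceSSE2();
 *      pShared->fReady = true;
 */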
2794
2795/**
2796 * Memory store fence, waits for any writes to complete.
2797 * Requires the X86_CPUID_FEATURE_EDX_SSE CPUID bit to be set.
2798 */
2799DECLINLINE(void) ASMWriteFenceSSE(void)
2800{
2801#if RT_INLINE_ASM_GNU_STYLE
2802 __asm__ __volatile__ (".byte 0x0f,0xae,0xf8\n\t");
2803#elif RT_INLINE_ASM_USES_INTRIN
2804 _mm_sfence();
2805#else
2806 __asm
2807 {
2808 _emit 0x0f
2809 _emit 0xae
2810 _emit 0xf8
2811 }
2812#endif
2813}
2814
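/* Usage sketch (pvWcBuffer and pDev are hypothetical): SFENCE matters mainly
 * for weakly ordered stores, e.g. after filling a write-combining mapping or
 * issuing non-temporal stores, and before notifying the consumer:
 *
 *      memcpy(pvWcBuffer, pvSrc, cb);      - fill the WC-mapped buffer
 *      ASMWriteFenceSSE();                 - drain pending weakly ordered stores
 *      pDev->fBufferReady = 1;             - made-up doorbell/flag write
 */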
2815
2816/**
2817 * Memory load fence, waits for any pending reads to complete.
2818 * Requires the X86_CPUID_FEATURE_EDX_SSE2 CPUID bit to be set.
2819 */
2820DECLINLINE(void) ASMReadFenceSSE2(void)
2821{
2822#if RT_INLINE_ASM_GNU_STYLE
2823 __asm__ __volatile__ (".byte 0x0f,0xae,0xe8\n\t");
2824#elif RT_INLINE_ASM_USES_INTRIN
2825 _mm_lfence();
2826#else
2827 __asm
2828 {
2829 _emit 0x0f
2830 _emit 0xae
2831 _emit 0xe8
2832 }
2833#endif
2834}
2835
2836/** @} */
2837#endif /* !___iprt_asm_amd64_x86_h */
2838