VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 86208

Last change on this file since 86208 was 85963, checked in by vboxsync, 4 years ago

tstRTInlineAsm.cpp: Display the extended APIC IDs too.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 76.4 KB
Line 
1/* $Id: tstRTInlineAsm.cpp 85963 2020-08-31 20:12:01Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <iprt/asm.h>
32#include <iprt/asm-math.h>
33
34/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
35 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
36 * fine. */
37#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
38# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
39# define GCC44_32BIT_PIC
40# endif
41#endif
42
43#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
44# include <iprt/asm-amd64-x86.h>
45# include <iprt/x86.h>
46#else
47# include <iprt/time.h>
48#endif
49#include <iprt/rand.h>
50#include <iprt/stream.h>
51#include <iprt/string.h>
52#include <iprt/param.h>
53#include <iprt/thread.h>
54#include <iprt/test.h>
55#include <iprt/time.h>
56
57
58
59/*********************************************************************************************************************************
60* Defined Constants And Macros *
61*********************************************************************************************************************************/
/** Checks that @a val equals @a expect and registers a test failure (showing
 *  both values formatted with @a fmt) when they differ.  Both @a val and
 *  @a expect are evaluated a second time when building the failure message,
 *  so pass side-effect free expressions only. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)
70
/** Evaluates @a op exactly once into a local of @a type and checks the result
 *  against @a expect (cast to @a type), registering a test failure formatted
 *  with @a fmt on mismatch.  @a expect is evaluated again for the failure
 *  message, so it should be side-effect free. */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)
80
81/**
82 * Calls a worker function with different worker variable storage types.
83 */
#define DO_SIMPLE_TEST(name, type) \
    do \
    { \
        RTTestISub(#name); \
        type StackVar; /* plain stack storage */ \
        tst ## name ## Worker(&StackVar); \
        \
        /* guarded allocation with the guard page *before* the variable */ \
        type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        tst ## name ## Worker(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
        \
        /* guarded allocation with the guard page *after* the variable */ \
        pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        tst ## name ## Worker(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
    } while (0)
101
102
103/*********************************************************************************************************************************
104* Global Variables *
105*********************************************************************************************************************************/
/** The test instance handle, used by the CHECK* macros and the RTTestI* API. */
static RTTEST g_hTest;
108
109
110
111#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
112
113const char *getCacheAss(unsigned u)
114{
115 if (u == 0)
116 return "res0 ";
117 if (u == 1)
118 return "direct";
119 if (u >= 256)
120 return "???";
121
122 char *pszRet;
123 RTStrAPrintf(&pszRet, "%d way", u); /* intentional leak! */
124 return pszRet;
125}
126
127
/**
 * Translates the 4-bit L2 cache associativity field into a display string.
 *
 * @returns Read-only string (fixed width for values 0..15, "????" otherwise).
 * @param   u   The raw L2 associativity field value.
 */
const char *getL2CacheAss(unsigned u)
{
    static const char * const s_apszAss[16] =
    {
        "off ",   /*  0 */
        "direct", /*  1 */
        "2 way ", /*  2 */
        "res3 ",  /*  3 */
        "4 way ", /*  4 */
        "res5 ",  /*  5 */
        "8 way ", /*  6 */
        "res7 ",  /*  7 */
        "16 way", /*  8 */
        "res9 ",  /*  9 */
        "res10 ", /* 10 */
        "res11 ", /* 11 */
        "res12 ", /* 12 */
        "res13 ", /* 13 */
        "res14 ", /* 14 */
        "fully ", /* 15 */
    };
    if (u < 16)
        return s_apszAss[u];
    return "????";
}
152
153
154/**
155 * Test and dump all possible info from the CPUID instruction.
156 *
157 * @remark Bits shared with the libc cpuid.c program. This all written by me, so no worries.
158 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
159 */
160void tstASMCpuId(void)
161{
162 RTTestISub("ASMCpuId");
163
164 unsigned iBit;
165 struct
166 {
167 uint32_t uEBX, uEAX, uEDX, uECX;
168 } s;
169 if (!ASMHasCpuId())
170 {
171 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
172 return;
173 }
174
175 /*
176 * Try the 0 function and use that for checking the ASMCpuId_* variants.
177 */
178 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
179
180 uint32_t u32;
181
182 u32 = ASMCpuId_EAX(0);
183 CHECKVAL(u32, s.uEAX, "%x");
184 u32 = ASMCpuId_EBX(0);
185 CHECKVAL(u32, s.uEBX, "%x");
186 u32 = ASMCpuId_ECX(0);
187 CHECKVAL(u32, s.uECX, "%x");
188 u32 = ASMCpuId_EDX(0);
189 CHECKVAL(u32, s.uEDX, "%x");
190
191 uint32_t uECX2 = s.uECX - 1;
192 uint32_t uEDX2 = s.uEDX - 1;
193 ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
194 CHECKVAL(uECX2, s.uECX, "%x");
195 CHECKVAL(uEDX2, s.uEDX, "%x");
196
197 uint32_t uEAX2 = s.uEAX - 1;
198 uint32_t uEBX2 = s.uEBX - 1;
199 uECX2 = s.uECX - 1;
200 uEDX2 = s.uEDX - 1;
201 ASMCpuIdExSlow(0, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
202 CHECKVAL(uEAX2, s.uEAX, "%x");
203 CHECKVAL(uEBX2, s.uEBX, "%x");
204 CHECKVAL(uECX2, s.uECX, "%x");
205 CHECKVAL(uEDX2, s.uEDX, "%x");
206
207 /*
208 * Check the extended APIC stuff.
209 */
210 uint32_t idExtApic;
211 if (ASMCpuId_EAX(0) >= 0xb)
212 {
213 uint8_t idApic = ASMGetApicId();
214 do
215 {
216 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
217 ASMCpuIdExSlow(0xb, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
218 idExtApic = ASMGetApicIdExt0B();
219 } while (ASMGetApicId() != idApic);
220
221 CHECKVAL(uEDX2, idExtApic, "%x");
222 if (idApic != (uint8_t)idExtApic && uECX2 != 0)
223 RTTestIFailed("ASMGetApicIdExt0B() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
224 }
225 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
226 {
227 uint8_t idApic = ASMGetApicId();
228 do
229 {
230 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
231 ASMCpuIdExSlow(0x8000001e, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
232 idExtApic = ASMGetApicIdExt8000001E();
233 } while (ASMGetApicId() != idApic);
234 CHECKVAL(uEAX2, idExtApic, "%x");
235 if (idApic != (uint8_t)idExtApic)
236 RTTestIFailed("ASMGetApicIdExt8000001E() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
237 }
238
239 /*
240 * Done testing, dump the information.
241 */
242 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
243 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
244 const uint32_t cFunctions = s.uEAX;
245
246 /* raw dump */
247 RTTestIPrintf(RTTESTLVL_ALWAYS,
248 "\n"
249 " RAW Standard CPUIDs\n"
250 "Function eax ebx ecx edx\n");
251 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
252 {
253 ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
254 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
255 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
256
257 /* Some leafs output depend on the initial value of ECX.
258 * The same seems to apply to invalid standard functions */
259 if (iStd > cFunctions)
260 continue;
261 if (iStd == 0x04) /* Deterministic Cache Parameters Leaf */
262 for (uint32_t uECX = 1; s.uEAX & 0x1f; uECX++)
263 {
264 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
265 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
266 RTTESTI_CHECK_BREAK(uECX < 128);
267 }
268 else if (iStd == 0x07) /* Structured Extended Feature Flags */
269 {
270 uint32_t uMax = s.uEAX;
271 for (uint32_t uECX = 1; uECX < uMax; uECX++)
272 {
273 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
274 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
275 RTTESTI_CHECK_BREAK(uECX < 128);
276 }
277 }
278 else if (iStd == 0x0b) /* Extended Topology Enumeration Leafs */
279 for (uint32_t uECX = 1; (s.uEAX & 0x1f) && (s.uEBX & 0xffff); uECX++)
280 {
281 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
282 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
283 RTTESTI_CHECK_BREAK(uECX < 128);
284 }
285 else if (iStd == 0x0d) /* Extended State Enumeration Leafs */
286 for (uint32_t uECX = 1; s.uEAX != 0 || s.uEBX != 0 || s.uECX != 0 || s.uEDX != 0; uECX++)
287 {
288 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
289 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
290 RTTESTI_CHECK_BREAK(uECX < 128);
291 }
292 else if ( iStd == 0x0f /* Platform quality of service monitoring (PQM) */
293 || iStd == 0x10 /* Platform quality of service enforcement (PQE) */
294 || iStd == 0x12 /* SGX Enumeration */
295 || iStd == 0x14 /* Processor Trace Enumeration */
296 || iStd == 0x17 /* SoC Vendor Attribute Enumeration */
297 || iStd == 0x18 /* Deterministic Address Translation Parameters */)
298 {
299 /** @todo */
300 }
301 else
302 {
303 u32 = ASMCpuId_EAX(iStd);
304 CHECKVAL(u32, s.uEAX, "%x");
305
306 uint32_t u32EbxMask = UINT32_MAX;
307 if (iStd == 1)
308 u32EbxMask = UINT32_C(0x00ffffff); /* Omit the local apic ID in case we're rescheduled. */
309 u32 = ASMCpuId_EBX(iStd);
310 CHECKVAL(u32 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
311
312 u32 = ASMCpuId_ECX(iStd);
313 CHECKVAL(u32, s.uECX, "%x");
314 u32 = ASMCpuId_EDX(iStd);
315 CHECKVAL(u32, s.uEDX, "%x");
316
317 uECX2 = s.uECX - 1;
318 uEDX2 = s.uEDX - 1;
319 ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
320 CHECKVAL(uECX2, s.uECX, "%x");
321 CHECKVAL(uEDX2, s.uEDX, "%x");
322
323 uEAX2 = s.uEAX - 1;
324 uEBX2 = s.uEBX - 1;
325 uECX2 = s.uECX - 1;
326 uEDX2 = s.uEDX - 1;
327 ASMCpuId(iStd, &uEAX2, &uEBX2, &uECX2, &uEDX2);
328 CHECKVAL(uEAX2, s.uEAX, "%x");
329 CHECKVAL(uEBX2 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
330 CHECKVAL(uECX2, s.uECX, "%x");
331 CHECKVAL(uEDX2, s.uEDX, "%x");
332 }
333 }
334
335 /*
336 * Understandable output
337 */
338 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
339 RTTestIPrintf(RTTESTLVL_ALWAYS,
340 "Name: %.04s%.04s%.04s\n"
341 "Support: 0-%u\n",
342 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
343 bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);
344
345 /*
346 * Get Features.
347 */
348 if (cFunctions >= 1)
349 {
350 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
351 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
352 RTTestIPrintf(RTTESTLVL_ALWAYS,
353 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
354 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
355 "Stepping: %d\n"
356 "Type: %d (%s)\n"
357 "APIC ID: %#04x\n"
358 "Logical CPUs: %d\n"
359 "CLFLUSH Size: %d\n"
360 "Brand ID: %#04x\n",
361 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
362 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
363 ASMGetCpuStepping(s.uEAX),
364 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
365 (s.uEBX >> 24) & 0xff,
366 (s.uEBX >> 16) & 0xff,
367 (s.uEBX >> 8) & 0xff,
368 (s.uEBX >> 0) & 0xff);
369
370 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
371 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
372 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
373 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
374 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
375 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
376 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
377 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
378 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
379 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
380 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
381 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
382 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
383 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
384 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
385 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
386 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
387 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
388 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
389 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
390 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
391 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
392 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
393 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
394 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
395 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
396 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
397 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
398 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
399 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
400 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
401 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
402 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
403 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
404
405 /** @todo check intel docs. */
406 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
407 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
408 for (iBit = 1; iBit < 13; iBit++)
409 if (s.uECX & RT_BIT(iBit))
410 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
411 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
412 for (iBit = 14; iBit < 32; iBit++)
413 if (s.uECX & RT_BIT(iBit))
414 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
415 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
416 }
417 if (ASMCpuId_EAX(0) >= 0xb)
418 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 0b): %#010x\n", ASMGetApicIdExt0B());
419
420 /*
421 * Extended.
422 * Implemented after AMD specs.
423 */
424 /** @todo check out the intel specs. */
425 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
426 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
427 {
428 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
429 return;
430 }
431 const uint32_t cExtFunctions = s.uEAX | 0x80000000;
432
433 /* raw dump */
434 RTTestIPrintf(RTTESTLVL_ALWAYS,
435 "\n"
436 " RAW Extended CPUIDs\n"
437 "Function eax ebx ecx edx\n");
438 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
439 {
440 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
441 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
442 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
443
444 if (iExt > cExtFunctions)
445 continue; /* Invalid extended functions seems change the value if ECX changes */
446 if (iExt == 0x8000001d)
447 continue; /* Takes cache level in ecx. */
448
449 u32 = ASMCpuId_EAX(iExt);
450 CHECKVAL(u32, s.uEAX, "%x");
451 u32 = ASMCpuId_EBX(iExt);
452 CHECKVAL(u32, s.uEBX, "%x");
453 u32 = ASMCpuId_ECX(iExt);
454 CHECKVAL(u32, s.uECX, "%x");
455 u32 = ASMCpuId_EDX(iExt);
456 CHECKVAL(u32, s.uEDX, "%x");
457
458 uECX2 = s.uECX - 1;
459 uEDX2 = s.uEDX - 1;
460 ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
461 CHECKVAL(uECX2, s.uECX, "%x");
462 CHECKVAL(uEDX2, s.uEDX, "%x");
463
464 uEAX2 = s.uEAX - 1;
465 uEBX2 = s.uEBX - 1;
466 uECX2 = s.uECX - 1;
467 uEDX2 = s.uEDX - 1;
468 ASMCpuId(iExt, &uEAX2, &uEBX2, &uECX2, &uEDX2);
469 CHECKVAL(uEAX2, s.uEAX, "%x");
470 CHECKVAL(uEBX2, s.uEBX, "%x");
471 CHECKVAL(uECX2, s.uECX, "%x");
472 CHECKVAL(uEDX2, s.uEDX, "%x");
473 }
474
475 /*
476 * Understandable output
477 */
478 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
479 RTTestIPrintf(RTTESTLVL_ALWAYS,
480 "Ext Name: %.4s%.4s%.4s\n"
481 "Ext Supports: 0x80000000-%#010x\n",
482 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
483
484 if (cExtFunctions >= 0x80000001)
485 {
486 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
487 RTTestIPrintf(RTTESTLVL_ALWAYS,
488 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
489 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
490 "Stepping: %d\n"
491 "Brand ID: %#05x\n",
492 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
493 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
494 ASMGetCpuStepping(s.uEAX),
495 s.uEBX & 0xfff);
496
497 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
498 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
499 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
500 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
501 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
502 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
503 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
504 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
505 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
506 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
507 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
508 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
509 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
510 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
511 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
512 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
513 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
514 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
515 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
516 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
517 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
518 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
519 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
520 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
521 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
522 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
523 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
524 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
525 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
526 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
527 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
528 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
529 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
530 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
531
532 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
533 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
534 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
535 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
536 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
537 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
538 for (iBit = 5; iBit < 32; iBit++)
539 if (s.uECX & RT_BIT(iBit))
540 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
541 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
542 }
543
544 char szString[4*4*3+1] = {0};
545 if (cExtFunctions >= 0x80000002)
546 ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
547 if (cExtFunctions >= 0x80000003)
548 ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
549 if (cExtFunctions >= 0x80000004)
550 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
551 if (cExtFunctions >= 0x80000002)
552 RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);
553
554 if (cExtFunctions >= 0x80000005)
555 {
556 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
557 RTTestIPrintf(RTTESTLVL_ALWAYS,
558 "TLB 2/4M Instr/Uni: %s %3d entries\n"
559 "TLB 2/4M Data: %s %3d entries\n",
560 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
561 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
562 RTTestIPrintf(RTTESTLVL_ALWAYS,
563 "TLB 4K Instr/Uni: %s %3d entries\n"
564 "TLB 4K Data: %s %3d entries\n",
565 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
566 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
567 RTTestIPrintf(RTTESTLVL_ALWAYS,
568 "L1 Instr Cache Line Size: %d bytes\n"
569 "L1 Instr Cache Lines Per Tag: %d\n"
570 "L1 Instr Cache Associativity: %s\n"
571 "L1 Instr Cache Size: %d KB\n",
572 (s.uEDX >> 0) & 0xff,
573 (s.uEDX >> 8) & 0xff,
574 getCacheAss((s.uEDX >> 16) & 0xff),
575 (s.uEDX >> 24) & 0xff);
576 RTTestIPrintf(RTTESTLVL_ALWAYS,
577 "L1 Data Cache Line Size: %d bytes\n"
578 "L1 Data Cache Lines Per Tag: %d\n"
579 "L1 Data Cache Associativity: %s\n"
580 "L1 Data Cache Size: %d KB\n",
581 (s.uECX >> 0) & 0xff,
582 (s.uECX >> 8) & 0xff,
583 getCacheAss((s.uECX >> 16) & 0xff),
584 (s.uECX >> 24) & 0xff);
585 }
586
587 if (cExtFunctions >= 0x80000006)
588 {
589 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
590 RTTestIPrintf(RTTESTLVL_ALWAYS,
591 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
592 "L2 TLB 2/4M Data: %s %4d entries\n",
593 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
594 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
595 RTTestIPrintf(RTTESTLVL_ALWAYS,
596 "L2 TLB 4K Instr/Uni: %s %4d entries\n"
597 "L2 TLB 4K Data: %s %4d entries\n",
598 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
599 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
600 RTTestIPrintf(RTTESTLVL_ALWAYS,
601 "L2 Cache Line Size: %d bytes\n"
602 "L2 Cache Lines Per Tag: %d\n"
603 "L2 Cache Associativity: %s\n"
604 "L2 Cache Size: %d KB\n",
605 (s.uEDX >> 0) & 0xff,
606 (s.uEDX >> 8) & 0xf,
607 getL2CacheAss((s.uEDX >> 12) & 0xf),
608 (s.uEDX >> 16) & 0xffff);
609 }
610
611 if (cExtFunctions >= 0x80000007)
612 {
613 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
614 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
615 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
616 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
617 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
618 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
619 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
620 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
621 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
622 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
623 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
624 for (iBit = 9; iBit < 32; iBit++)
625 if (s.uEDX & RT_BIT(iBit))
626 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
627 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
628 }
629
630 if (cExtFunctions >= 0x80000008)
631 {
632 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
633 RTTestIPrintf(RTTESTLVL_ALWAYS,
634 "Physical Address Width: %d bits\n"
635 "Virtual Address Width: %d bits\n"
636 "Guest Physical Address Width: %d bits\n",
637 (s.uEAX >> 0) & 0xff,
638 (s.uEAX >> 8) & 0xff,
639 (s.uEAX >> 16) & 0xff);
640 RTTestIPrintf(RTTESTLVL_ALWAYS,
641 "Physical Core Count: %d\n",
642 ((s.uECX >> 0) & 0xff) + 1);
643 if ((s.uECX >> 12) & 0xf)
644 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
645 }
646
647 if (cExtFunctions >= 0x8000000a)
648 {
649 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
650 RTTestIPrintf(RTTESTLVL_ALWAYS,
651 "SVM Revision: %d (%#x)\n"
652 "Number of Address Space IDs: %d (%#x)\n",
653 s.uEAX & 0xff, s.uEAX & 0xff,
654 s.uEBX, s.uEBX);
655 }
656 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
657 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 8000001b): %#010x\n", ASMGetApicIdExt8000001E());
658}
659
660# if 0
/* NOTE(review): this function is inside an '#if 0' block — it is dead code
   kept around for manual experiments. */
/** Walks every possible CPUID leaf and prints each leaf whose register
 *  quadruple differs from the previous one (plus periodic progress lines). */
static void bruteForceCpuId(void)
{
    RTTestISub("brute force CPUID leafs");
    uint32_t auPrevValues[4] = { 0, 0, 0, 0};
    uint32_t uLeaf = 0;
    do
    {
        uint32_t auValues[4];
        ASMCpuIdExSlow(uLeaf, 0, 0, 0, &auValues[0], &auValues[1], &auValues[2], &auValues[3]);
        /* Print when any register changed (ignoring the common echo patterns),
           or every 0x08000000 leafs as a progress indicator. */
        if (   (auValues[0] != auPrevValues[0] && auValues[0] != uLeaf)
            || (auValues[1] != auPrevValues[1] && auValues[1] != 0)
            || (auValues[2] != auPrevValues[2] && auValues[2] != 0)
            || (auValues[3] != auPrevValues[3] && auValues[3] != 0)
            || (uLeaf & (UINT32_C(0x08000000) - UINT32_C(1))) == 0)
        {
            RTTestIPrintf(RTTESTLVL_ALWAYS,
                          "%08x: %08x %08x %08x %08x\n", uLeaf,
                          auValues[0], auValues[1], auValues[2], auValues[3]);
        }
        auPrevValues[0] = auValues[0];
        auPrevValues[1] = auValues[1];
        auPrevValues[2] = auValues[2];
        auPrevValues[3] = auValues[3];

        //uint32_t uSubLeaf = 0;
        //do
        //{
        //
        //
        //} while (false);
    } while (uLeaf++ < UINT32_MAX);
}
693# endif
694
695#endif /* AMD64 || X86 */
696
697DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
698{
699 *pu8 = 0;
700 CHECKOP(ASMAtomicXchgU8(pu8, 1), 0, "%#x", uint8_t);
701 CHECKVAL(*pu8, 1, "%#x");
702
703 CHECKOP(ASMAtomicXchgU8(pu8, 0), 1, "%#x", uint8_t);
704 CHECKVAL(*pu8, 0, "%#x");
705
706 CHECKOP(ASMAtomicXchgU8(pu8, UINT8_C(0xff)), 0, "%#x", uint8_t);
707 CHECKVAL(*pu8, 0xff, "%#x");
708
709 CHECKOP(ASMAtomicXchgU8(pu8, UINT8_C(0x87)), UINT8_C(0xff), "%#x", uint8_t);
710 CHECKVAL(*pu8, 0x87, "%#x");
711}
712
713
/** Runs the ASMAtomicXchgU8 worker on stack and guarded heap storage. */
static void tstASMAtomicXchgU8(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
}
718
719
720DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
721{
722 *pu16 = 0;
723
724 CHECKOP(ASMAtomicXchgU16(pu16, 1), 0, "%#x", uint16_t);
725 CHECKVAL(*pu16, 1, "%#x");
726
727 CHECKOP(ASMAtomicXchgU16(pu16, 0), 1, "%#x", uint16_t);
728 CHECKVAL(*pu16, 0, "%#x");
729
730 CHECKOP(ASMAtomicXchgU16(pu16, 0xffff), 0, "%#x", uint16_t);
731 CHECKVAL(*pu16, 0xffff, "%#x");
732
733 CHECKOP(ASMAtomicXchgU16(pu16, 0x8765), 0xffff, "%#x", uint16_t);
734 CHECKVAL(*pu16, 0x8765, "%#x");
735}
736
737
/** Runs the ASMAtomicXchgU16 worker on stack and guarded heap storage. */
static void tstASMAtomicXchgU16(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
}
742
743
744DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
745{
746 *pu32 = 0;
747
748 CHECKOP(ASMAtomicXchgU32(pu32, 1), 0, "%#x", uint32_t);
749 CHECKVAL(*pu32, 1, "%#x");
750
751 CHECKOP(ASMAtomicXchgU32(pu32, 0), 1, "%#x", uint32_t);
752 CHECKVAL(*pu32, 0, "%#x");
753
754 CHECKOP(ASMAtomicXchgU32(pu32, ~UINT32_C(0)), 0, "%#x", uint32_t);
755 CHECKVAL(*pu32, ~UINT32_C(0), "%#x");
756
757 CHECKOP(ASMAtomicXchgU32(pu32, 0x87654321), ~UINT32_C(0), "%#x", uint32_t);
758 CHECKVAL(*pu32, 0x87654321, "%#x");
759}
760
761
/** Runs the ASMAtomicXchgU32 worker on stack and guarded heap storage. */
static void tstASMAtomicXchgU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
}
766
767
768DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
769{
770 *pu64 = 0;
771
772 CHECKOP(ASMAtomicXchgU64(pu64, 1), UINT64_C(0), "%#llx", uint64_t);
773 CHECKVAL(*pu64, UINT64_C(1), "%#llx");
774
775 CHECKOP(ASMAtomicXchgU64(pu64, 0), UINT64_C(1), "%#llx", uint64_t);
776 CHECKVAL(*pu64, UINT64_C(0), "%#llx");
777
778 CHECKOP(ASMAtomicXchgU64(pu64, ~UINT64_C(0)), UINT64_C(0), "%#llx", uint64_t);
779 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");
780
781 CHECKOP(ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), ~UINT64_C(0), "%#llx", uint64_t);
782 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
783}
784
785
/** Runs the ASMAtomicXchgU64 worker on stack and guarded heap storage. */
static void tstASMAtomicXchgU64(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
}
790
791
792DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
793{
794 *ppv = NULL;
795
796 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, "%p", void *);
797 CHECKVAL(*ppv, (void *)(~(uintptr_t)0), "%p");
798
799 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)(uintptr_t)0x87654321), (void *)(~(uintptr_t)0), "%p", void *);
800 CHECKVAL(*ppv, (void *)(uintptr_t)0x87654321, "%p");
801
802 CHECKOP(ASMAtomicXchgPtr(ppv, NULL), (void *)(uintptr_t)0x87654321, "%p", void *);
803 CHECKVAL(*ppv, NULL, "%p");
804}
805
806
/** Runs the ASMAtomicXchgPtr worker on stack and guarded heap storage. */
static void tstASMAtomicXchgPtr(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
}
811
812
813DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
814{
815 *pu8 = 0xff;
816
817 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0), false, "%d", bool);
818 CHECKVAL(*pu8, 0xff, "%x");
819
820 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, "%d", bool);
821 CHECKVAL(*pu8, 0, "%x");
822
823 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x79, 0xff), false, "%d", bool);
824 CHECKVAL(*pu8, 0, "%x");
825
826 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, "%d", bool);
827 CHECKVAL(*pu8, 0x97, "%x");
828}
829
830
/** Runs the ASMAtomicCmpXchgU8 worker on stack and guarded heap storage. */
static void tstASMAtomicCmpXchgU8(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
}
835
836
837DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
838{
839 *pu32 = UINT32_C(0xffffffff);
840
841 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, 0), false, "%d", bool);
842 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");
843
844 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, "%d", bool);
845 CHECKVAL(*pu32, 0, "%x");
846
847 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff)), false, "%d", bool);
848 CHECKVAL(*pu32, 0, "%x");
849
850 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), 0), true, "%d", bool);
851 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x");
852}
853
854
/** Runs the ASMAtomicCmpXchgU32 worker on stack and guarded heap storage. */
static void tstASMAtomicCmpXchgU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
}
859
860
861
862DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
863{
864 *pu64 = UINT64_C(0xffffffffffffff);
865
866 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, 0), false, "%d", bool);
867 CHECKVAL(*pu64, UINT64_C(0xffffffffffffff), "%#llx");
868
869 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, "%d", bool);
870 CHECKVAL(*pu64, 0, "%x");
871
872 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff)), false, "%d", bool);
873 CHECKVAL(*pu64, 0, "%x");
874
875 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000)), false, "%d", bool);
876 CHECKVAL(*pu64, 0, "%x");
877
878 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, "%d", bool);
879 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%#llx");
880}
881
882
883static void tstASMAtomicCmpXchgU64(void)
884{
885 DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
886}
887
888
/**
 * Worker for tstASMAtomicCmpXchgExU32: like the plain cmpxchg test, but also
 * verifies that the old value is returned via the extra output parameter on
 * both success and failure.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    uint32_t u32Old = UINT32_C(0x80005111); /* poison value; must be overwritten by every call */

    CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, "%d", bool);
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");
    CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, "%d", bool);
    CHECKVAL(*pu32, 0, "%x");
    CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff), &u32Old), false, "%d", bool);
    CHECKVAL(*pu32, 0, "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), 0, &u32Old), true, "%d", bool);
    CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0x8008efd), &u32Old), true, "%d", bool);
    CHECKVAL(*pu32, 0, "%x");
    CHECKVAL(u32Old, UINT32_C(0x8008efd), "%x");
}


/** Drives tstASMAtomicCmpXchgExU32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicCmpXchgExU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
}
920
921
/**
 * Worker for tstASMAtomicCmpXchgExU64: like the plain cmpxchg test, but also
 * verifies that the old value is returned via the extra output parameter on
 * both success and failure, including half-set comparands.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffffffffffff);
    uint64_t u64Old = UINT64_C(0x8000000051111111); /* poison value; must be overwritten by every call */

    CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%llx");
    CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0), "%llx");
    CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx");

    /* Comparands with only one 32-bit half set must not match 0. */
    CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0xffffffff, &u64Old), false, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0), "%llx");
    CHECKVAL(u64Old, UINT64_C(0), "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000), &u64Old), false, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0), "%llx");
    CHECKVAL(u64Old, UINT64_C(0), "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0, &u64Old), true, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%llx");
    CHECKVAL(u64Old, UINT64_C(0), "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0x80040008008efd), &u64Old), true, "%d", bool);
    CHECKVAL(*pu64, UINT64_C(0), "%llx");
    CHECKVAL(u64Old, UINT64_C(0x80040008008efd), "%llx");
}


/** Drives tstASMAtomicCmpXchgExU64Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicCmpXchgExU64(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
}
957
958
/**
 * Worker for tstASMAtomicReadU64: reads back all-zero, all-ones and a mixed
 * bit pattern, checking both the returned value and that the memory is left
 * unchanged by the read.
 */
DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 0;

    CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0), "%#llx", uint64_t);
    CHECKVAL(*pu64, UINT64_C(0), "%#llx");

    *pu64 = ~UINT64_C(0);
    CHECKOP(ASMAtomicReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t);
    CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");

    *pu64 = UINT64_C(0xfedcba0987654321);
    CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t);
    CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
}


/** Drives tstASMAtomicReadU64Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicReadU64(void)
{
    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
}
980
981
/**
 * Worker for tstASMAtomicUoReadU64: same patterns as the ordered read test,
 * but exercising the unordered variant ASMAtomicUoReadU64.
 */
DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 0;

    CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0), "%#llx", uint64_t);
    CHECKVAL(*pu64, UINT64_C(0), "%#llx");

    *pu64 = ~UINT64_C(0);
    CHECKOP(ASMAtomicUoReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t);
    CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");

    *pu64 = UINT64_C(0xfedcba0987654321);
    CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t);
    CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
}


/** Drives tstASMAtomicUoReadU64Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicUoReadU64(void)
{
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
}
1003
1004
1005DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
1006{
1007 int32_t i32Rc;
1008 *pi32 = 10;
1009#define MYCHECK(op, rc, val) \
1010 do { \
1011 i32Rc = op; \
1012 if (i32Rc != (rc)) \
1013 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
1014 if (*pi32 != (val)) \
1015 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, val); \
1016 } while (0)
1017 MYCHECK(ASMAtomicAddS32(pi32, 1), 10, 11);
1018 MYCHECK(ASMAtomicAddS32(pi32, -2), 11, 9);
1019 MYCHECK(ASMAtomicAddS32(pi32, -9), 9, 0);
1020 MYCHECK(ASMAtomicAddS32(pi32, -0x7fffffff), 0, -0x7fffffff);
1021 MYCHECK(ASMAtomicAddS32(pi32, 0), -0x7fffffff, -0x7fffffff);
1022 MYCHECK(ASMAtomicAddS32(pi32, 0x7fffffff), -0x7fffffff, 0);
1023 MYCHECK(ASMAtomicAddS32(pi32, 0), 0, 0);
1024#undef MYCHECK
1025}
1026
1027
1028static void tstASMAtomicAddS32(void)
1029{
1030 DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
1031}
1032
1033
/**
 * Worker for tstASMAtomicUoIncU32: checks that ASMAtomicUoIncU32 returns the
 * new value, including wrap-around from UINT32_MAX and crossing into the
 * sign bit.
 */
DECLINLINE(void) tstASMAtomicUoIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 0;

    CHECKOP(ASMAtomicUoIncU32(pu32), UINT32_C(1), "%#x", uint32_t);
    CHECKVAL(*pu32, UINT32_C(1), "%#x");

    /* Wrap-around from all ones back to zero. */
    *pu32 = ~UINT32_C(0);
    CHECKOP(ASMAtomicUoIncU32(pu32), 0, "%#x", uint32_t);
    CHECKVAL(*pu32, 0, "%#x");

    *pu32 = UINT32_C(0x7fffffff);
    CHECKOP(ASMAtomicUoIncU32(pu32), UINT32_C(0x80000000), "%#x", uint32_t);
    CHECKVAL(*pu32, UINT32_C(0x80000000), "%#x");
}


/** Drives tstASMAtomicUoIncU32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicUoIncU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicUoIncU32, uint32_t);
}
1055
1056
/**
 * Worker for tstASMAtomicUoDecU32: checks that ASMAtomicUoDecU32 returns the
 * new value, including wrap-around below zero and crossing the sign bit.
 */
DECLINLINE(void) tstASMAtomicUoDecU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 0;

    /* Wrap-around from zero to all ones. */
    CHECKOP(ASMAtomicUoDecU32(pu32), ~UINT32_C(0), "%#x", uint32_t);
    CHECKVAL(*pu32, ~UINT32_C(0), "%#x");

    *pu32 = ~UINT32_C(0);
    CHECKOP(ASMAtomicUoDecU32(pu32), UINT32_C(0xfffffffe), "%#x", uint32_t);
    CHECKVAL(*pu32, UINT32_C(0xfffffffe), "%#x");

    *pu32 = UINT32_C(0x80000000);
    CHECKOP(ASMAtomicUoDecU32(pu32), UINT32_C(0x7fffffff), "%#x", uint32_t);
    CHECKVAL(*pu32, UINT32_C(0x7fffffff), "%#x");
}


/** Drives tstASMAtomicUoDecU32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicUoDecU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicUoDecU32, uint32_t);
}
1078
1079
/**
 * Worker for tstASMAtomicAddS64: verifies ASMAtomicAddS64 returns the old
 * value and leaves the expected sum behind, including crossing zero and the
 * signed 64-bit extremes (INT64_MIN/INT64_MAX).
 */
DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
{
    int64_t i64Rc;
    *pi64 = 10;
/* Checks the return value (old value) and the resulting memory content; the
   (int64_t) casts keep the varargs matching the %llx format. */
#define MYCHECK(op, rc, val) \
    do { \
        i64Rc = op; \
        if (i64Rc != (rc)) \
            RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %llx expected %llx\n", __FUNCTION__, __LINE__, #op, i64Rc, (int64_t)rc); \
        if (*pi64 != (val)) \
            RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%llx expected %llx\n", __FUNCTION__, __LINE__, #op, *pi64, (int64_t)(val)); \
    } while (0)
    MYCHECK(ASMAtomicAddS64(pi64, 1), 10, 11);
    MYCHECK(ASMAtomicAddS64(pi64, -2), 11, 9);
    MYCHECK(ASMAtomicAddS64(pi64, -9), 9, 0);
    MYCHECK(ASMAtomicAddS64(pi64, -INT64_MAX), 0, -INT64_MAX);
    MYCHECK(ASMAtomicAddS64(pi64, 0), -INT64_MAX, -INT64_MAX);
    MYCHECK(ASMAtomicAddS64(pi64, -1), -INT64_MAX, INT64_MIN);
    MYCHECK(ASMAtomicAddS64(pi64, INT64_MAX), INT64_MIN, -1);
    MYCHECK(ASMAtomicAddS64(pi64, 1), -1, 0);
    MYCHECK(ASMAtomicAddS64(pi64, 0), 0, 0);
#undef MYCHECK
}


/** Drives tstASMAtomicAddS64Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicAddS64(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
}
1109
1110
/**
 * Worker for tstASMAtomicDecIncS32: walks ASMAtomicDecS32/ASMAtomicIncS32 up
 * and down across zero, verifying that each call returns the new value and
 * that memory agrees with it.
 */
DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
{
    int32_t i32Rc;
    *pi32 = 10;
/** Checks both the returned (new) value and the memory content against rc. */
#define MYCHECK(op, rc) \
    do { \
        i32Rc = op; \
        if (i32Rc != (rc)) \
            RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
        if (*pi32 != (rc)) \
            RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, rc); \
    } while (0)
    MYCHECK(ASMAtomicDecS32(pi32), 9);
    MYCHECK(ASMAtomicDecS32(pi32), 8);
    MYCHECK(ASMAtomicDecS32(pi32), 7);
    MYCHECK(ASMAtomicDecS32(pi32), 6);
    MYCHECK(ASMAtomicDecS32(pi32), 5);
    MYCHECK(ASMAtomicDecS32(pi32), 4);
    MYCHECK(ASMAtomicDecS32(pi32), 3);
    MYCHECK(ASMAtomicDecS32(pi32), 2);
    MYCHECK(ASMAtomicDecS32(pi32), 1);
    MYCHECK(ASMAtomicDecS32(pi32), 0);
    MYCHECK(ASMAtomicDecS32(pi32), -1);
    MYCHECK(ASMAtomicDecS32(pi32), -2);
    MYCHECK(ASMAtomicIncS32(pi32), -1);
    MYCHECK(ASMAtomicIncS32(pi32), 0);
    MYCHECK(ASMAtomicIncS32(pi32), 1);
    MYCHECK(ASMAtomicIncS32(pi32), 2);
    MYCHECK(ASMAtomicIncS32(pi32), 3);
    MYCHECK(ASMAtomicDecS32(pi32), 2);
    MYCHECK(ASMAtomicIncS32(pi32), 3);
    MYCHECK(ASMAtomicDecS32(pi32), 2);
    MYCHECK(ASMAtomicIncS32(pi32), 3);
#undef MYCHECK
}


/** Drives tstASMAtomicDecIncS32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicDecIncS32(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
}
1152
1153
1154DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
1155{
1156 int64_t i64Rc;
1157 *pi64 = 10;
1158#define MYCHECK(op, rc) \
1159 do { \
1160 i64Rc = op; \
1161 if (i64Rc != (rc)) \
1162 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, rc); \
1163 if (*pi64 != (rc)) \
1164 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, rc); \
1165 } while (0)
1166 MYCHECK(ASMAtomicDecS64(pi64), 9);
1167 MYCHECK(ASMAtomicDecS64(pi64), 8);
1168 MYCHECK(ASMAtomicDecS64(pi64), 7);
1169 MYCHECK(ASMAtomicDecS64(pi64), 6);
1170 MYCHECK(ASMAtomicDecS64(pi64), 5);
1171 MYCHECK(ASMAtomicDecS64(pi64), 4);
1172 MYCHECK(ASMAtomicDecS64(pi64), 3);
1173 MYCHECK(ASMAtomicDecS64(pi64), 2);
1174 MYCHECK(ASMAtomicDecS64(pi64), 1);
1175 MYCHECK(ASMAtomicDecS64(pi64), 0);
1176 MYCHECK(ASMAtomicDecS64(pi64), -1);
1177 MYCHECK(ASMAtomicDecS64(pi64), -2);
1178 MYCHECK(ASMAtomicIncS64(pi64), -1);
1179 MYCHECK(ASMAtomicIncS64(pi64), 0);
1180 MYCHECK(ASMAtomicIncS64(pi64), 1);
1181 MYCHECK(ASMAtomicIncS64(pi64), 2);
1182 MYCHECK(ASMAtomicIncS64(pi64), 3);
1183 MYCHECK(ASMAtomicDecS64(pi64), 2);
1184 MYCHECK(ASMAtomicIncS64(pi64), 3);
1185 MYCHECK(ASMAtomicDecS64(pi64), 2);
1186 MYCHECK(ASMAtomicIncS64(pi64), 3);
1187#undef MYCHECK
1188}
1189
1190
1191static void tstASMAtomicDecIncS64(void)
1192{
1193 DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
1194}
1195
1196
/**
 * Worker for tstASMAtomicAndOrU32: applies ASMAtomicOrU32/ASMAtomicAndU32
 * mask sequences and verifies the stored value after each operation.
 */
DECLINLINE(void) tstASMAtomicAndOrU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);

    ASMAtomicOrU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0x8f8f8f8f));
    CHECKVAL(*pu32, UINT32_C(0x8f8f8f8f), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x70707070));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(1));
    CHECKVAL(*pu32, UINT32_C(1), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000001), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000000), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0));
    CHECKVAL(*pu32, UINT32_C(0), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x42424242));
    CHECKVAL(*pu32, UINT32_C(0x42424242), "%x");
}


/** Drives tstASMAtomicAndOrU32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicAndOrU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrU32, uint32_t);
}
1234
1235
1236DECLINLINE(void) tstASMAtomicAndOrU64Worker(uint64_t volatile *pu64)
1237{
1238 *pu64 = UINT64_C(0xffffffff);
1239
1240 ASMAtomicOrU64(pu64, UINT64_C(0xffffffff));
1241 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1242
1243 ASMAtomicAndU64(pu64, UINT64_C(0xffffffff));
1244 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1245
1246 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f));
1247 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f), "%x");
1248
1249 ASMAtomicOrU64(pu64, UINT64_C(0x70707070));
1250 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1251
1252 ASMAtomicAndU64(pu64, UINT64_C(1));
1253 CHECKVAL(*pu64, UINT64_C(1), "%x");
1254
1255 ASMAtomicOrU64(pu64, UINT64_C(0x80000000));
1256 CHECKVAL(*pu64, UINT64_C(0x80000001), "%x");
1257
1258 ASMAtomicAndU64(pu64, UINT64_C(0x80000000));
1259 CHECKVAL(*pu64, UINT64_C(0x80000000), "%x");
1260
1261 ASMAtomicAndU64(pu64, UINT64_C(0));
1262 CHECKVAL(*pu64, UINT64_C(0), "%x");
1263
1264 ASMAtomicOrU64(pu64, UINT64_C(0x42424242));
1265 CHECKVAL(*pu64, UINT64_C(0x42424242), "%x");
1266
1267 // Same as above, but now 64-bit wide.
1268 ASMAtomicAndU64(pu64, UINT64_C(0));
1269 CHECKVAL(*pu64, UINT64_C(0), "%x");
1270
1271 ASMAtomicOrU64(pu64, UINT64_C(0xffffffffffffffff));
1272 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1273
1274 ASMAtomicAndU64(pu64, UINT64_C(0xffffffffffffffff));
1275 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1276
1277 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f8f8f8f8f));
1278 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f8f8f8f8f), "%x");
1279
1280 ASMAtomicOrU64(pu64, UINT64_C(0x7070707070707070));
1281 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1282
1283 ASMAtomicAndU64(pu64, UINT64_C(1));
1284 CHECKVAL(*pu64, UINT64_C(1), "%x");
1285
1286 ASMAtomicOrU64(pu64, UINT64_C(0x8000000000000000));
1287 CHECKVAL(*pu64, UINT64_C(0x8000000000000001), "%x");
1288
1289 ASMAtomicAndU64(pu64, UINT64_C(0x8000000000000000));
1290 CHECKVAL(*pu64, UINT64_C(0x8000000000000000), "%x");
1291
1292 ASMAtomicAndU64(pu64, UINT64_C(0));
1293 CHECKVAL(*pu64, UINT64_C(0), "%x");
1294
1295 ASMAtomicOrU64(pu64, UINT64_C(0x4242424242424242));
1296 CHECKVAL(*pu64, UINT64_C(0x4242424242424242), "%x");
1297}
1298
1299
1300static void tstASMAtomicAndOrU64(void)
1301{
1302 DO_SIMPLE_TEST(ASMAtomicAndOrU64, uint64_t);
1303}
1304
1305
/**
 * Worker for tstASMAtomicUoAndOrU32: same mask sequences as the ordered
 * and/or test, but exercising the unordered ASMAtomicUoOrU32/ASMAtomicUoAndU32.
 */
DECLINLINE(void) tstASMAtomicUoAndOrU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);

    ASMAtomicUoOrU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%#x");

    ASMAtomicUoAndU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%#x");

    ASMAtomicUoAndU32(pu32, UINT32_C(0x8f8f8f8f));
    CHECKVAL(*pu32, UINT32_C(0x8f8f8f8f), "%#x");

    ASMAtomicUoOrU32(pu32, UINT32_C(0x70707070));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%#x");

    ASMAtomicUoAndU32(pu32, UINT32_C(1));
    CHECKVAL(*pu32, UINT32_C(1), "%#x");

    ASMAtomicUoOrU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000001), "%#x");

    ASMAtomicUoAndU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000000), "%#x");

    ASMAtomicUoAndU32(pu32, UINT32_C(0));
    CHECKVAL(*pu32, UINT32_C(0), "%#x");

    ASMAtomicUoOrU32(pu32, UINT32_C(0x42424242));
    CHECKVAL(*pu32, UINT32_C(0x42424242), "%#x");
}


/** Drives tstASMAtomicUoAndOrU32Worker via the DO_SIMPLE_TEST harness. */
static void tstASMAtomicUoAndOrU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicUoAndOrU32, uint32_t);
}
1343
1344
/** Helper type wrapping exactly one page worth of bytes, so the page-sized
 *  memory tests can use the DO_SIMPLE_TEST harness. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];
} TSTPAGE;
1349
1350
1351DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
1352{
1353 for (unsigned j = 0; j < 16; j++)
1354 {
1355 memset(pPage, 0x11 * j, sizeof(*pPage));
1356 ASMMemZeroPage(pPage);
1357 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
1358 if (pPage->ab[i])
1359 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
1360 }
1361}
1362
1363
1364static void tstASMMemZeroPage(void)
1365{
1366 DO_SIMPLE_TEST(ASMMemZeroPage, TSTPAGE);
1367}
1368
1369
/**
 * Tests ASMMemIsZeroPage on guarded pages (head and tail guards), checking
 * the all-zero and all-ones cases plus every possible single non-zero byte
 * position.
 */
void tstASMMemIsZeroPage(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemIsZeroPage");

    /* One page right below a guard page and one right above, to catch overruns. */
    void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pvPage1 && pvPage2);

    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));

    memset(pvPage1, 0xff, PAGE_SIZE);
    memset(pvPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));

    /* A single non-zero byte anywhere in the page must be detected. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    for (unsigned off = 0; off < PAGE_SIZE; off++)
    {
        ((uint8_t *)pvPage1)[off] = 1;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
        ((uint8_t *)pvPage1)[off] = 0;

        ((uint8_t *)pvPage2)[off] = 0x80;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
        ((uint8_t *)pvPage2)[off] = 0;
    }

    RTTestSubDone(hTest);
}
1403
1404
/**
 * Tests ASMMemFirstMismatchingU8 (and the related ASMMemIsZero /
 * ASMMemIsAllU8 helpers) on guarded pages: all-match and all-mismatch cases,
 * sub-page sizes, and then an exhaustive sweep over mismatch offsets, start
 * alignments and lengths.
 */
void tstASMMemFirstMismatchingU8(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemFirstMismatchingU8");

    /* One page right below a guard page and one right above, to catch overruns. */
    uint8_t *pbPage1 = (uint8_t *)RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    uint8_t *pbPage2 = (uint8_t *)RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pbPage1 && pbPage2);

    /* All zero pages: only a zero needle matches everywhere. */
    memset(pbPage1, 0, PAGE_SIZE);
    memset(pbPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 1) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 1) == pbPage2);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0x87) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0x87) == pbPage2);
    RTTESTI_CHECK(ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0x34));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0x88));
    /* Sub-page sizes (0..31), both at the start and right up against the guard. */
    unsigned cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0x34) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0x99) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0x42) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0x88) == pbPage2 || !cbSub);
    }

    /* Same battery with all bits set. */
    memset(pbPage1, 0xff, PAGE_SIZE);
    memset(pbPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xfe) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xfe) == pbPage2);
    RTTESTI_CHECK(!ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(!ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xff) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xfe) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xfe) == pbPage2 || !cbSub);
    }


    /*
     * Various alignments and sizes.
     */
    uint8_t const   bFiller1 = 0x00;
    uint8_t const   bFiller2 = 0xf6;
    size_t const    cbBuf    = 128;
    uint8_t        *pbBuf1   = pbPage1;
    uint8_t        *pbBuf2   = &pbPage2[PAGE_SIZE - cbBuf]; /* Put it up against the tail guard */
    memset(pbPage1, ~bFiller1, PAGE_SIZE);
    memset(pbPage2, ~bFiller2, PAGE_SIZE);
    memset(pbBuf1, bFiller1, cbBuf);
    memset(pbBuf2, bFiller2, cbBuf);
    for (size_t offNonZero = 0; offNonZero < cbBuf; offNonZero++)
    {
        /* Plant one byte that is guaranteed to differ from the filler. */
        uint8_t bRand = (uint8_t)RTRandU32();
        pbBuf1[offNonZero] = bRand | 1;
        pbBuf2[offNonZero] = (0x80 | bRand) ^ 0xf6;

        for (size_t offStart = 0; offStart < 32; offStart++)
        {
            size_t const cbMax = cbBuf - offStart;
            for (size_t cb = 0; cb < cbMax; cb++)
            {
                size_t const offEnd = offStart + cb;
                uint8_t bSaved1, bSaved2;
                /* Poison the byte just past the scanned range to catch over-reads. */
                if (offEnd < PAGE_SIZE)
                {
                    bSaved1 = pbBuf1[offEnd];
                    bSaved2 = pbBuf2[offEnd];
                    pbBuf1[offEnd] = 0xff;
                    pbBuf2[offEnd] = 0xff;
                }
#ifdef _MSC_VER                 /* simple stupid compiler warnings */
                else
                    bSaved1 = bSaved2 = 0;
#endif

                /* Note: 'offNonZero - offStart < cb' relies on size_t wrap-around:
                   when offStart > offNonZero the difference is huge and thus >= cb. */
                uint8_t *pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf1 + offStart, cb, bFiller1);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf1[offNonZero] : pbRet == NULL);

                pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf2 + offStart, cb, bFiller2);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf2[offNonZero] : pbRet == NULL);

                if (offEnd < PAGE_SIZE)
                {
                    pbBuf1[offEnd] = bSaved1;
                    pbBuf2[offEnd] = bSaved2;
                }
            }
        }

        /* Restore the filler byte before moving the mismatch to the next offset. */
        pbBuf1[offNonZero] = 0;
        pbBuf2[offNonZero] = 0xf6;
    }

    RTTestSubDone(hTest);
}
1525
1526
1527void tstASMMemZero32(void)
1528{
1529 RTTestSub(g_hTest, "ASMMemFill32");
1530
1531 struct
1532 {
1533 uint64_t u64Magic1;
1534 uint8_t abPage[PAGE_SIZE - 32];
1535 uint64_t u64Magic2;
1536 } Buf1, Buf2, Buf3;
1537
1538 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
1539 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
1540 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
1541 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
1542 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
1543 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
1544 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
1545 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
1546 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
1547 ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
1548 ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
1549 ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
1550 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
1551 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
1552 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
1553 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
1554 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
1555 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
1556 {
1557 RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
1558 }
1559 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
1560 if (Buf1.abPage[i])
1561 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1562 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
1563 if (Buf2.abPage[i])
1564 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1565 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
1566 if (Buf3.abPage[i])
1567 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1568}
1569
1570
1571void tstASMMemFill32(void)
1572{
1573 RTTestSub(g_hTest, "ASMMemFill32");
1574
1575 struct
1576 {
1577 uint64_t u64Magic1;
1578 uint32_t au32Page[PAGE_SIZE / 4];
1579 uint64_t u64Magic2;
1580 } Buf1;
1581 struct
1582 {
1583 uint64_t u64Magic1;
1584 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
1585 uint64_t u64Magic2;
1586 } Buf2;
1587 struct
1588 {
1589 uint64_t u64Magic1;
1590 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
1591 uint64_t u64Magic2;
1592 } Buf3;
1593
1594 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
1595 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
1596 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
1597 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
1598 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
1599 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
1600 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
1601 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
1602 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
1603 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
1604 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
1605 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
1606 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
1607 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
1608 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
1609 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
1610 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
1611 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
1612 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
1613 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
1614 if (Buf1.au32Page[i] != 0xdeadbeef)
1615 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
1616 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
1617 if (Buf2.au32Page[i] != 0xcafeff01)
1618 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
1619 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
1620 if (Buf3.au32Page[i] != 0xf00dd00f)
1621 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
1622}
1623
1624
1625
1626void tstASMMath(void)
1627{
1628 RTTestSub(g_hTest, "Math");
1629
1630 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
1631 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
1632
1633 uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
1634 CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");
1635
1636 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x00000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
1637 CHECKVAL(u32, UINT32_C(0x00000001), "%#018RX32");
1638 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10000000), UINT32_C(0x80000000), UINT32_C(0x20000000));
1639 CHECKVAL(u32, UINT32_C(0x40000000), "%#018RX32");
1640 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x76543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
1641 CHECKVAL(u32, UINT32_C(0x76543210), "%#018RX32");
1642 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
1643 CHECKVAL(u32, UINT32_C(0xffffffff), "%#018RX32");
1644 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
1645 CHECKVAL(u32, UINT32_C(0xfffffff0), "%#018RX32");
1646 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
1647 CHECKVAL(u32, UINT32_C(0x05c584ce), "%#018RX32");
1648 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
1649 CHECKVAL(u32, UINT32_C(0x2d860795), "%#018RX32");
1650
1651#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
1652 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
1653 CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
1654 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
1655 CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
1656 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
1657 CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
1658 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
1659 CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
1660 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
1661 CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
1662 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
1663 CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
1664 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
1665 CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");
1666
1667# if 0 /* bird: question is whether this should trap or not:
1668 *
1669 * frank: Of course it must trap:
1670 *
1671 * 0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
1672 *
1673 * During the following division, the quotient must fit into a 32-bit register.
1674 * Therefore the smallest valid divisor is
1675 *
1676 * (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
1677 *
1678 * which is definitely greater than 0x3b9aca00.
1679 *
1680 * bird: No, the C version does *not* crash. So, the question is whether there's any
1681 * code depending on it not crashing.
1682 *
1683 * Of course the assembly versions of the code crash right now for the reasons you've
1684 * given, but the 32-bit MSC version does not crash.
1685 *
1686 * frank: The C version does not crash but delivers incorrect results for this case.
1687 * The reason is
1688 *
1689 * u.s.Hi = (unsigned long)(u64Hi / u32C);
1690 *
1691 * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
1692 * to 32 bit. If using this (optimized and fast) function we should just be sure that
1693 * the operands are in a valid range.
1694 */
1695 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
1696 CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
1697# endif
1698#endif /* AMD64 || X86 */
1699
1700 u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
1701 CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");
1702
1703 int32_t i32;
1704 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
1705 CHECKVAL(i32, INT32_C(-1), "%010RI32");
1706 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
1707 CHECKVAL(i32, INT32_C(-1), "%010RI32");
1708 i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
1709 CHECKVAL(i32, INT32_C(1), "%010RI32");
1710
1711 i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
1712 CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
1713 i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
1714 CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
1715}
1716
1717
/**
 * Tests ASMByteSwapU64/U32/U16: each value is swapped, checked against the
 * expected mirrored pattern, swapped back (round-trip must be the identity),
 * and the palindromic cases 0 and all-ones are checked as fixed points.
 */
void tstASMByteSwap(void)
{
    RTTestSub(g_hTest, "ASMByteSwap*");

    /* 64-bit swaps. */
    uint64_t u64In  = UINT64_C(0x0011223344556677);
    uint64_t u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = UINT64_C(0x0123456789abcdef);
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = 0;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = UINT64_MAX;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");

    /* 32-bit swaps. */
    uint32_t u32In  = UINT32_C(0x00112233);
    uint32_t u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = UINT32_C(0x12345678);
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = 0;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = UINT32_MAX;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");

    /* 16-bit swaps. */
    uint16_t u16In  = UINT16_C(0x0011);
    uint16_t u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = UINT16_C(0x1234);
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = 0;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = UINT16_MAX;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
}
1779
1780
1781void tstASMBench(void)
1782{
1783 /*
1784 * Make this static. We don't want to have this located on the stack.
1785 */
1786 static uint8_t volatile s_u8;
1787 static int8_t volatile s_i8;
1788 static uint16_t volatile s_u16;
1789 static int16_t volatile s_i16;
1790 static uint32_t volatile s_u32;
1791 static int32_t volatile s_i32;
1792 static uint64_t volatile s_u64;
1793 static int64_t volatile s_i64;
1794 unsigned i;
1795 const unsigned cRounds = _2M; /* Must be multiple of 8 */
1796 uint64_t u64Elapsed;
1797
1798 RTTestSub(g_hTest, "Benchmarking");
1799
1800#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1801# define BENCH(op, str) \
1802 do { \
1803 RTThreadYield(); \
1804 u64Elapsed = ASMReadTSC(); \
1805 for (i = cRounds; i > 0; i--) \
1806 op; \
1807 u64Elapsed = ASMReadTSC() - u64Elapsed; \
1808 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
1809 } while (0)
1810#else
1811# define BENCH(op, str) \
1812 do { \
1813 RTThreadYield(); \
1814 u64Elapsed = RTTimeNanoTS(); \
1815 for (i = cRounds / 8; i > 0; i--) \
1816 { \
1817 op; \
1818 op; \
1819 op; \
1820 op; \
1821 op; \
1822 op; \
1823 op; \
1824 op; \
1825 } \
1826 u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
1827 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
1828 } while (0)
1829#endif
1830#if (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) && !defined(GCC44_32BIT_PIC)
1831# define BENCH_TSC(op, str) \
1832 do { \
1833 RTThreadYield(); \
1834 u64Elapsed = ASMReadTSC(); \
1835 for (i = cRounds / 8; i > 0; i--) \
1836 { \
1837 op; \
1838 op; \
1839 op; \
1840 op; \
1841 op; \
1842 op; \
1843 op; \
1844 op; \
1845 } \
1846 u64Elapsed = ASMReadTSC() - u64Elapsed; \
1847 RTTestValue(g_hTest, str, u64Elapsed / cRounds, /*RTTESTUNIT_TICKS_PER_CALL*/ RTTESTUNIT_NONE); \
1848 } while (0)
1849#else
1850# define BENCH_TSC(op, str) BENCH(op, str)
1851#endif
1852
1853 BENCH(s_u32 = 0, "s_u32 = 0");
1854 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
1855 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
1856 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
1857 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
1858 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
1859 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
1860 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
1861 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
1862 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
1863 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
1864 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
1865 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
1866 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
1867 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
1868 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
1869 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
1870 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
1871 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
1872 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
1873 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
1874 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
1875 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
1876 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
1877 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
1878 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
1879 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
1880 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
1881 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
1882 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
1883 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
1884 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
1885 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
1886 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
1887 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
1888 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
1889 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
1890 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
1891 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
1892 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
1893 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
1894 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
1895 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
1896 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
1897 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
1898 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
1899 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
1900 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
1901 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
1902 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
1903 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
1904 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
1905 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
1906 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
1907 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
1908 BENCH(ASMAtomicUoIncU32(&s_u32), "ASMAtomicUoIncU32");
1909 BENCH(ASMAtomicUoDecU32(&s_u32), "ASMAtomicUoDecU32");
1910 BENCH(ASMAtomicUoAndU32(&s_u32, 0xffffffff), "ASMAtomicUoAndU32");
1911 BENCH(ASMAtomicUoOrU32(&s_u32, 0xffffffff), "ASMAtomicUoOrU32");
1912#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
1913 BENCH_TSC(ASMSerializeInstructionCpuId(), "ASMSerializeInstructionCpuId");
1914 BENCH_TSC(ASMSerializeInstructionIRet(), "ASMSerializeInstructionIRet");
1915#endif
1916
1917 /* The Darwin gcc does not like this ... */
1918#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1919 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
1920 BENCH(s_u32 = ASMGetApicIdExt0B(), "ASMGetApicIdExt0B");
1921 BENCH(s_u32 = ASMGetApicIdExt8000001E(), "ASMGetApicIdExt8000001E");
1922#endif
1923#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1924 uint32_t uAux;
1925 if ( ASMHasCpuId()
1926 && ASMIsValidExtRange(ASMCpuId_EAX(0x80000000))
1927 && (ASMCpuId_EDX(0x80000001) & X86_CPUID_EXT_FEATURE_EDX_RDTSCP) )
1928 {
1929 BENCH_TSC(ASMSerializeInstructionRdTscp(), "ASMSerializeInstructionRdTscp");
1930 BENCH(s_u64 = ASMReadTscWithAux(&uAux), "ASMReadTscWithAux");
1931 }
1932 BENCH(s_u64 = ASMReadTSC(), "ASMReadTSC");
1933 union
1934 {
1935 uint64_t u64[2];
1936 RTIDTR Unaligned;
1937 struct
1938 {
1939 uint16_t abPadding[3];
1940 RTIDTR Aligned;
1941 } s;
1942 } uBuf;
1943 Assert(((uintptr_t)&uBuf.Unaligned.pIdt & (sizeof(uintptr_t) - 1)) != 0);
1944 BENCH(ASMGetIDTR(&uBuf.Unaligned), "ASMGetIDTR/unaligned");
1945 Assert(((uintptr_t)&uBuf.s.Aligned.pIdt & (sizeof(uintptr_t) - 1)) == 0);
1946 BENCH(ASMGetIDTR(&uBuf.s.Aligned), "ASMGetIDTR/aligned");
1947#endif
1948
1949#undef BENCH
1950}
1951
1952
1953int main(int argc, char **argv)
1954{
1955 RT_NOREF_PV(argc); RT_NOREF_PV(argv);
1956
1957 int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
1958 if (rc)
1959 return rc;
1960 RTTestBanner(g_hTest);
1961
1962 /*
1963 * Execute the tests.
1964 */
1965#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1966 tstASMCpuId();
1967 //bruteForceCpuId();
1968#endif
1969#if 1
1970 tstASMAtomicXchgU8();
1971 tstASMAtomicXchgU16();
1972 tstASMAtomicXchgU32();
1973 tstASMAtomicXchgU64();
1974 tstASMAtomicXchgPtr();
1975 tstASMAtomicCmpXchgU8();
1976 tstASMAtomicCmpXchgU32();
1977 tstASMAtomicCmpXchgU64();
1978 tstASMAtomicCmpXchgExU32();
1979 tstASMAtomicCmpXchgExU64();
1980 tstASMAtomicReadU64();
1981 tstASMAtomicUoReadU64();
1982
1983 tstASMAtomicAddS32();
1984 tstASMAtomicAddS64();
1985 tstASMAtomicDecIncS32();
1986 tstASMAtomicDecIncS64();
1987 tstASMAtomicAndOrU32();
1988 tstASMAtomicAndOrU64();
1989
1990 tstASMAtomicUoIncU32();
1991 tstASMAtomicUoDecU32();
1992 tstASMAtomicUoAndOrU32();
1993
1994 tstASMMemZeroPage();
1995 tstASMMemIsZeroPage(g_hTest);
1996 tstASMMemFirstMismatchingU8(g_hTest);
1997 tstASMMemZero32();
1998 tstASMMemFill32();
1999
2000 tstASMMath();
2001
2002 tstASMByteSwap();
2003
2004 tstASMBench();
2005#endif
2006
2007 /*
2008 * Show the result.
2009 */
2010 return RTTestSummaryAndDestroy(g_hTest);
2011}
2012
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette