VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 88726

Last change on this file since 88726 was 87256, checked in by vboxsync, 4 years ago

IPRT,iprt/asm-arm.h: ASMReadTSC using CNTVCT or CNTPCT. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 140.1 KB
Line 
1/* $Id: tstRTInlineAsm.cpp 87256 2021-01-15 10:27:15Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <iprt/asm.h>
32#include <iprt/asm-math.h>
33
34/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
35 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
36 * fine. */
37#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
38# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
39# define GCC44_32BIT_PIC
40# endif
41#endif
42
43#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
44# include <iprt/asm-amd64-x86.h>
45# include <iprt/x86.h>
46#elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
47# include <iprt/asm-arm.h>
48# include <iprt/time.h>
49#else
50# include <iprt/time.h>
51#endif
52#include <iprt/mem.h>
53#include <iprt/param.h>
54#include <iprt/rand.h>
55#include <iprt/stream.h>
56#include <iprt/string.h>
57#include <iprt/thread.h>
58#include <iprt/test.h>
59#include <iprt/time.h>
60
61
62
63/*********************************************************************************************************************************
64* Defined Constants And Macros *
65*********************************************************************************************************************************/
/** Checks that @a val equals @a expect, reporting a test failure via
 * RTTestFailed (with the stringified expression, function and line) on
 * mismatch.  @a fmt is the printf format used for both values. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)
74
/** Evaluates @a op exactly once into a local of type @a type and reports a
 * test failure if the result differs from @a expect (cast to @a type).
 * @a fmt is the printf format used for both values in the failure message. */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)
84
/** Performs @a a_Operation and checks both its return value (against
 * @a a_ExpectRetVal) and the operand variable *@a a_pVar afterwards (against
 * @a a_ExpectVarVal), using the same type and format for both checks. */
#define CHECK_OP_AND_VAL(a_Type, a_Fmt, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_Fmt, a_Type); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_Fmt); \
    } while (0)
90
/** Like CHECK_OP_AND_VAL, but allows the return value and the variable to
 * have different types/formats (@a a_FmtRet vs @a a_FmtVar). */
#define CHECK_OP_AND_VAL_EX(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
    } while (0)
96
/** Like CHECK_OP_AND_VAL_EX, but additionally checks a second variable
 * @a a_uVar2 against @a a_ExpectVarVal2 (e.g. the old-value output of a
 * compare-and-exchange style operation). */
#define CHECK_OP_AND_VAL_EX2(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_uVar2, a_Operation, a_ExpectRetVal, a_ExpectVarVal, a_ExpectVarVal2) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
        CHECKVAL(a_uVar2, a_ExpectVarVal2, a_FmtVar); \
    } while (0)
103
104/**
105 * Calls a worker function with different worker variable storage types.
106 */
107#define DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type) \
108 do \
109 { \
110 type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
111 RTTEST_CHECK_BREAK(g_hTest, pVar); \
112 a_WorkerFunction(pVar); \
113 RTTestGuardedFree(g_hTest, pVar); \
114 \
115 pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
116 RTTEST_CHECK_BREAK(g_hTest, pVar); \
117 a_WorkerFunction(pVar); \
118 RTTestGuardedFree(g_hTest, pVar); \
119 } while (0)
120
121
122/**
123 * Calls a worker function with different worker variable storage types.
124 */
125#define DO_SIMPLE_TEST_NO_SUB(a_WorkerFunction, type) \
126 do \
127 { \
128 type StackVar; \
129 a_WorkerFunction(&StackVar); \
130 DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type); \
131 } while (0)
132
133/**
134 * Calls a worker function with different worker variable storage types.
135 */
136#define DO_SIMPLE_TEST(name, type) \
137 do \
138 { \
139 RTTestISub(#name); \
140 DO_SIMPLE_TEST_NO_SUB(tst ## name ## Worker, type); \
141 } while (0)
142
143
144/*********************************************************************************************************************************
145* Global Variables *
146*********************************************************************************************************************************/
147/** The test instance. */
148static RTTEST g_hTest;
149
150
151
152#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
153
154const char *getCacheAss(unsigned u)
155{
156 if (u == 0)
157 return "res0 ";
158 if (u == 1)
159 return "direct";
160 if (u >= 256)
161 return "???";
162
163 char *pszRet = NULL;
164 RTStrAPrintf(&pszRet, "%d way", u);
165 RTMEM_WILL_LEAK(pszRet);
166 return pszRet;
167}
168
169
/**
 * Translates a CPUID L2 cache associativity field (4-bit encoding from
 * extended leaf 0x80000006) into a human readable string.
 *
 * @returns Read-only string (fixed width where the encoding is defined).
 * @param   u   The raw 4-bit associativity field value.
 */
const char *getL2CacheAss(unsigned u)
{
    /* Table indexed directly by the 4-bit encoding; resN entries are the
       values the spec leaves reserved. */
    static const char * const s_apszAss[16] =
    {
        "off ",   "direct", "2 way ", "res3 ",
        "4 way ", "res5 ",  "8 way ", "res7 ",
        "16 way", "res9 ",  "res10 ", "res11 ",
        "res12 ", "res13 ", "res14 ", "fully "
    };
    if (u < 16)
        return s_apszAss[u];
    return "????";
}
194
195
196/**
197 * Test and dump all possible info from the CPUID instruction.
198 *
199 * @remark Bits shared with the libc cpuid.c program. This all written by me, so no worries.
200 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
201 */
202void tstASMCpuId(void)
203{
204 RTTestISub("ASMCpuId");
205
206 unsigned iBit;
207 struct
208 {
209 uint32_t uEBX, uEAX, uEDX, uECX;
210 } s;
211 if (!ASMHasCpuId())
212 {
213 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
214 return;
215 }
216
217 /*
218 * Try the 0 function and use that for checking the ASMCpuId_* variants.
219 */
220 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
221
222 uint32_t u32;
223
224 u32 = ASMCpuId_EAX(0);
225 CHECKVAL(u32, s.uEAX, "%x");
226 u32 = ASMCpuId_EBX(0);
227 CHECKVAL(u32, s.uEBX, "%x");
228 u32 = ASMCpuId_ECX(0);
229 CHECKVAL(u32, s.uECX, "%x");
230 u32 = ASMCpuId_EDX(0);
231 CHECKVAL(u32, s.uEDX, "%x");
232
233 uint32_t uECX2 = s.uECX - 1;
234 uint32_t uEDX2 = s.uEDX - 1;
235 ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
236 CHECKVAL(uECX2, s.uECX, "%x");
237 CHECKVAL(uEDX2, s.uEDX, "%x");
238
239 uint32_t uEAX2 = s.uEAX - 1;
240 uint32_t uEBX2 = s.uEBX - 1;
241 uECX2 = s.uECX - 1;
242 uEDX2 = s.uEDX - 1;
243 ASMCpuIdExSlow(0, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
244 CHECKVAL(uEAX2, s.uEAX, "%x");
245 CHECKVAL(uEBX2, s.uEBX, "%x");
246 CHECKVAL(uECX2, s.uECX, "%x");
247 CHECKVAL(uEDX2, s.uEDX, "%x");
248
249 /*
250 * Check the extended APIC stuff.
251 */
252 uint32_t idExtApic;
253 if (ASMCpuId_EAX(0) >= 0xb)
254 {
255 uint8_t idApic = ASMGetApicId();
256 do
257 {
258 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
259 ASMCpuIdExSlow(0xb, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
260 idExtApic = ASMGetApicIdExt0B();
261 } while (ASMGetApicId() != idApic);
262
263 CHECKVAL(uEDX2, idExtApic, "%x");
264 if (idApic != (uint8_t)idExtApic && uECX2 != 0)
265 RTTestIFailed("ASMGetApicIdExt0B() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
266 }
267 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
268 {
269 uint8_t idApic = ASMGetApicId();
270 do
271 {
272 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
273 ASMCpuIdExSlow(0x8000001e, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
274 idExtApic = ASMGetApicIdExt8000001E();
275 } while (ASMGetApicId() != idApic);
276 CHECKVAL(uEAX2, idExtApic, "%x");
277 if (idApic != (uint8_t)idExtApic)
278 RTTestIFailed("ASMGetApicIdExt8000001E() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
279 }
280
281 /*
282 * Done testing, dump the information.
283 */
284 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
285 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
286 const uint32_t cFunctions = s.uEAX;
287
288 /* raw dump */
289 RTTestIPrintf(RTTESTLVL_ALWAYS,
290 "\n"
291 " RAW Standard CPUIDs\n"
292 "Function eax ebx ecx edx\n");
293 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
294 {
295 ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
296 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
297 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
298
299 /* Some leafs output depend on the initial value of ECX.
300 * The same seems to apply to invalid standard functions */
301 if (iStd > cFunctions)
302 continue;
303 if (iStd == 0x04) /* Deterministic Cache Parameters Leaf */
304 for (uint32_t uECX = 1; s.uEAX & 0x1f; uECX++)
305 {
306 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
307 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
308 RTTESTI_CHECK_BREAK(uECX < 128);
309 }
310 else if (iStd == 0x07) /* Structured Extended Feature Flags */
311 {
312 uint32_t uMax = s.uEAX;
313 for (uint32_t uECX = 1; uECX < uMax; uECX++)
314 {
315 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
316 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
317 RTTESTI_CHECK_BREAK(uECX < 128);
318 }
319 }
320 else if (iStd == 0x0b) /* Extended Topology Enumeration Leafs */
321 for (uint32_t uECX = 1; (s.uEAX & 0x1f) && (s.uEBX & 0xffff); uECX++)
322 {
323 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
324 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
325 RTTESTI_CHECK_BREAK(uECX < 128);
326 }
327 else if (iStd == 0x0d) /* Extended State Enumeration Leafs */
328 for (uint32_t uECX = 1; s.uEAX != 0 || s.uEBX != 0 || s.uECX != 0 || s.uEDX != 0; uECX++)
329 {
330 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
331 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
332 RTTESTI_CHECK_BREAK(uECX < 128);
333 }
334 else if ( iStd == 0x0f /* Platform quality of service monitoring (PQM) */
335 || iStd == 0x10 /* Platform quality of service enforcement (PQE) */
336 || iStd == 0x12 /* SGX Enumeration */
337 || iStd == 0x14 /* Processor Trace Enumeration */
338 || iStd == 0x17 /* SoC Vendor Attribute Enumeration */
339 || iStd == 0x18 /* Deterministic Address Translation Parameters */)
340 {
341 /** @todo */
342 }
343 else
344 {
345 u32 = ASMCpuId_EAX(iStd);
346 CHECKVAL(u32, s.uEAX, "%x");
347
348 uint32_t u32EbxMask = UINT32_MAX;
349 if (iStd == 1)
350 u32EbxMask = UINT32_C(0x00ffffff); /* Omit the local apic ID in case we're rescheduled. */
351 u32 = ASMCpuId_EBX(iStd);
352 CHECKVAL(u32 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
353
354 u32 = ASMCpuId_ECX(iStd);
355 CHECKVAL(u32, s.uECX, "%x");
356 u32 = ASMCpuId_EDX(iStd);
357 CHECKVAL(u32, s.uEDX, "%x");
358
359 uECX2 = s.uECX - 1;
360 uEDX2 = s.uEDX - 1;
361 ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
362 CHECKVAL(uECX2, s.uECX, "%x");
363 CHECKVAL(uEDX2, s.uEDX, "%x");
364
365 uEAX2 = s.uEAX - 1;
366 uEBX2 = s.uEBX - 1;
367 uECX2 = s.uECX - 1;
368 uEDX2 = s.uEDX - 1;
369 ASMCpuId(iStd, &uEAX2, &uEBX2, &uECX2, &uEDX2);
370 CHECKVAL(uEAX2, s.uEAX, "%x");
371 CHECKVAL(uEBX2 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
372 CHECKVAL(uECX2, s.uECX, "%x");
373 CHECKVAL(uEDX2, s.uEDX, "%x");
374 }
375 }
376
377 /*
378 * Understandable output
379 */
380 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
381 RTTestIPrintf(RTTESTLVL_ALWAYS,
382 "Name: %.04s%.04s%.04s\n"
383 "Support: 0-%u\n",
384 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
385 bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);
386
387 /*
388 * Get Features.
389 */
390 if (cFunctions >= 1)
391 {
392 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
393 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
394 RTTestIPrintf(RTTESTLVL_ALWAYS,
395 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
396 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
397 "Stepping: %d\n"
398 "Type: %d (%s)\n"
399 "APIC ID: %#04x\n"
400 "Logical CPUs: %d\n"
401 "CLFLUSH Size: %d\n"
402 "Brand ID: %#04x\n",
403 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
404 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
405 ASMGetCpuStepping(s.uEAX),
406 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
407 (s.uEBX >> 24) & 0xff,
408 (s.uEBX >> 16) & 0xff,
409 (s.uEBX >> 8) & 0xff,
410 (s.uEBX >> 0) & 0xff);
411
412 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
413 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
414 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
415 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
416 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
417 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
418 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
419 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
420 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
421 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
422 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
423 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
424 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
425 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
426 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
427 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
428 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
429 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
430 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
431 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
432 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
433 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
434 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
435 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
436 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
437 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
438 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
439 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
440 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
441 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
442 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
443 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
444 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
445 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
446
447 /** @todo check intel docs. */
448 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
449 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
450 for (iBit = 1; iBit < 13; iBit++)
451 if (s.uECX & RT_BIT(iBit))
452 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
453 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
454 for (iBit = 14; iBit < 32; iBit++)
455 if (s.uECX & RT_BIT(iBit))
456 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
457 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
458 }
459 if (ASMCpuId_EAX(0) >= 0xb)
460 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 0b): %#010x\n", ASMGetApicIdExt0B());
461
462 /*
463 * Extended.
464 * Implemented after AMD specs.
465 */
466 /** @todo check out the intel specs. */
467 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
468 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
469 {
470 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
471 return;
472 }
473 const uint32_t cExtFunctions = s.uEAX | 0x80000000;
474
475 /* raw dump */
476 RTTestIPrintf(RTTESTLVL_ALWAYS,
477 "\n"
478 " RAW Extended CPUIDs\n"
479 "Function eax ebx ecx edx\n");
480 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
481 {
482 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
483 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
484 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
485
486 if (iExt > cExtFunctions)
487 continue; /* Invalid extended functions seems change the value if ECX changes */
488 if (iExt == 0x8000001d)
489 continue; /* Takes cache level in ecx. */
490
491 u32 = ASMCpuId_EAX(iExt);
492 CHECKVAL(u32, s.uEAX, "%x");
493 u32 = ASMCpuId_EBX(iExt);
494 CHECKVAL(u32, s.uEBX, "%x");
495 u32 = ASMCpuId_ECX(iExt);
496 CHECKVAL(u32, s.uECX, "%x");
497 u32 = ASMCpuId_EDX(iExt);
498 CHECKVAL(u32, s.uEDX, "%x");
499
500 uECX2 = s.uECX - 1;
501 uEDX2 = s.uEDX - 1;
502 ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
503 CHECKVAL(uECX2, s.uECX, "%x");
504 CHECKVAL(uEDX2, s.uEDX, "%x");
505
506 uEAX2 = s.uEAX - 1;
507 uEBX2 = s.uEBX - 1;
508 uECX2 = s.uECX - 1;
509 uEDX2 = s.uEDX - 1;
510 ASMCpuId(iExt, &uEAX2, &uEBX2, &uECX2, &uEDX2);
511 CHECKVAL(uEAX2, s.uEAX, "%x");
512 CHECKVAL(uEBX2, s.uEBX, "%x");
513 CHECKVAL(uECX2, s.uECX, "%x");
514 CHECKVAL(uEDX2, s.uEDX, "%x");
515 }
516
517 /*
518 * Understandable output
519 */
520 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
521 RTTestIPrintf(RTTESTLVL_ALWAYS,
522 "Ext Name: %.4s%.4s%.4s\n"
523 "Ext Supports: 0x80000000-%#010x\n",
524 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
525
526 if (cExtFunctions >= 0x80000001)
527 {
528 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
529 RTTestIPrintf(RTTESTLVL_ALWAYS,
530 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
531 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
532 "Stepping: %d\n"
533 "Brand ID: %#05x\n",
534 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
535 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
536 ASMGetCpuStepping(s.uEAX),
537 s.uEBX & 0xfff);
538
539 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
540 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
541 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
542 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
543 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
544 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
545 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
546 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
547 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
548 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
549 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
550 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
551 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
552 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
553 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
554 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
555 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
556 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
557 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
558 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
559 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
560 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
561 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
562 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
563 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
564 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
565 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
566 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
567 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
568 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
569 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
570 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
571 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
572 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
573
574 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
575 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
576 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
577 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
578 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
579 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
580 for (iBit = 5; iBit < 32; iBit++)
581 if (s.uECX & RT_BIT(iBit))
582 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
583 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
584 }
585
586 char szString[4*4*3+1] = {0};
587 if (cExtFunctions >= 0x80000002)
588 ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
589 if (cExtFunctions >= 0x80000003)
590 ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
591 if (cExtFunctions >= 0x80000004)
592 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
593 if (cExtFunctions >= 0x80000002)
594 RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);
595
596 if (cExtFunctions >= 0x80000005)
597 {
598 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
599 RTTestIPrintf(RTTESTLVL_ALWAYS,
600 "TLB 2/4M Instr/Uni: %s %3d entries\n"
601 "TLB 2/4M Data: %s %3d entries\n",
602 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
603 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
604 RTTestIPrintf(RTTESTLVL_ALWAYS,
605 "TLB 4K Instr/Uni: %s %3d entries\n"
606 "TLB 4K Data: %s %3d entries\n",
607 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
608 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
609 RTTestIPrintf(RTTESTLVL_ALWAYS,
610 "L1 Instr Cache Line Size: %d bytes\n"
611 "L1 Instr Cache Lines Per Tag: %d\n"
612 "L1 Instr Cache Associativity: %s\n"
613 "L1 Instr Cache Size: %d KB\n",
614 (s.uEDX >> 0) & 0xff,
615 (s.uEDX >> 8) & 0xff,
616 getCacheAss((s.uEDX >> 16) & 0xff),
617 (s.uEDX >> 24) & 0xff);
618 RTTestIPrintf(RTTESTLVL_ALWAYS,
619 "L1 Data Cache Line Size: %d bytes\n"
620 "L1 Data Cache Lines Per Tag: %d\n"
621 "L1 Data Cache Associativity: %s\n"
622 "L1 Data Cache Size: %d KB\n",
623 (s.uECX >> 0) & 0xff,
624 (s.uECX >> 8) & 0xff,
625 getCacheAss((s.uECX >> 16) & 0xff),
626 (s.uECX >> 24) & 0xff);
627 }
628
629 if (cExtFunctions >= 0x80000006)
630 {
631 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
632 RTTestIPrintf(RTTESTLVL_ALWAYS,
633 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
634 "L2 TLB 2/4M Data: %s %4d entries\n",
635 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
636 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
637 RTTestIPrintf(RTTESTLVL_ALWAYS,
638 "L2 TLB 4K Instr/Uni: %s %4d entries\n"
639 "L2 TLB 4K Data: %s %4d entries\n",
640 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
641 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
642 RTTestIPrintf(RTTESTLVL_ALWAYS,
643 "L2 Cache Line Size: %d bytes\n"
644 "L2 Cache Lines Per Tag: %d\n"
645 "L2 Cache Associativity: %s\n"
646 "L2 Cache Size: %d KB\n",
647 (s.uEDX >> 0) & 0xff,
648 (s.uEDX >> 8) & 0xf,
649 getL2CacheAss((s.uEDX >> 12) & 0xf),
650 (s.uEDX >> 16) & 0xffff);
651 }
652
653 if (cExtFunctions >= 0x80000007)
654 {
655 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
656 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
657 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
658 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
659 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
660 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
661 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
662 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
663 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
664 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
665 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
666 for (iBit = 9; iBit < 32; iBit++)
667 if (s.uEDX & RT_BIT(iBit))
668 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
669 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
670 }
671
672 if (cExtFunctions >= 0x80000008)
673 {
674 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
675 RTTestIPrintf(RTTESTLVL_ALWAYS,
676 "Physical Address Width: %d bits\n"
677 "Virtual Address Width: %d bits\n"
678 "Guest Physical Address Width: %d bits\n",
679 (s.uEAX >> 0) & 0xff,
680 (s.uEAX >> 8) & 0xff,
681 (s.uEAX >> 16) & 0xff);
682 RTTestIPrintf(RTTESTLVL_ALWAYS,
683 "Physical Core Count: %d\n",
684 ((s.uECX >> 0) & 0xff) + 1);
685 if ((s.uECX >> 12) & 0xf)
686 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
687 }
688
689 if (cExtFunctions >= 0x8000000a)
690 {
691 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
692 RTTestIPrintf(RTTESTLVL_ALWAYS,
693 "SVM Revision: %d (%#x)\n"
694 "Number of Address Space IDs: %d (%#x)\n",
695 s.uEAX & 0xff, s.uEAX & 0xff,
696 s.uEBX, s.uEBX);
697 }
698 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
699 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 8000001b): %#010x\n", ASMGetApicIdExt8000001E());
700}
701
702# if 0
/**
 * Walks every CPUID leaf from 0 to UINT32_MAX (disabled via #if 0 — this
 * takes a very long time) and prints a line whenever the returned registers
 * differ from the previous leaf's values, plus at every 128M leaf boundary.
 */
static void bruteForceCpuId(void)
{
    RTTestISub("brute force CPUID leafs");
    uint32_t auPrevValues[4] = { 0, 0, 0, 0};
    uint32_t uLeaf = 0;
    do
    {
        uint32_t auValues[4];
        ASMCpuIdExSlow(uLeaf, 0, 0, 0, &auValues[0], &auValues[1], &auValues[2], &auValues[3]);
        /* Print when any register changed vs the previous leaf (ignoring EAX
           echoing the leaf number and zeroed outputs), or periodically so
           there is a sign of progress. */
        if (   (auValues[0] != auPrevValues[0] && auValues[0] != uLeaf)
            || (auValues[1] != auPrevValues[1] && auValues[1] != 0)
            || (auValues[2] != auPrevValues[2] && auValues[2] != 0)
            || (auValues[3] != auPrevValues[3] && auValues[3] != 0)
            || (uLeaf & (UINT32_C(0x08000000) - UINT32_C(1))) == 0)
        {
            RTTestIPrintf(RTTESTLVL_ALWAYS,
                          "%08x: %08x %08x %08x %08x\n", uLeaf,
                          auValues[0], auValues[1], auValues[2], auValues[3]);
        }
        auPrevValues[0] = auValues[0];
        auPrevValues[1] = auValues[1];
        auPrevValues[2] = auValues[2];
        auPrevValues[3] = auValues[3];

        //uint32_t uSubLeaf = 0;
        //do
        //{
        //
        //
        //} while (false);
    } while (uLeaf++ < UINT32_MAX);
}
735# endif
736
737#endif /* AMD64 || X86 */
738
/** Stores @a a_Val into *@a a_pVar, checks that @a a_Function reads back
 * exactly that value (as @a a_Type, formatted with @a a_Fmt), and that the
 * variable itself is unchanged by the read. */
#define TEST_READ(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { *a_pVar = a_Val; CHECKOP(a_Function(a_pVar), a_Val, a_Fmt, a_Type); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
741
742DECLINLINE(void) tstASMAtomicReadU8Worker(uint8_t volatile *pu8)
743{
744 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 0);
745 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 1);
746 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 2);
747 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 16);
748 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
749 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
750 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 127);
751 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 128);
752 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 169);
753 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 239);
754 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 254);
755 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 255);
756
757 int8_t volatile *pi8 = (int8_t volatile *)pu8;
758 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MAX);
759 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MIN);
760 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, 42);
761 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, -21);
762
763 bool volatile *pf = (bool volatile *)pu8;
764 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, true);
765 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, false);
766}
767
768
769DECLINLINE(void) tstASMAtomicUoReadU8Worker(uint8_t volatile *pu8)
770{
771 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 0);
772 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 1);
773 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 2);
774 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 16);
775 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
776 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
777 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 127);
778 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 128);
779 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 169);
780 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 239);
781 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 254);
782 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 255);
783
784 int8_t volatile *pi8 = (int8_t volatile *)pu8;
785 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MAX);
786 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MIN);
787 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, 42);
788 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, -21);
789
790 bool volatile *pf = (bool volatile *)pu8;
791 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, true);
792 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, false);
793}
794
795
796DECLINLINE(void) tstASMAtomicReadU16Worker(uint16_t volatile *pu16)
797{
798 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 0);
799 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 19983);
800 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, INT16_MAX);
801 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, UINT16_MAX);
802
803 int16_t volatile *pi16 = (int16_t volatile *)pu16;
804 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MAX);
805 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MIN);
806 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, 42);
807 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, -21);
808}
809
810
/**
 * Exercises the unordered 16-bit atomic reads (ASMAtomicUoReadU16/S16).
 *
 * Mirrors tstASMAtomicReadU16Worker but for the unordered (no fence) variants.
 *
 * @param   pu16    Pointer to the 16-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicUoReadU16Worker(uint16_t volatile *pu16)
{
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 0);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 19983);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, INT16_MAX);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, UINT16_MAX);

    /* Same storage re-viewed as signed. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MAX);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MIN);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, 42);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, -21);
}
824
825
826DECLINLINE(void) tstASMAtomicReadU32Worker(uint32_t volatile *pu32)
827{
828 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 0);
829 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 19983);
830 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT16_MAX);
831 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT16_MAX);
832 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M-1);
833 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M+1);
834 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G-1);
835 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G+1);
836 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT32_MAX);
837 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT32_MAX);
838
839 int32_t volatile *pi32 = (int32_t volatile *)pu32;
840 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MAX);
841 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MIN);
842 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, 42);
843 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, -21);
844
845#if ARCH_BITS == 32
846 size_t volatile *pcb = (size_t volatile *)pu32;
847 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
848 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
849 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
850
851 void * volatile *ppv = (void * volatile *)pu32;
852 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
853 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
854
855 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
856 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
857 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
858
859 ASMAtomicReadHandle(phEvt, &hEvt);
860 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
861#endif
862}
863
864
865DECLINLINE(void) tstASMAtomicUoReadU32Worker(uint32_t volatile *pu32)
866{
867 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 0);
868 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 19983);
869 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT16_MAX);
870 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT16_MAX);
871 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M-1);
872 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M+1);
873 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G-1);
874 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G+1);
875 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT32_MAX);
876 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT32_MAX);
877
878 int32_t volatile *pi32 = (int32_t volatile *)pu32;
879 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MAX);
880 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MIN);
881 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, 42);
882 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, -21);
883
884#if ARCH_BITS == 32
885 size_t volatile *pcb = (size_t volatile *)pu32;
886 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
887 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
888 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
889
890 void * volatile *ppv = (void * volatile *)pu32;
891 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
892 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
893
894 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
895 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
896 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
897
898 ASMAtomicUoReadHandle(phEvt, &hEvt);
899 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
900#endif
901}
902
903
904DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
905{
906 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 0);
907 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 19983);
908 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT16_MAX);
909 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT16_MAX);
910 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M-1);
911 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M+1);
912 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G-1);
913 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G+1);
914 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT32_MAX);
915 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT32_MAX);
916 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT64_MAX);
917 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_MAX);
918 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_C(0x450872549687134));
919
920 int64_t volatile *pi64 = (int64_t volatile *)pu64;
921 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MAX);
922 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MIN);
923 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, 42);
924 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, -21);
925
926#if ARCH_BITS == 64
927 size_t volatile *pcb = (size_t volatile *)pu64;
928 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
929 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
930 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
931
932 void * volatile *ppv = (void * volatile *)pu64;
933 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
934 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
935
936 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
937 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
938 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
939
940 ASMAtomicReadHandle(phEvt, &hEvt);
941 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
942#endif
943}
944
945
946DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
947{
948 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 0);
949 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 19983);
950 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT16_MAX);
951 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT16_MAX);
952 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M-1);
953 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M+1);
954 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G-1);
955 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G+1);
956 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT32_MAX);
957 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT32_MAX);
958 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT64_MAX);
959 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_MAX);
960 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_C(0x450872549687134));
961
962 int64_t volatile *pi64 = (int64_t volatile *)pu64;
963 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MAX);
964 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MIN);
965 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, 42);
966 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, -21);
967
968#if ARCH_BITS == 64
969 size_t volatile *pcb = (size_t volatile *)pu64;
970 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
971 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
972 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
973
974 void * volatile *ppv = (void * volatile *)pu64;
975 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
976 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
977
978 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
979 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
980 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
981
982 ASMAtomicUoReadHandle(phEvt, &hEvt);
983 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
984#endif
985}
986
987
/**
 * Drives the ordered and unordered atomic read workers for all operand sizes
 * via DO_SIMPLE_TEST (which runs each worker against differently placed
 * variables).
 */
static void tstASMAtomicRead(void)
{
    DO_SIMPLE_TEST(ASMAtomicReadU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicReadU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicReadU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
}
1002
1003
/** Performs an atomic write of a_Val through a_Function and verifies the
 *  variable afterwards holds exactly that value (printed with a_Fmt on
 *  mismatch).  Note: the a_Type argument is currently unused by the
 *  expansion; it is kept for symmetry with TEST_READ. */
#define TEST_WRITE(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { a_Function(a_pVar, a_Val); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
1006
/**
 * Exercises the ordered 8-bit atomic writes (ASMAtomicWriteU8/S8/Bool).
 *
 * @param   pu8     Pointer to the 8-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 255);

    /* Same storage re-viewed as signed. */
    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, -41);

    /* Same storage re-viewed as bool. */
    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, false);
}
1032
1033
/**
 * Exercises the unordered 8-bit atomic writes (ASMAtomicUoWriteU8/S8/Bool).
 *
 * @param   pu8     Pointer to the 8-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicUoWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 255);

    /* Same storage re-viewed as signed. */
    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, -41);

    /* Same storage re-viewed as bool. */
    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, false);
}
1059
1060
/**
 * Exercises the ordered 16-bit atomic writes (ASMAtomicWriteU16/S16).
 *
 * @param   pu16    Pointer to the 16-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, UINT16_MAX);

    /* Same storage re-viewed as signed. */
    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, -41);
}
1074
1075
/**
 * Exercises the unordered 16-bit atomic writes (ASMAtomicUoWriteU16/S16).
 *
 * @param   pu16    Pointer to the 16-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicUoWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, UINT16_MAX);

    /* Same storage re-viewed as signed. */
    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, -41);
}
1089
1090
/**
 * Exercises the ordered 32-bit atomic writes (ASMAtomicWriteU32/S32) and, on
 * 32-bit hosts, the size_t/pointer/handle write wrappers backed by the same
 * storage.
 *
 * @param   pu32    Pointer to the 32-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT32_MAX);

    /* Same storage re-viewed as signed. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t and pointers share the 32-bit storage. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1126
1127
/**
 * Exercises the unordered 32-bit atomic writes (ASMAtomicUoWriteU32/S32) and,
 * on 32-bit hosts, the unordered size_t/pointer/handle write wrappers.
 *
 * @param   pu32    Pointer to the 32-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicUoWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT32_MAX);

    /* Same storage re-viewed as signed. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t and pointers share the 32-bit storage. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1163
1164
1165DECLINLINE(void) tstASMAtomicWriteU64Worker(uint64_t volatile *pu64)
1166{
1167 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 0);
1168 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 19983);
1169 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT16_MAX);
1170 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT16_MAX);
1171 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M-1);
1172 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M+1);
1173 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G-1);
1174 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G+1);
1175 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT32_MAX);
1176 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT32_MAX);
1177 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT64_MAX);
1178 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_MAX);
1179 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_C(0x450872549687134));
1180
1181 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1182 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MIN);
1183 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MAX);
1184 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, 42);
1185
1186#if ARCH_BITS == 64
1187 size_t volatile *pcb = (size_t volatile *)pu64;
1188 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
1189 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);
1190
1191 void * volatile *ppv = (void * volatile *)pu64;
1192 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
1193 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);
1194
1195 ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1196 ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1197
1198 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1199 ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1200#endif
1201}
1202
1203
1204DECLINLINE(void) tstASMAtomicUoWriteU64Worker(uint64_t volatile *pu64)
1205{
1206 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 0);
1207 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 19983);
1208 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT16_MAX);
1209 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT16_MAX);
1210 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M-1);
1211 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M+1);
1212 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G-1);
1213 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G+1);
1214 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT32_MAX);
1215 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT32_MAX);
1216 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT64_MAX);
1217 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_MAX);
1218 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_C(0x450872549687134));
1219
1220 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1221 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MIN);
1222 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MAX);
1223 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, 42);
1224
1225#if ARCH_BITS == 64
1226 size_t volatile *pcb = (size_t volatile *)pu64;
1227 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
1228 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);
1229
1230 void * volatile *ppv = (void * volatile *)pu64;
1231 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
1232 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);
1233
1234 ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1235 ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1236
1237 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1238 ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1239#endif
1240}
1241
/**
 * Drives the ordered and unordered atomic write workers for all operand sizes
 * via DO_SIMPLE_TEST.
 */
static void tstASMAtomicWrite(void)
{
    DO_SIMPLE_TEST(ASMAtomicWriteU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU64, uint64_t);
}
1256
1257
/**
 * Exercises ASMAtomicXchgU8/S8/Bool.
 *
 * Each CHECK_OP_AND_VAL verifies both the returned old value and the new
 * variable content; the expected-old values chain from one statement to the
 * next, so the statement order must not be changed.
 *
 * @param   pu8     Pointer to the 8-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0;
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, 1), 0, 1);
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xff)), 1, UINT8_C(0xff));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0x87)), UINT8_C(0xff), UINT8_C(0x87));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xfe)), UINT8_C(0x87), UINT8_C(0xfe));

    /* Same storage as signed; 0xfe above is -2 when viewed as int8_t. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(-4)), INT8_C(-2), INT8_C(-4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(4)), INT8_C(-4), INT8_C(4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MAX), INT8_C(4), INT8_MAX);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MIN), INT8_MAX, INT8_MIN);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, 1), INT8_MIN, 1);

    /* Same storage as bool; the value 1 written above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, true), false, true);
}
1278
1279
/**
 * Exercises ASMAtomicXchgU16/S16.
 *
 * The expected-old values chain from one statement to the next; keep the
 * statement order.
 *
 * @param   pu16    Pointer to the 16-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
{
    *pu16 = 0;
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 1), 0, 1);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 0), 1, 0);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_MAX), 0, UINT16_MAX);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x7fff)), UINT16_MAX, UINT16_C(0x7fff));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x8765)), UINT16_C(0x7fff), UINT16_C(0x8765));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0xfffe)), UINT16_C(0x8765), UINT16_C(0xfffe));

    /* Same storage as signed; 0xfffe above is -2 when viewed as int16_t. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MIN), INT16_C(-2), INT16_MIN);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MAX), INT16_MIN, INT16_MAX);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, -8), INT16_MAX, -8);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, 8), -8, 8);
}
1296
1297
1298DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
1299{
1300 *pu32 = 0;
1301 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 1), 0, 1);
1302 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 0), 1, 0);
1303 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_MAX), 0, UINT32_MAX);
1304 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0x87654321)), UINT32_MAX, UINT32_C(0x87654321));
1305 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0xfffffffe)), UINT32_C(0x87654321), UINT32_C(0xfffffffe));
1306
1307 int32_t volatile *pi32 = (int32_t volatile *)pu32;
1308 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MIN), INT32_C(-2), INT32_MIN);
1309 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MAX), INT32_MIN, INT32_MAX);
1310 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, -16), INT32_MAX, -16);
1311 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, 16), -16, 16);
1312
1313#if ARCH_BITS == 32
1314 size_t volatile *pcb = (size_t volatile *)pu32;
1315 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0x9481239b)), 0x10, UINT32_C(0x9481239b));
1316 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0xcdef1234)), UINT32_C(0x9481239b), UINT32_C(0xcdef1234));
1317#endif
1318
1319#if R0_ARCH_BITS == 32
1320 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu32;
1321 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT32_C(0x80341237)), UINT32_C(0xcdef1234), UINT32_C(0x80341237));
1322#endif
1323}
1324
1325
1326DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
1327{
1328 *pu64 = 0;
1329 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 1), 0, 1);
1330 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 0), 1, 0);
1331 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_MAX), 0, UINT64_MAX);
1332 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), UINT64_MAX, UINT64_C(0xfedcba0987654321));
1333 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfffffffffffffffe)), UINT64_C(0xfedcba0987654321), UINT64_C(0xfffffffffffffffe));
1334
1335 int64_t volatile *pi64 = (int64_t volatile *)pu64;
1336 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MAX), -2, INT64_MAX);
1337 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MIN), INT64_MAX, INT64_MIN);
1338 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, -32), INT64_MIN, -32);
1339 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, 32), -32, 32);
1340
1341#if ARCH_BITS == 64
1342 size_t volatile *pcb = (size_t volatile *)pu64;
1343 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0x94812396759)), 0x20, UINT64_C(0x94812396759));
1344 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0xcdef1234abdf7896)), UINT64_C(0x94812396759), UINT64_C(0xcdef1234abdf7896));
1345#endif
1346
1347#if R0_ARCH_BITS == 64
1348 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu64;
1349 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT64_C(0xfedc1234567890ab)), UINT64_C(0xcdef1234abdf7896), UINT64_C(0xfedc1234567890ab));
1350#endif
1351}
1352
1353
/**
 * Exercises the pointer exchange APIs (ASMAtomicXchgPtr, ASMAtomicXchgR3Ptr,
 * ASMAtomicXchgHandle).
 *
 * @param   ppv     Pointer to the pointer-sized test variable (harness
 *                  allocated).
 */
DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
{
    *ppv = NULL;
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, (void *)(~(uintptr_t)0));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0x87654321)), (void *)(~(uintptr_t)0), (void *)(~(uintptr_t)0x87654321));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, NULL), (void *)(~(uintptr_t)0x87654321), NULL);

    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgR3Ptr(ppv, (void *)ppv), NULL, (void *)ppv);

    /* Handle exchange: old value is the ppv self-pointer written above. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)ppv;
    RTSEMEVENT hRet;
    ASMAtomicXchgHandle(phEvt, (RTSEMEVENT)(~(uintptr_t)12345), &hRet);
    CHECKVAL(hRet, (RTSEMEVENT)ppv, "%p");
    CHECKVAL(*phEvt, (RTSEMEVENT)(~(uintptr_t)12345), "%p");
}
1369
1370
/**
 * Drives the atomic exchange workers for all operand sizes and for pointers
 * via DO_SIMPLE_TEST.
 */
static void tstASMAtomicXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
}
1379
1380
/**
 * Exercises ASMAtomicCmpXchgU8/S8/Bool.
 *
 * CHECK_OP_AND_VAL_EX verifies both the returned success flag and the
 * variable content afterwards; the expected values chain from one statement
 * to the next, so the order must not be changed.
 *
 * @param   pu8     Pointer to the 8-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0xff;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0), false, 0xff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), false, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x7f, 0x97), true, 0x7f);

    /* Same storage as signed; the variable currently holds 0x7f == INT8_MAX. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, -2, 0x7f), true, -2);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, -2), true, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, INT8_MIN), false, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MIN, INT8_MAX), true, INT8_MIN);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, 1, INT8_MIN), true, 1);

    /* Same storage as bool; the value 1 written above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, true, true), true, true);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, false), true, false);
}
1403
1404
/**
 * Exercises ASMAtomicCmpXchgU32/S32 and, on 32-bit hosts, the pointer and
 * handle compare-exchange wrappers on the same storage.
 *
 * The expected values chain from one statement to the next; keep the order.
 *
 * @param   pu32    Pointer to the 32-bit test variable (harness allocated).
 */
DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, 0), false, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), UINT32_C(0x12345678)), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), 0), true, UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0xfffffffe), UINT32_C(0x80088efd)), true, UINT32_C(0xfffffffe));

    /* Same storage as signed; 0xfffffffe above is -2 when viewed as int32_t. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, 2), false, -2);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, -2), true, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, -2), false, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), true, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), false, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, 19, -19), true, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, -234), false, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, 19), true, INT32_MAX);

#if ARCH_BITS == 32
    /* Pointer wrappers share the storage on 32-bit hosts; INT32_MAX above
       reads back as (void *)(intptr_t)0x7fffffff... NOTE(review): the first
       expected value 29 relies on CHECK_OP_AND_VAL_EX's handling — confirm
       against the macro definition. */
    void * volatile *ppv = (void * volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    /* Handle compare-exchange: first attempt must fail (wrong expected old),
       second must succeed. */
    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1442
1443
/**
 * Worker for tstASMAtomicCmpXchg: exercises ASMAtomicCmpXchgU64 and
 * ASMAtomicCmpXchgS64 (plus the pointer and handle variants on 64-bit hosts)
 * on the memory at @a pu64.
 *
 * Each line performs one compare-exchange and states the expected success
 * indicator and the expected memory content afterwards; every step depends on
 * the value left behind by the previous step, so the order must not change.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffffffffff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, 0), false, UINT64_C(0xffffffffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 1), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), false, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0xfffffffffffffffd), UINT64_C(0x80040008008efd)), true, UINT64_C(0xfffffffffffffffd));

    /* Signed variant on the same memory; 0xfffffffffffffffd == -3 here. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, 0), false, -3);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, -3), true, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MIN), false, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MAX), true, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, -29), false, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), true, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), false, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, 29, -29), true, 29);

#if ARCH_BITS == 64
    /* Pointer and handle variants only fit a 64-bit word on 64-bit hosts. */
    void * volatile *ppv = (void * volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1482
1483
/** Drives the ASMAtomicCmpXchg* workers for the 8, 32 and 64-bit widths. */
static void tstASMAtomicCmpXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
}
1490
1491
/**
 * Worker for tstASMAtomicCmpXchgEx: exercises ASMAtomicCmpXchgExU32 and
 * ASMAtomicCmpXchgExS32 (plus the Ex handle variant on 32-bit hosts) on the
 * memory at @a pu32, also checking the "old value" output parameter.
 *
 * The steps chain: each expected value is what the previous step left behind,
 * so the order must not change.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    uint32_t u32Old = UINT32_C(0x80005111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, 0, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), false, 0, UINT32_C(0x00000000));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), true, UINT32_C(0x80088efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), false, UINT32_C(0x80088efd), UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0xffffffe0), UINT32_C(0x80088efd), &u32Old), true, UINT32_C(0xffffffe0), UINT32_C(0x80088efd));

    /* Signed variant on the same memory; 0xffffffe0 == -32 here. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    int32_t i32Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, 32, &i32Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, -32, &i32Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), true, INT32_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MAX, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MIN, &i32Old), true, INT32_MAX, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 42, INT32_MAX, &i32Old), true, 42, INT32_MAX);

#if ARCH_BITS == 32
    /* Handle variant only fits a 32-bit word on 32-bit hosts; current value is 42. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
#endif
}
1528
1529
/**
 * Worker for tstASMAtomicCmpXchgEx: exercises ASMAtomicCmpXchgExU64 and
 * ASMAtomicCmpXchgExS64 (plus the Ex handle/pointer variants on 64-bit hosts)
 * on the memory at @a pu64, also checking the "old value" output parameter.
 *
 * The steps chain: each expected value is what the previous step left behind,
 * so the order must not change.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffffffffffff);
    uint64_t u64Old = UINT64_C(0x8000000051111111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, 0, UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0x342, &u64Old), false, 0, 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0, &u64Old), true, UINT64_C(0x0080040008008efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd), &u64Old), true, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd));

    /* Signed variant on the same memory; 0xffffffffffffffc0 == -64 here. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    int64_t i64Old = -3;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, 64, &i64Old), false, -64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), true, 64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, 64, &i64Old), true, INT64_MIN, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MAX, INT64_MIN, &i64Old), true, INT64_MAX, INT64_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 42, INT64_MAX, &i64Old), true, 42, INT64_MAX);

#if ARCH_BITS == 64
    /* Handle and pointer variants only fit a 64-bit word on 64-bit hosts. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    void * volatile *ppv = (void * volatile *)pu64;
    void *pvOld;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, NULL, &pvOld), false, (void *)~(uintptr_t)0x12380964, (void *)~(uintptr_t)0x12380964);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964, &pvOld), true, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964);

    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)~(uintptr_t)99, &pvOld), false, (void *)(intptr_t)12345678, (void *)(intptr_t)12345678);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)(intptr_t)12345678, &pvOld), true, (void *)~(intptr_t)99, (void *)(intptr_t)12345678);
#endif
}
1573
1574
/** Drives the ASMAtomicCmpXchgEx* workers for the 32 and 64-bit widths. */
static void tstASMAtomicCmpXchgEx(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
}
1580
1581
/**
 * Invokes @a a_Function(a_pVar, a_uVal) and checks that it returns the value
 * the variable had before the call and that the variable now equals
 * @a a_VarExpect, reporting a test failure otherwise.
 * @note a_Fmt must match a_Type, as the values are passed to RTTestFailed
 *       as variadic arguments.
 */
#define TEST_RET_OLD(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type const uOldExpect = *(a_pVar); \
        a_Type uOldRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uOldRet == (uOldExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s," a_Fmt ") -> " a_Fmt ", expected " a_Fmt "; %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uOldRet, uOldExpect, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1589
1590
/**
 * Worker for tstASMAtomicAdd: exercises ASMAtomicAddU32 / ASMAtomicSubU32 on
 * the memory at @a pu32, including wraparound past 0 and UINT32_MAX.
 * Steps chain off the previous result, so the order must not change.
 */
DECLINLINE(void) tstASMAtomicAddU32Worker(uint32_t *pu32)
{
    *pu32 = 10;
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, 11);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffffe), 9);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffff7), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 0, 0);

    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 0, 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 32, UINT32_C(0xffffffe0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffe1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffde), UINT32_C(0x00000003));
}
1608
1609
/**
 * Worker for tstASMAtomicAdd: exercises ASMAtomicAddS32 / ASMAtomicSubS32 on
 * the memory at @a pi32, including the INT32_MIN/INT32_MAX boundaries.
 * Steps chain off the previous result, so the order must not change.
 */
DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
{
    *pi32 = 10;
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 1, 11);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -2, 9);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -9, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -0x7fffffff, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0x7fffffff, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, 0);

    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 0, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 1, -1);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, INT32_MIN, INT32_MAX); /* -1 - INT32_MIN == INT32_MAX */
}
1625
1626
/**
 * Worker for tstASMAtomicAdd: exercises ASMAtomicAddU64 / ASMAtomicSubU64 on
 * the memory at @a pu64, including wraparound past 0 and UINT64_MAX.
 * Steps chain off the previous result, so the order must not change.
 */
DECLINLINE(void) tstASMAtomicAddU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 10;
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, 1, 11);
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffffe), UINT64_C(0x0000000000000009));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffff7), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0x7ffffffffffffff0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000000), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x000000000000001f), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000000));

    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000020), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffff), UINT64_C(0x7fffffffffffffe1));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffdd), UINT64_C(0x0000000000000004));
}
1644
1645
1646DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
1647{
1648 *pi64 = 10;
1649 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 11);
1650 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -2, 9);
1651 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -9, 0);
1652 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -INT64_MAX, -INT64_MAX);
1653 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, -INT64_MAX);
1654 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -1, INT64_MIN);
1655 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, INT64_MAX, -1);
1656 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 0);
1657 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, 0);
1658
1659 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 0, 0);
1660 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 1, -1);
1661 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, INT64_MIN, INT64_MAX);
1662}
1663
1664
1665
/**
 * Worker for tstASMAtomicAdd: exercises ASMAtomicAddZ / ASMAtomicSubZ on the
 * size_t at @a pcb, including wraparound via the two's complement of the
 * addends (~(size_t)1 == -2, ~(size_t)8 == -9).
 */
DECLINLINE(void) tstASMAtomicAddZWorker(size_t volatile *pcb)
{
    *pcb = 10;
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, 1, 11);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)1, 9);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)8, 0);

    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 0, 0);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 10, ~(size_t)9);
}
1676
/** Drives the ASMAtomicAdd*/ASMAtomicSub* workers for all supported widths. */
static void tstASMAtomicAdd(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicAddU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicAddZ, size_t);
}
1685
1686
/**
 * Invokes the single-argument @a a_Function(a_pVar) and checks that both the
 * return value and the variable afterwards equal @a a_VarExpect, reporting a
 * test failure otherwise ("NV" = no value argument; used for inc/dec).
 * @note a_Fmt must match a_Type, as the values are passed to RTTestFailed
 *       as variadic arguments.
 */
#define TEST_RET_NEW_NV(a_Type, a_Fmt, a_pVar, a_Function, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s) -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1693
1694
/**
 * Worker for tstASMAtomicDecInc: counts the value at @a pu32 down through 0
 * (wrapping to UINT32_MAX) and back up with ASMAtomicDecU32/ASMAtomicIncU32,
 * then repeats a short sequence around _1M.
 */
DECLINLINE(void) tstASMAtomicDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M + 1);
}
1716
/**
 * Worker for tstASMAtomicDecInc: same sequence as tstASMAtomicDecIncU32Worker
 * but for the unordered ASMAtomicUoDecU32/ASMAtomicUoIncU32 variants.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M + 1);
}
1738
1739
/**
 * Worker for tstASMAtomicDecInc: counts the value at @a pi32 down through
 * negative values and back up with ASMAtomicDecS32/ASMAtomicIncS32, ending
 * with the INT32_MAX -> INT32_MIN wraparound on increment.
 */
DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MIN);
}
1769
1770
#if 0 /* Disabled: exercises ASMAtomicUoDecS32/ASMAtomicUoIncS32 — presumably
         not available yet; the matching DO_SIMPLE_TEST in tstASMAtomicDecInc()
         is commented out too. */
/**
 * Disabled worker mirroring tstASMAtomicDecIncS32Worker for the unordered
 * ASMAtomicUoDecS32/ASMAtomicUoIncS32 variants.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MIN);
}
#endif
1802
1803
1804DECLINLINE(void) tstASMAtomicDecIncU64Worker(uint64_t volatile *pu64)
1805{
1806 *pu64 = 3;
1807 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 2);
1808 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 1);
1809 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 0);
1810 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX);
1811 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 1);
1812 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 2);
1813 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX - 1);
1814 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX);
1815 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 0);
1816 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 1);
1817 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 2);
1818 *pu64 = _4G - 1;
1819 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G - 2);
1820 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G - 1);
1821 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G);
1822 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G + 1);
1823 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G);
1824}
1825
1826
#if 0 /* Disabled: exercises ASMAtomicUoDecU64/ASMAtomicUoIncU64 — presumably
         not available yet; the matching DO_SIMPLE_TEST in tstASMAtomicDecInc()
         is commented out too.
         NOTE(review): "%lld" is a signed format for uint64_t values; switch to
         "%llx"/"%llu" if this code is ever enabled. */
/**
 * Disabled worker mirroring tstASMAtomicDecIncU64Worker for the unordered
 * ASMAtomicUoDecU64/ASMAtomicUoIncU64 variants.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 3;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 2);
    *pu64 = _4G - 1;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G + 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G);
}
#endif
1850
1851
/**
 * Worker for tstASMAtomicDecInc: counts the value at @a pi64 down through
 * negative values and back up with ASMAtomicDecS64/ASMAtomicIncS64, finishing
 * with a decrement from INT64_MAX.
 */
DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, INT64_MAX - 1);
}
1879
1880
#if 0 /* Disabled: exercises ASMAtomicUoDecS64/ASMAtomicUoIncS64 — presumably
         not available yet; the matching DO_SIMPLE_TEST in tstASMAtomicDecInc()
         is commented out too. */
/**
 * Disabled worker mirroring tstASMAtomicDecIncS64Worker for the unordered
 * ASMAtomicUoDecS64/ASMAtomicUoIncS64 variants.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, INT64_MAX - 1);
}
#endif
1910
1911
/**
 * Worker for tstASMAtomicDecInc: exercises ASMAtomicDecZ/ASMAtomicIncZ on the
 * size_t at @a pcb around a large architecture-independent base value
 * (all-ones shifted right by 7).
 */
DECLINLINE(void) tstASMAtomicDecIncZWorker(size_t volatile *pcb)
{
    size_t const uBaseVal = ~(size_t)0 >> 7;
    *pcb = uBaseVal;
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 3);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal + 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
}
1927
1928
/** Drives the ASMAtomicDec*/ASMAtomicInc* workers; the unordered (Uo) signed
 *  and 64-bit variants are commented out (workers are under "#if 0" above). */
static void tstASMAtomicDecInc(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncZ, size_t);
}
1941
1942
/**
 * Invokes the void-returning @a a_Function(a_pVar, a_uVal) and checks that the
 * variable afterwards equals @a a_VarExpect, reporting a test failure
 * otherwise.
 * @note a_Fmt must match a_Type, as the values are passed to RTTestFailed
 *       as variadic arguments.
 */
#define TEST_RET_VOID(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1949
/**
 * Invokes @a a_Function(a_pVar, a_uVal) and checks that both the return value
 * and the variable afterwards equal @a a_VarExpect, reporting a test failure
 * otherwise.
 * @note a_Fmt must match a_Type, as the values are passed to RTTestFailed
 *       as variadic arguments.
 */
#define TEST_RET_NEW(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1956
1957
/**
 * Worker exercising ASMAtomicOrU32, ASMAtomicAndU32 and ASMAtomicXorU32 on
 * the memory at @a pu32; each expected value is the bitwise result applied to
 * what the previous step left behind, so the order must not change.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
1975
1976
/**
 * Worker for the unordered (Uo) ASMAtomicUoOrU32/UoAndU32/UoXorU32 tests.
 *
 * Same operation/expected-value sequence as tstASMAtomicAndOrXorU32Worker,
 * exercising the unordered variants (no memory-fence semantics implied).
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
1994
1995
/**
 * Worker for the extended (Ex) ASMAtomicOrExU32/AndExU32/XorExU32 tests.
 *
 * Same sequence as tstASMAtomicAndOrXorU32Worker, but the Ex variants return
 * the OLD value, so TEST_RET_OLD additionally verifies the returned
 * pre-operation value.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
2013
2014
/**
 * Worker for the ASMAtomicOrU64/ASMAtomicAndU64 tests.
 *
 * First mirrors the 32-bit sequence (initial value only sets the low 32
 * bits), then repeats with full 64-bit patterns.  The XOR lines are
 * commented out -- ASMAtomicXorU64 is not available at this revision.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2048
2049
/**
 * Worker for the unordered (Uo) ASMAtomicUoOrU64/UoAndU64 tests.
 *
 * Same sequence as tstASMAtomicAndOrXorU64Worker using the unordered
 * variants.  XOR lines commented out: ASMAtomicUoXorU64 is not available
 * at this revision.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2083
2084
/* Disabled: the ASMAtomicOrExU64/AndExU64/XorExU64 (old-value returning)
   variants are not available at this revision; kept for when they are. */
#if 0
/**
 * Worker for the extended (Ex) 64-bit atomic OR/AND tests; the Ex variants
 * return the OLD value, verified by TEST_RET_OLD.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
#endif
2120
2121
/**
 * Drives all the atomic AND/OR/XOR worker functions via DO_SIMPLE_TEST,
 * which runs each worker against stack, heap and guarded-page placements.
 * The ExU64 worker is disabled along with its #if 0'd definition above.
 */
static void tstASMAtomicAndOrXor(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicAndOrXorExU64, uint64_t);
}
2131
2132
/** A page-sized byte buffer used by the page-oriented memory tests. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];
} TSTPAGE;
2137
2138
2139DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
2140{
2141 for (unsigned j = 0; j < 16; j++)
2142 {
2143 memset(pPage, 0x11 * j, sizeof(*pPage));
2144 ASMMemZeroPage(pPage);
2145 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
2146 if (pPage->ab[i])
2147 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
2148 if (ASMMemIsZeroPage(pPage) != true)
2149 RTTestFailed(g_hTest, "ASMMemIsZeroPage returns false after ASMMemZeroPage!\n");
2150 if (ASMMemFirstMismatchingU32(pPage, sizeof(pPage), 0) != NULL)
2151 RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,0) returns non-NULL after ASMMemZeroPage!\n");
2152 }
2153}
2154
2155
/** Entry point for the ASMMemZeroPage test; runs the worker on heap and
 *  guarded allocations (no stack variant - a whole page on the stack is
 *  avoided, hence NO_STACK). */
static void tstASMMemZeroPage(void)
{
    RTTestISub("ASMMemZeroPage");
    DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMMemZeroPageWorker, TSTPAGE);
}
2161
2162
/**
 * Tests ASMMemIsZeroPage.
 *
 * Uses guarded head/tail allocations so reads beyond the page fault, then
 * checks all-zero, all-0xff, and every single-byte perturbation (one with a
 * low bit set, one with the high bit set) of otherwise zero pages.
 *
 * @param   hTest   The test handle.
 */
void tstASMMemIsZeroPage(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemIsZeroPage");

    /* Page 1 is guarded before it, page 2 after it. */
    void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pvPage1 && pvPage2);

    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));

    memset(pvPage1, 0xff, PAGE_SIZE);
    memset(pvPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));

    /* A single non-zero byte anywhere must make the check fail. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    for (unsigned off = 0; off < PAGE_SIZE; off++)
    {
        ((uint8_t *)pvPage1)[off] = 1;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
        ((uint8_t *)pvPage1)[off] = 0;

        ((uint8_t *)pvPage2)[off] = 0x80;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
        ((uint8_t *)pvPage2)[off] = 0;
    }

    RTTestSubDone(hTest);
}
2196
2197
/**
 * Tests ASMMemFirstMismatchingU8 (and the related ASMMemIsZero /
 * ASMMemIsAllU8 helpers).
 *
 * Works on a head-guarded and a tail-guarded page so over-reads fault.
 * First checks whole-page and short-tail spans for all-zero and all-0xff
 * fills, then sweeps many (offset, length, mismatch-position) combinations
 * with two filler bytes.
 *
 * @param   hTest   The test handle.
 */
void tstASMMemFirstMismatchingU8(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemFirstMismatchingU8");

    uint8_t *pbPage1 = (uint8_t *)RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    uint8_t *pbPage2 = (uint8_t *)RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pbPage1 && pbPage2);

    /* All zero: only a zero needle matches everywhere. */
    memset(pbPage1, 0, PAGE_SIZE);
    memset(pbPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 1) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 1) == pbPage2);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0x87) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0x87) == pbPage2);
    RTTESTI_CHECK(ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0x34));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0x88));
    /* Short sub-spans (0..31 bytes), both right up against the guard page
       and at the start; a zero-length span always matches (returns NULL). */
    unsigned cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0x34) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0x99) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0x42) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0x88) == pbPage2 || !cbSub);
    }

    /* Same again with an all-0xff fill. */
    memset(pbPage1, 0xff, PAGE_SIZE);
    memset(pbPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xfe) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xfe) == pbPage2);
    RTTESTI_CHECK(!ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(!ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xff) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xfe) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xfe) == pbPage2 || !cbSub);
    }


    /*
     * Various alignments and sizes.
     */
    uint8_t const bFiller1 = 0x00;
    uint8_t const bFiller2 = 0xf6;
    size_t const cbBuf = 128;
    uint8_t *pbBuf1 = pbPage1;
    uint8_t *pbBuf2 = &pbPage2[PAGE_SIZE - cbBuf]; /* Put it up against the tail guard */
    memset(pbPage1, ~bFiller1, PAGE_SIZE);
    memset(pbPage2, ~bFiller2, PAGE_SIZE);
    memset(pbBuf1, bFiller1, cbBuf);
    memset(pbBuf2, bFiller2, cbBuf);
    for (size_t offNonZero = 0; offNonZero < cbBuf; offNonZero++)
    {
        /* Plant one byte guaranteed to differ from the filler at offNonZero. */
        uint8_t bRand = (uint8_t)RTRandU32();
        pbBuf1[offNonZero] = bRand | 1;
        pbBuf2[offNonZero] = (0x80 | bRand) ^ 0xf6;

        for (size_t offStart = 0; offStart < 32; offStart++)
        {
            size_t const cbMax = cbBuf - offStart;
            for (size_t cb = 0; cb < cbMax; cb++)
            {
                size_t const offEnd = offStart + cb;
                /* Temporarily corrupt the byte just past the scanned range to
                   catch over-reads (restored below). */
                uint8_t bSaved1, bSaved2;
                if (offEnd < PAGE_SIZE)
                {
                    bSaved1 = pbBuf1[offEnd];
                    bSaved2 = pbBuf2[offEnd];
                    pbBuf1[offEnd] = 0xff;
                    pbBuf2[offEnd] = 0xff;
                }
#ifdef _MSC_VER /* simple stupid compiler warnings */
                else
                    bSaved1 = bSaved2 = 0;
#endif

                /* Note: when offStart > offNonZero the unsigned subtraction
                   wraps to a huge value, so the '< cb' test correctly selects
                   the expected-NULL branch. */
                uint8_t *pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf1 + offStart, cb, bFiller1);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf1[offNonZero] : pbRet == NULL);

                pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf2 + offStart, cb, bFiller2);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf2[offNonZero] : pbRet == NULL);

                if (offEnd < PAGE_SIZE)
                {
                    pbBuf1[offEnd] = bSaved1;
                    pbBuf2[offEnd] = bSaved2;
                }
            }
        }

        /* Restore the filler byte for the next round. */
        pbBuf1[offNonZero] = 0;
        pbBuf2[offNonZero] = 0xf6;
    }

    RTTestSubDone(hTest);
}
2318
2319
2320typedef struct TSTBUF32 { uint32_t au32[384]; } TSTBUF32;
2321
/**
 * Worker for the ASMMemZero32 test.
 *
 * Zeroes the buffer twice (once as-is, once after a 0xfe fill) and verifies
 * the result with a dword scan, ASMMemFirstNonZero and ASMMemIsZero.
 *
 * @param   pBuf    The buffer to operate on.
 */
DECLINLINE(void) tstASMMemZero32Worker(TSTBUF32 *pBuf)
{
    ASMMemZero32(pBuf, sizeof(*pBuf));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
    if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
    if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
        RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");

    memset(pBuf, 0xfe, sizeof(*pBuf));
    ASMMemZero32(pBuf, sizeof(*pBuf));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
    if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
    if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
        RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");
}
2343
2344
/**
 * Tests ASMMemZero32.
 *
 * Zeroes three nearly-page-sized buffers bracketed by 0xff magic qwords and
 * verifies that every byte in between is cleared while the magics survive
 * (i.e. no overruns), then runs the worker via DO_SIMPLE_TEST_NO_SUB.
 */
void tstASMMemZero32(void)
{
    RTTestSub(g_hTest, "ASMMemZero32");

    struct
    {
        uint64_t u64Magic1;     /* canary before the payload */
        uint8_t abPage[PAGE_SIZE - 32];
        uint64_t u64Magic2;     /* canary after the payload */
    } Buf1, Buf2, Buf3;

    Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
    Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
    Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
    Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
    ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
    ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
    ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
    if (    Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
    {
        RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
    }
    for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
        if (Buf1.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
    for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
        if (Buf2.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
    for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
        if (Buf3.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);

    DO_SIMPLE_TEST_NO_SUB(tstASMMemZero32Worker, TSTBUF32);
}
2389
2390
/**
 * Worker for the ASMMemFill32 test.
 *
 * Fills the buffer with two different dword patterns (the second after a
 * 0xfe byte fill) and verifies every dword plus ASMMemFirstMismatchingU32.
 *
 * @param   pBuf    The buffer to operate on.
 */
DECLINLINE(void) tstASMMemFill32Worker(TSTBUF32 *pBuf)
{
    ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i] != UINT32_C(0xf629bce1))
            RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
    if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0xf629bce1)) returns non-NULL after ASMMemFill32!\n");

    memset(pBuf, 0xfe, sizeof(*pBuf));
    ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i] != UINT32_C(0x12345678))
            RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
    if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0x12345678)) returns non-NULL after ASMMemFill32!\n");
}
2408
2409void tstASMMemFill32(void)
2410{
2411 RTTestSub(g_hTest, "ASMMemFill32");
2412
2413 struct
2414 {
2415 uint64_t u64Magic1;
2416 uint32_t au32Page[PAGE_SIZE / 4];
2417 uint64_t u64Magic2;
2418 } Buf1;
2419 struct
2420 {
2421 uint64_t u64Magic1;
2422 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
2423 uint64_t u64Magic2;
2424 } Buf2;
2425 struct
2426 {
2427 uint64_t u64Magic1;
2428 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
2429 uint64_t u64Magic2;
2430 } Buf3;
2431
2432 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2433 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
2434 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2435 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2436 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
2437 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2438 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2439 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
2440 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2441 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
2442 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
2443 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
2444 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2445 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2446 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2447 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2448 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2449 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2450 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
2451 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
2452 if (Buf1.au32Page[i] != 0xdeadbeef)
2453 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
2454 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
2455 if (Buf2.au32Page[i] != 0xcafeff01)
2456 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
2457 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
2458 if (Buf3.au32Page[i] != 0xf00dd00f)
2459 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
2460
2461 DO_SIMPLE_TEST_NO_SUB(tstASMMemFill32Worker, TSTBUF32);
2462}
2463
2464
/**
 * Tests ASMProbeReadByte and ASMProbeReadBuffer.
 *
 * Probes a stack byte, then multi-page guarded allocations (head- and
 * tail-guarded) at their first and last bytes and over their full extent,
 * checking the probed values are read back correctly.
 *
 * @param   hTest   The test handle.
 */
void tstASMProbe(RTTEST hTest)
{
    RTTestSub(hTest, "ASMProbeReadByte/Buffer");

    uint8_t b = 42;
    RTTESTI_CHECK(ASMProbeReadByte(&b) == 42);
    ASMProbeReadBuffer(&b, sizeof(b));

    for (uint32_t cPages = 1; cPages < 16; cPages++)
    {
        uint8_t *pbBuf1 = (uint8_t *)RTTestGuardedAllocHead(hTest, cPages * PAGE_SIZE);
        uint8_t *pbBuf2 = (uint8_t *)RTTestGuardedAllocTail(hTest, cPages * PAGE_SIZE);
        RTTESTI_CHECK_RETV(pbBuf1 && pbBuf2);

        memset(pbBuf1, 0xf6, cPages * PAGE_SIZE);
        memset(pbBuf2, 0x42, cPages * PAGE_SIZE);

        /* Probe the extreme bytes next to the guard pages. */
        RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[cPages * PAGE_SIZE - 1]) == 0xf6);
        RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[cPages * PAGE_SIZE - 1]) == 0x42);
        RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[0]) == 0xf6);
        RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[0]) == 0x42);

        ASMProbeReadBuffer(pbBuf1, cPages * PAGE_SIZE);
        ASMProbeReadBuffer(pbBuf2, cPages * PAGE_SIZE);
    }
}
2491
2492
2493void tstASMMisc(void)
2494{
2495 RTTestSub(g_hTest, "Misc");
2496 for (uint32_t i = 0; i < 20; i++)
2497 {
2498 ASMWriteFence();
2499 ASMCompilerBarrier();
2500 ASMReadFence();
2501 ASMNopPause();
2502 ASMSerializeInstruction();
2503 ASMMemoryFence();
2504 }
2505}
2506
2507void tstASMMath(void)
2508{
2509 RTTestSub(g_hTest, "Math");
2510
2511 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
2512 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
2513
2514 uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
2515 CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");
2516
2517 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x00000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2518 CHECKVAL(u32, UINT32_C(0x00000001), "%#018RX32");
2519 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10000000), UINT32_C(0x80000000), UINT32_C(0x20000000));
2520 CHECKVAL(u32, UINT32_C(0x40000000), "%#018RX32");
2521 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x76543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2522 CHECKVAL(u32, UINT32_C(0x76543210), "%#018RX32");
2523 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2524 CHECKVAL(u32, UINT32_C(0xffffffff), "%#018RX32");
2525 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2526 CHECKVAL(u32, UINT32_C(0xfffffff0), "%#018RX32");
2527 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2528 CHECKVAL(u32, UINT32_C(0x05c584ce), "%#018RX32");
2529 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2530 CHECKVAL(u32, UINT32_C(0x2d860795), "%#018RX32");
2531
2532#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2533 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2534 CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
2535 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
2536 CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
2537 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2538 CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
2539 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2540 CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
2541 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2542 CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
2543 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2544 CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
2545 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2546 CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");
2547
2548# if 0 /* bird: question is whether this should trap or not:
2549 *
2550 * frank: Of course it must trap:
2551 *
2552 * 0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
2553 *
2554 * During the following division, the quotient must fit into a 32-bit register.
2555 * Therefore the smallest valid divisor is
2556 *
2557 * (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
2558 *
2559 * which is definitely greater than 0x3b9aca00.
2560 *
2561 * bird: No, the C version does *not* crash. So, the question is whether there's any
2562 * code depending on it not crashing.
2563 *
2564 * Of course the assembly versions of the code crash right now for the reasons you've
2565 * given, but the 32-bit MSC version does not crash.
2566 *
2567 * frank: The C version does not crash but delivers incorrect results for this case.
2568 * The reason is
2569 *
2570 * u.s.Hi = (unsigned long)(u64Hi / u32C);
2571 *
2572 * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
2573 * to 32 bit. If using this (optimized and fast) function we should just be sure that
2574 * the operands are in a valid range.
2575 */
2576 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
2577 CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
2578# endif
2579#endif /* AMD64 || X86 */
2580
2581 u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
2582 CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");
2583
2584 int32_t i32;
2585 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
2586 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2587 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
2588 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2589 i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
2590 CHECKVAL(i32, INT32_C(1), "%010RI32");
2591
2592 i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
2593 CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
2594 i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
2595 CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
2596}
2597
2598
2599void tstASMByteSwap(void)
2600{
2601 RTTestSub(g_hTest, "ASMByteSwap*");
2602
2603 uint64_t u64In = UINT64_C(0x0011223344556677);
2604 uint64_t u64Out = ASMByteSwapU64(u64In);
2605 CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
2606 CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
2607 u64Out = ASMByteSwapU64(u64Out);
2608 CHECKVAL(u64Out, u64In, "%#018RX64");
2609 u64In = UINT64_C(0x0123456789abcdef);
2610 u64Out = ASMByteSwapU64(u64In);
2611 CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
2612 CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
2613 u64Out = ASMByteSwapU64(u64Out);
2614 CHECKVAL(u64Out, u64In, "%#018RX64");
2615 u64In = 0;
2616 u64Out = ASMByteSwapU64(u64In);
2617 CHECKVAL(u64Out, u64In, "%#018RX64");
2618 u64In = UINT64_MAX;
2619 u64Out = ASMByteSwapU64(u64In);
2620 CHECKVAL(u64Out, u64In, "%#018RX64");
2621
2622 uint32_t u32In = UINT32_C(0x00112233);
2623 uint32_t u32Out = ASMByteSwapU32(u32In);
2624 CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
2625 CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
2626 u32Out = ASMByteSwapU32(u32Out);
2627 CHECKVAL(u32Out, u32In, "%#010RX32");
2628 u32In = UINT32_C(0x12345678);
2629 u32Out = ASMByteSwapU32(u32In);
2630 CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
2631 CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
2632 u32Out = ASMByteSwapU32(u32Out);
2633 CHECKVAL(u32Out, u32In, "%#010RX32");
2634 u32In = 0;
2635 u32Out = ASMByteSwapU32(u32In);
2636 CHECKVAL(u32Out, u32In, "%#010RX32");
2637 u32In = UINT32_MAX;
2638 u32Out = ASMByteSwapU32(u32In);
2639 CHECKVAL(u32Out, u32In, "%#010RX32");
2640
2641 uint16_t u16In = UINT16_C(0x0011);
2642 uint16_t u16Out = ASMByteSwapU16(u16In);
2643 CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
2644 CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
2645 u16Out = ASMByteSwapU16(u16Out);
2646 CHECKVAL(u16Out, u16In, "%#06RX16");
2647 u16In = UINT16_C(0x1234);
2648 u16Out = ASMByteSwapU16(u16In);
2649 CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
2650 CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
2651 u16Out = ASMByteSwapU16(u16Out);
2652 CHECKVAL(u16Out, u16In, "%#06RX16");
2653 u16In = 0;
2654 u16Out = ASMByteSwapU16(u16In);
2655 CHECKVAL(u16Out, u16In, "%#06RX16");
2656 u16In = UINT16_MAX;
2657 u16Out = ASMByteSwapU16(u16In);
2658 CHECKVAL(u16Out, u16In, "%#06RX16");
2659}
2660
2661
/**
 * Benchmarks the atomic read/write/exchange/compare-exchange operations and a
 * handful of fencing/serializing instructions, reporting the average cost per
 * call via RTTestValue.
 */
void tstASMBench(void)
{
    /*
     * Make this static. We don't want to have this located on the stack.
     */
    static uint8_t volatile s_u8;
    static int8_t volatile s_i8;
    static uint16_t volatile s_u16;
    static int16_t volatile s_i16;
    static uint32_t volatile s_u32;
    static int32_t volatile s_i32;
    static uint64_t volatile s_u64;
    static int64_t volatile s_i64;
    unsigned i;
    const unsigned cRounds = _16M; /* Must be multiple of 8 */
    uint64_t u64Elapsed;

    RTTestSub(g_hTest, "Benchmarking");

    /* Disabled (#if 0) single-shot TSC variant of BENCH; the active variant
       below uses RTTimeNanoTS and an 8x unrolled loop instead. */
#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32))
# define BENCH(op, str) \
    do { \
        RTThreadYield(); \
        u64Elapsed = ASMReadTSC(); \
        for (i = cRounds; i > 0; i--) \
            op; \
        u64Elapsed = ASMReadTSC() - u64Elapsed; \
        RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
    } while (0)
#else
    /* Times cRounds executions of 'op' (unrolled 8x per iteration, which is
       why cRounds must be a multiple of 8) and reports ns per call. */
# define BENCH(op, str) \
    do { \
        RTThreadYield(); \
        u64Elapsed = RTTimeNanoTS(); \
        for (i = cRounds / 8; i > 0; i--) \
        { \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
        } \
        u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
        RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
    } while (0)
#endif
    /* TSC-based variant for ops that themselves perturb the timing source
       (serializing instructions); reported unit is raw ticks, hence
       RTTESTUNIT_NONE.  Falls back to BENCH where ASMReadTSC is unavailable. */
#if (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)) && !defined(GCC44_32BIT_PIC)
# define BENCH_TSC(op, str) \
    do { \
        RTThreadYield(); \
        u64Elapsed = ASMReadTSC(); \
        for (i = cRounds / 8; i > 0; i--) \
        { \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
            op; \
        } \
        u64Elapsed = ASMReadTSC() - u64Elapsed; \
        RTTestValue(g_hTest, str, u64Elapsed / cRounds, /*RTTESTUNIT_TICKS_PER_CALL*/ RTTESTUNIT_NONE); \
    } while (0)
#else
# define BENCH_TSC(op, str) BENCH(op, str)
#endif

    /* Baseline plain store, then the unordered and ordered atomic reads. */
    BENCH(s_u32 = 0, "s_u32 = 0");
    BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
    BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
    BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
    BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
    BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
    BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
    BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
    BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
    BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
    BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
    BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
    BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
    BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
    BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
    BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
    BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
    /* Unordered and ordered atomic writes. */
    BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
    BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
    BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
    BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
    BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
    BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
    BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
    BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
    BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
    BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
    BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
    BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
    BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
    BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
    BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
    BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
    /* Atomic exchange. */
    BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
    BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
    BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
    BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
    BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
    BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
    BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
    BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
    /* Compare-exchange: the variables hold 0, so the (0, 0) cases always
       succeed and the "/neg" (0, 1) cases always fail the comparison. */
    BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
    BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
    BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
    BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
    BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
    BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
    BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
    BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
    /* Atomic arithmetic and bit ops. */
    BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
    BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
    BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
    BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
    BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
    BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
    BENCH(ASMAtomicUoIncU32(&s_u32), "ASMAtomicUoIncU32");
    BENCH(ASMAtomicUoDecU32(&s_u32), "ASMAtomicUoDecU32");
    BENCH(ASMAtomicUoAndU32(&s_u32, 0xffffffff), "ASMAtomicUoAndU32");
    BENCH(ASMAtomicUoOrU32(&s_u32, 0xffffffff), "ASMAtomicUoOrU32");
#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
    /* These serialize execution, so measure with the TSC variant. */
    BENCH_TSC(ASMSerializeInstructionCpuId(), "ASMSerializeInstructionCpuId");
    BENCH_TSC(ASMSerializeInstructionIRet(), "ASMSerializeInstructionIRet");
#endif
    BENCH(ASMReadFence(), "ASMReadFence");
    BENCH(ASMWriteFence(), "ASMWriteFence");
    BENCH(ASMMemoryFence(), "ASMMemoryFence");
    BENCH(ASMSerializeInstruction(), "ASMSerializeInstruction");
    BENCH(ASMNopPause(), "ASMNopPause");

    /* The Darwin gcc does not like this ... */
#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
    BENCH(s_u32 = ASMGetApicIdExt0B(), "ASMGetApicIdExt0B");
    BENCH(s_u32 = ASMGetApicIdExt8000001E(), "ASMGetApicIdExt8000001E");
#endif
#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32))
    BENCH(s_u64 = ASMReadTSC(), "ASMReadTSC");
#endif
#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    uint32_t uAux;
    /* RDTSCP is optional; only benchmark it when CPUID reports support. */
    if ( ASMHasCpuId()
        && ASMIsValidExtRange(ASMCpuId_EAX(0x80000000))
        && (ASMCpuId_EDX(0x80000001) & X86_CPUID_EXT_FEATURE_EDX_RDTSCP) )
    {
        BENCH_TSC(ASMSerializeInstructionRdTscp(), "ASMSerializeInstructionRdTscp");
        BENCH(s_u64 = ASMReadTscWithAux(&uAux), "ASMReadTscWithAux");
    }
    /* Buffer laid out so the IDTR base pointer can be tested both at a
       misaligned offset (Unaligned) and at a natural one (s.Aligned). */
    union
    {
        uint64_t u64[2];
        RTIDTR Unaligned;
        struct
        {
            uint16_t abPadding[3];
            RTIDTR Aligned;
        } s;
    } uBuf;
    Assert(((uintptr_t)&uBuf.Unaligned.pIdt & (sizeof(uintptr_t) - 1)) != 0);
    BENCH(ASMGetIDTR(&uBuf.Unaligned), "ASMGetIDTR/unaligned");
    Assert(((uintptr_t)&uBuf.s.Aligned.pIdt & (sizeof(uintptr_t) - 1)) == 0);
    BENCH(ASMGetIDTR(&uBuf.s.Aligned), "ASMGetIDTR/aligned");
#endif

#undef BENCH
}
2839
2840
2841int main(int argc, char **argv)
2842{
2843 RT_NOREF_PV(argc); RT_NOREF_PV(argv);
2844
2845 int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
2846 if (rc)
2847 return rc;
2848 RTTestBanner(g_hTest);
2849
2850 /*
2851 * Execute the tests.
2852 */
2853#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2854 tstASMCpuId();
2855 //bruteForceCpuId();
2856#endif
2857#if 1
2858 tstASMAtomicRead();
2859 tstASMAtomicWrite();
2860 tstASMAtomicXchg();
2861 tstASMAtomicCmpXchg();
2862 tstASMAtomicCmpXchgEx();
2863
2864 tstASMAtomicAdd();
2865 tstASMAtomicDecInc();
2866 tstASMAtomicAndOrXor();
2867
2868 tstASMMemZeroPage();
2869 tstASMMemIsZeroPage(g_hTest);
2870 tstASMMemFirstMismatchingU8(g_hTest);
2871 tstASMMemZero32();
2872 tstASMMemFill32();
2873 tstASMProbe(g_hTest);
2874
2875 tstASMMisc();
2876
2877 tstASMMath();
2878
2879 tstASMByteSwap();
2880
2881 tstASMBench();
2882#endif
2883
2884 /*
2885 * Show the result.
2886 */
2887 return RTTestSummaryAndDestroy(g_hTest);
2888}
2889
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette