VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 100762

Last change on this file since 100762 was 99775, checked in by vboxsync, 19 months ago

*: Mark functions as static if not used outside of a given compilation unit. Enables the compiler to optimize inlining, reduces the symbol tables, exposes unused functions and in some rare cases exposes mismatches between function declarations and definitions, but most importantly reduces the number of parfait reports for the extern-function-no-forward-declaration category. This should not result in any functional changes, bugref:3409

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 167.0 KB
Line 
1/* $Id: tstRTInlineAsm.cpp 99775 2023-05-12 12:21:58Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#include <iprt/asm.h>
42#include <iprt/asm-math.h>
43
44/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
45 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
46 * fine. */
47#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
48# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
49# define GCC44_32BIT_PIC
50# endif
51#endif
52
53#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
54# include <iprt/asm-amd64-x86.h>
55# include <iprt/x86.h>
56#elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
57# include <iprt/asm-arm.h>
58# include <iprt/time.h>
59#else
60# include <iprt/time.h>
61#endif
62#include <iprt/mem.h>
63#include <iprt/param.h>
64#include <iprt/rand.h>
65#include <iprt/stream.h>
66#include <iprt/string.h>
67#include <iprt/thread.h>
68#include <iprt/test.h>
69#include <iprt/time.h>
70
71
72
73/*********************************************************************************************************************************
74* Defined Constants And Macros *
75*********************************************************************************************************************************/
/** Checks that @a val equals @a expect; on mismatch a test failure is
 *  reported with both values formatted using @a fmt.
 *  Note: @a val and @a expect are each evaluated twice on failure. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)

/** Evaluates @a op exactly once into a local of type @a type and reports a
 *  test failure unless the result equals @a expect (cast to @a type). */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)

/** Checks both the return value of @a a_Operation and the value left in
 *  *a_pVar afterwards, using a single type/format for both. */
#define CHECK_OP_AND_VAL(a_Type, a_Fmt, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_Fmt, a_Type); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_Fmt); \
    } while (0)

/** Like CHECK_OP_AND_VAL, but with separate type/format for the return
 *  value and for the variable. */
#define CHECK_OP_AND_VAL_EX(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
    } while (0)

/** Like CHECK_OP_AND_VAL_EX, but additionally checks a second variable
 *  @a a_uVar2 against @a a_ExpectVarVal2 (same format as the first). */
#define CHECK_OP_AND_VAL_EX2(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_uVar2, a_Operation, a_ExpectRetVal, a_ExpectVarVal, a_ExpectVarVal2) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
        CHECKVAL(a_uVar2, a_ExpectVarVal2, a_FmtVar); \
    } while (0)

/** Checks the high and low 64-bit halves of the 128-bit value that
 *  @a a_pu128Val points to against the two expected 64-bit values. */
#define CHECKVAL128(a_pu128Val, a_u64HiExpect, a_u64LoExpect) \
    do \
    { \
        if ((a_pu128Val)->s.Hi != (a_u64HiExpect) || (a_pu128Val)->s.Lo != (a_u64LoExpect)) \
            RTTestFailed(g_hTest, "%s, %d: " #a_pu128Val ": expected %#RX64'%016RX64 got %#RX64'%016RX64\n", \
                         __FUNCTION__, __LINE__, (a_u64HiExpect), (a_u64LoExpect), (a_pu128Val)->s.Hi, (a_pu128Val)->s.Lo); \
    } while (0)
/** CHECKVAL128 variant that wraps the expected halves in UINT64_C() so
 *  plain integer literals can be passed without suffixes. */
#define CHECKVAL128_C(a_pu128Val, a_u64HiExpect, a_u64LoExpect) \
    do \
    { \
        if ((a_pu128Val)->s.Hi != UINT64_C(a_u64HiExpect) || (a_pu128Val)->s.Lo != UINT64_C(a_u64LoExpect)) \
            RTTestFailed(g_hTest, "%s, %d: " #a_pu128Val ": expected %#RX64'%016RX64 got %#RX64'%016RX64\n", \
                         __FUNCTION__, __LINE__, UINT64_C(a_u64HiExpect), UINT64_C(a_u64LoExpect), \
                         (a_pu128Val)->s.Hi, (a_pu128Val)->s.Lo); \
    } while (0)
/** Checks an operation's return value and a 128-bit variable in one go. */
#define CHECK_OP_AND_VAL_128(a_TypeRet, a_FmtRet, a_pu128Val, a_Operation, a_ExpectRetVal, a_u64HiExpect, a_u64LoExpect) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL128(a_pu128Val, a_u64HiExpect, a_u64LoExpect); \
    } while (0)
/** CHECK_OP_AND_VAL_128 variant using CHECKVAL128_C (UINT64_C wrapping). */
#define CHECK_OP_AND_VAL_128_C(a_TypeRet, a_FmtRet, a_pu128Val, a_Operation, a_ExpectRetVal, a_u64HiExpect, a_u64LoExpect) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL128_C(a_pu128Val, a_u64HiExpect, a_u64LoExpect); \
    } while (0)
139
/**
 * Calls a worker function with the worker variable placed in guarded heap
 * allocations: first a head-guarded block, then a tail-guarded one, so
 * out-of-bounds accesses on either side trip the guard pages.  Stack
 * storage is deliberately not exercised here (see DO_SIMPLE_TEST_NO_SUB).
 */
#define DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type) \
    do \
    { \
        type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        a_WorkerFunction(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
        \
        pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        a_WorkerFunction(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
    } while (0)
156
157
/**
 * Calls a worker function with different worker variable storage types:
 * a plain stack variable first, then the guarded heap variants via
 * DO_SIMPLE_TEST_NO_SUB_NO_STACK.
 */
#define DO_SIMPLE_TEST_NO_SUB(a_WorkerFunction, type) \
    do \
    { \
        type StackVar; \
        a_WorkerFunction(&StackVar); \
        DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type); \
    } while (0)

/**
 * Declares a subtest named @a name and runs tst<name>Worker against all
 * the storage variants (stack + guarded heap).
 */
#define DO_SIMPLE_TEST(name, type) \
    do \
    { \
        RTTestISub(#name); \
        DO_SIMPLE_TEST_NO_SUB(tst ## name ## Worker, type); \
    } while (0)
178
179
180/*********************************************************************************************************************************
181* Global Variables *
182*********************************************************************************************************************************/
183/** The test instance. */
184static RTTEST g_hTest;
185
186
187
188#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
189
190static const char *getCacheAss(unsigned u)
191{
192 if (u == 0)
193 return "res0 ";
194 if (u == 1)
195 return "direct";
196 if (u >= 256)
197 return "???";
198
199 char *pszRet = NULL;
200 RTStrAPrintf(&pszRet, "%d way", u);
201 RTMEM_WILL_LEAK(pszRet);
202 return pszRet;
203}
204
205
/**
 * Translates the 4-bit L2 cache/TLB associativity encoding to a
 * display string.
 *
 * @returns Read-only string.
 * @param   u   The raw 4-bit associativity value from CPUID.
 */
static const char *getL2CacheAss(unsigned u)
{
    static const char * const s_apszAss[] =
    {
        /* 0 */ "off ",
        /* 1 */ "direct",
        /* 2 */ "2 way ",
        /* 3 */ "res3 ",
        /* 4 */ "4 way ",
        /* 5 */ "res5 ",
        /* 6 */ "8 way ",
        /* 7 */ "res7 ",
        /* 8 */ "16 way",
        /* 9 */ "res9 ",
        /* 10 */ "res10 ",
        /* 11 */ "res11 ",
        /* 12 */ "res12 ",
        /* 13 */ "res13 ",
        /* 14 */ "res14 ",
        /* 15 */ "fully ",
    };
    if (u < sizeof(s_apszAss) / sizeof(s_apszAss[0]))
        return s_apszAss[u];
    return "????";
}
230
231
232/**
233 * Test and dump all possible info from the CPUID instruction.
234 *
235 * @remark Bits shared with the libc cpuid.c program. This all written by me, so no worries.
236 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
237 */
238static void tstASMCpuId(void)
239{
240 RTTestISub("ASMCpuId");
241
242 unsigned iBit;
243 struct
244 {
245 uint32_t uEBX, uEAX, uEDX, uECX;
246 } s;
247 if (!ASMHasCpuId())
248 {
249 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
250 return;
251 }
252
253 /*
254 * Try the 0 function and use that for checking the ASMCpuId_* variants.
255 */
256 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
257
258 uint32_t u32;
259
260 u32 = ASMCpuId_EAX(0);
261 CHECKVAL(u32, s.uEAX, "%x");
262 u32 = ASMCpuId_EBX(0);
263 CHECKVAL(u32, s.uEBX, "%x");
264 u32 = ASMCpuId_ECX(0);
265 CHECKVAL(u32, s.uECX, "%x");
266 u32 = ASMCpuId_EDX(0);
267 CHECKVAL(u32, s.uEDX, "%x");
268
269 uint32_t uECX2 = s.uECX - 1;
270 uint32_t uEDX2 = s.uEDX - 1;
271 ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
272 CHECKVAL(uECX2, s.uECX, "%x");
273 CHECKVAL(uEDX2, s.uEDX, "%x");
274
275 uint32_t uEAX2 = s.uEAX - 1;
276 uint32_t uEBX2 = s.uEBX - 1;
277 uECX2 = s.uECX - 1;
278 uEDX2 = s.uEDX - 1;
279 ASMCpuIdExSlow(0, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
280 CHECKVAL(uEAX2, s.uEAX, "%x");
281 CHECKVAL(uEBX2, s.uEBX, "%x");
282 CHECKVAL(uECX2, s.uECX, "%x");
283 CHECKVAL(uEDX2, s.uEDX, "%x");
284
285 /*
286 * Check the extended APIC stuff.
287 */
288 uint32_t idExtApic;
289 if (ASMCpuId_EAX(0) >= 0xb)
290 {
291 uint8_t idApic = ASMGetApicId();
292 do
293 {
294 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
295 ASMCpuIdExSlow(0xb, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
296 idExtApic = ASMGetApicIdExt0B();
297 } while (ASMGetApicId() != idApic);
298
299 CHECKVAL(uEDX2, idExtApic, "%x");
300 if (idApic != (uint8_t)idExtApic && uECX2 != 0)
301 RTTestIFailed("ASMGetApicIdExt0B() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
302 }
303 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
304 {
305 uint8_t idApic = ASMGetApicId();
306 do
307 {
308 uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
309 ASMCpuIdExSlow(0x8000001e, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
310 idExtApic = ASMGetApicIdExt8000001E();
311 } while (ASMGetApicId() != idApic);
312 CHECKVAL(uEAX2, idExtApic, "%x");
313 if (idApic != (uint8_t)idExtApic)
314 RTTestIFailed("ASMGetApicIdExt8000001E() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
315 }
316
317 /*
318 * Done testing, dump the information.
319 */
320 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
321 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
322 const uint32_t cFunctions = s.uEAX;
323
324 /* raw dump */
325 RTTestIPrintf(RTTESTLVL_ALWAYS,
326 "\n"
327 " RAW Standard CPUIDs\n"
328 "Function eax ebx ecx edx\n");
329 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
330 {
331 ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
332 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
333 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
334
335 /* Some leafs output depend on the initial value of ECX.
336 * The same seems to apply to invalid standard functions */
337 if (iStd > cFunctions)
338 continue;
339 if (iStd == 0x04) /* Deterministic Cache Parameters Leaf */
340 for (uint32_t uECX = 1; s.uEAX & 0x1f; uECX++)
341 {
342 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
343 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
344 RTTESTI_CHECK_BREAK(uECX < 128);
345 }
346 else if (iStd == 0x07) /* Structured Extended Feature Flags */
347 {
348 uint32_t uMax = s.uEAX;
349 for (uint32_t uECX = 1; uECX < uMax; uECX++)
350 {
351 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
352 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
353 RTTESTI_CHECK_BREAK(uECX < 128);
354 }
355 }
356 else if (iStd == 0x0b) /* Extended Topology Enumeration Leafs */
357 for (uint32_t uECX = 1; (s.uEAX & 0x1f) && (s.uEBX & 0xffff); uECX++)
358 {
359 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
360 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
361 RTTESTI_CHECK_BREAK(uECX < 128);
362 }
363 else if (iStd == 0x0d) /* Extended State Enumeration Leafs */
364 for (uint32_t uECX = 1; s.uEAX != 0 || s.uEBX != 0 || s.uECX != 0 || s.uEDX != 0; uECX++)
365 {
366 ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
367 RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
368 RTTESTI_CHECK_BREAK(uECX < 128);
369 }
370 else if ( iStd == 0x0f /* Platform quality of service monitoring (PQM) */
371 || iStd == 0x10 /* Platform quality of service enforcement (PQE) */
372 || iStd == 0x12 /* SGX Enumeration */
373 || iStd == 0x14 /* Processor Trace Enumeration */
374 || iStd == 0x17 /* SoC Vendor Attribute Enumeration */
375 || iStd == 0x18 /* Deterministic Address Translation Parameters */)
376 {
377 /** @todo */
378 }
379 else
380 {
381 u32 = ASMCpuId_EAX(iStd);
382 CHECKVAL(u32, s.uEAX, "%x");
383
384 uint32_t u32EbxMask = UINT32_MAX;
385 if (iStd == 1)
386 u32EbxMask = UINT32_C(0x00ffffff); /* Omit the local apic ID in case we're rescheduled. */
387 u32 = ASMCpuId_EBX(iStd);
388 CHECKVAL(u32 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
389
390 u32 = ASMCpuId_ECX(iStd);
391 CHECKVAL(u32, s.uECX, "%x");
392 u32 = ASMCpuId_EDX(iStd);
393 CHECKVAL(u32, s.uEDX, "%x");
394
395 uECX2 = s.uECX - 1;
396 uEDX2 = s.uEDX - 1;
397 ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
398 CHECKVAL(uECX2, s.uECX, "%x");
399 CHECKVAL(uEDX2, s.uEDX, "%x");
400
401 uEAX2 = s.uEAX - 1;
402 uEBX2 = s.uEBX - 1;
403 uECX2 = s.uECX - 1;
404 uEDX2 = s.uEDX - 1;
405 ASMCpuId(iStd, &uEAX2, &uEBX2, &uECX2, &uEDX2);
406 CHECKVAL(uEAX2, s.uEAX, "%x");
407 CHECKVAL(uEBX2 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
408 CHECKVAL(uECX2, s.uECX, "%x");
409 CHECKVAL(uEDX2, s.uEDX, "%x");
410 }
411 }
412
413 /*
414 * Understandable output
415 */
416 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
417 RTTestIPrintf(RTTESTLVL_ALWAYS,
418 "Name: %.04s%.04s%.04s\n"
419 "Support: 0-%u\n",
420 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
421 bool const fIntel = RTX86IsIntelCpu(s.uEBX, s.uECX, s.uEDX);
422
423 /*
424 * Get Features.
425 */
426 if (cFunctions >= 1)
427 {
428 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
429 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
430 RTTestIPrintf(RTTESTLVL_ALWAYS,
431 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
432 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
433 "Stepping: %d\n"
434 "Type: %d (%s)\n"
435 "APIC ID: %#04x\n"
436 "Logical CPUs: %d\n"
437 "CLFLUSH Size: %d\n"
438 "Brand ID: %#04x\n",
439 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, RTX86GetCpuFamily(s.uEAX),
440 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, RTX86GetCpuModel(s.uEAX, fIntel),
441 RTX86GetCpuStepping(s.uEAX),
442 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
443 (s.uEBX >> 24) & 0xff,
444 (s.uEBX >> 16) & 0xff,
445 (s.uEBX >> 8) & 0xff,
446 (s.uEBX >> 0) & 0xff);
447
448 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
449 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
450 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
451 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
452 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
453 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
454 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
455 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
456 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
457 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
458 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
459 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
460 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
461 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
462 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
463 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
464 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
465 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
466 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
467 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
468 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
469 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
470 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
471 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
472 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
473 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
474 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
475 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
476 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
477 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
478 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
479 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
480 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
481 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
482
483 /** @todo check intel docs. */
484 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
485 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
486 for (iBit = 1; iBit < 13; iBit++)
487 if (s.uECX & RT_BIT(iBit))
488 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
489 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
490 for (iBit = 14; iBit < 32; iBit++)
491 if (s.uECX & RT_BIT(iBit))
492 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
493 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
494 }
495 if (ASMCpuId_EAX(0) >= 0xb)
496 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 0b): %#010x\n", ASMGetApicIdExt0B());
497
498 /*
499 * Extended.
500 * Implemented after AMD specs.
501 */
502 /** @todo check out the intel specs. */
503 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
504 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
505 {
506 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
507 return;
508 }
509 const uint32_t cExtFunctions = s.uEAX | 0x80000000;
510
511 /* raw dump */
512 RTTestIPrintf(RTTESTLVL_ALWAYS,
513 "\n"
514 " RAW Extended CPUIDs\n"
515 "Function eax ebx ecx edx\n");
516 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
517 {
518 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
519 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
520 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
521
522 if (iExt > cExtFunctions)
523 continue; /* Invalid extended functions seems change the value if ECX changes */
524 if (iExt == 0x8000001d)
525 continue; /* Takes cache level in ecx. */
526
527 u32 = ASMCpuId_EAX(iExt);
528 CHECKVAL(u32, s.uEAX, "%x");
529 u32 = ASMCpuId_EBX(iExt);
530 CHECKVAL(u32, s.uEBX, "%x");
531 u32 = ASMCpuId_ECX(iExt);
532 CHECKVAL(u32, s.uECX, "%x");
533 u32 = ASMCpuId_EDX(iExt);
534 CHECKVAL(u32, s.uEDX, "%x");
535
536 uECX2 = s.uECX - 1;
537 uEDX2 = s.uEDX - 1;
538 ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
539 CHECKVAL(uECX2, s.uECX, "%x");
540 CHECKVAL(uEDX2, s.uEDX, "%x");
541
542 uEAX2 = s.uEAX - 1;
543 uEBX2 = s.uEBX - 1;
544 uECX2 = s.uECX - 1;
545 uEDX2 = s.uEDX - 1;
546 ASMCpuId(iExt, &uEAX2, &uEBX2, &uECX2, &uEDX2);
547 CHECKVAL(uEAX2, s.uEAX, "%x");
548 CHECKVAL(uEBX2, s.uEBX, "%x");
549 CHECKVAL(uECX2, s.uECX, "%x");
550 CHECKVAL(uEDX2, s.uEDX, "%x");
551 }
552
553 /*
554 * Understandable output
555 */
556 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
557 RTTestIPrintf(RTTESTLVL_ALWAYS,
558 "Ext Name: %.4s%.4s%.4s\n"
559 "Ext Supports: 0x80000000-%#010x\n",
560 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
561
562 if (cExtFunctions >= 0x80000001)
563 {
564 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
565 RTTestIPrintf(RTTESTLVL_ALWAYS,
566 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
567 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
568 "Stepping: %d\n"
569 "Brand ID: %#05x\n",
570 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, RTX86GetCpuFamily(s.uEAX),
571 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, RTX86GetCpuModel(s.uEAX, fIntel),
572 RTX86GetCpuStepping(s.uEAX),
573 s.uEBX & 0xfff);
574
575 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
576 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
577 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
578 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
579 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
580 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
581 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
582 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
583 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
584 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
585 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
586 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
587 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
588 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
589 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
590 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
591 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
592 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
593 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
594 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
595 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
596 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
597 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
598 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
599 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
600 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
601 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
602 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
603 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
604 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
605 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
606 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
607 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
608 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
609
610 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
611 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
612 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
613 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
614 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
615 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
616 for (iBit = 5; iBit < 32; iBit++)
617 if (s.uECX & RT_BIT(iBit))
618 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
619 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
620 }
621
622 char szString[4*4*3+1] = {0};
623 if (cExtFunctions >= 0x80000002)
624 ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
625 if (cExtFunctions >= 0x80000003)
626 ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
627 if (cExtFunctions >= 0x80000004)
628 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
629 if (cExtFunctions >= 0x80000002)
630 RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);
631
632 if (cExtFunctions >= 0x80000005)
633 {
634 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
635 RTTestIPrintf(RTTESTLVL_ALWAYS,
636 "TLB 2/4M Instr/Uni: %s %3d entries\n"
637 "TLB 2/4M Data: %s %3d entries\n",
638 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
639 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
640 RTTestIPrintf(RTTESTLVL_ALWAYS,
641 "TLB 4K Instr/Uni: %s %3d entries\n"
642 "TLB 4K Data: %s %3d entries\n",
643 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
644 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
645 RTTestIPrintf(RTTESTLVL_ALWAYS,
646 "L1 Instr Cache Line Size: %d bytes\n"
647 "L1 Instr Cache Lines Per Tag: %d\n"
648 "L1 Instr Cache Associativity: %s\n"
649 "L1 Instr Cache Size: %d KB\n",
650 (s.uEDX >> 0) & 0xff,
651 (s.uEDX >> 8) & 0xff,
652 getCacheAss((s.uEDX >> 16) & 0xff),
653 (s.uEDX >> 24) & 0xff);
654 RTTestIPrintf(RTTESTLVL_ALWAYS,
655 "L1 Data Cache Line Size: %d bytes\n"
656 "L1 Data Cache Lines Per Tag: %d\n"
657 "L1 Data Cache Associativity: %s\n"
658 "L1 Data Cache Size: %d KB\n",
659 (s.uECX >> 0) & 0xff,
660 (s.uECX >> 8) & 0xff,
661 getCacheAss((s.uECX >> 16) & 0xff),
662 (s.uECX >> 24) & 0xff);
663 }
664
665 if (cExtFunctions >= 0x80000006)
666 {
667 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
668 RTTestIPrintf(RTTESTLVL_ALWAYS,
669 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
670 "L2 TLB 2/4M Data: %s %4d entries\n",
671 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
672 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
673 RTTestIPrintf(RTTESTLVL_ALWAYS,
674 "L2 TLB 4K Instr/Uni: %s %4d entries\n"
675 "L2 TLB 4K Data: %s %4d entries\n",
676 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
677 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
678 RTTestIPrintf(RTTESTLVL_ALWAYS,
679 "L2 Cache Line Size: %d bytes\n"
680 "L2 Cache Lines Per Tag: %d\n"
681 "L2 Cache Associativity: %s\n"
682 "L2 Cache Size: %d KB\n",
683 (s.uEDX >> 0) & 0xff,
684 (s.uEDX >> 8) & 0xf,
685 getL2CacheAss((s.uEDX >> 12) & 0xf),
686 (s.uEDX >> 16) & 0xffff);
687 }
688
689 if (cExtFunctions >= 0x80000007)
690 {
691 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
692 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
693 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
694 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
695 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
696 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
697 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
698 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
699 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
700 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
701 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
702 for (iBit = 9; iBit < 32; iBit++)
703 if (s.uEDX & RT_BIT(iBit))
704 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
705 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
706 }
707
708 if (cExtFunctions >= 0x80000008)
709 {
710 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
711 RTTestIPrintf(RTTESTLVL_ALWAYS,
712 "Physical Address Width: %d bits\n"
713 "Virtual Address Width: %d bits\n"
714 "Guest Physical Address Width: %d bits\n",
715 (s.uEAX >> 0) & 0xff,
716 (s.uEAX >> 8) & 0xff,
717 (s.uEAX >> 16) & 0xff);
718 RTTestIPrintf(RTTESTLVL_ALWAYS,
719 "Physical Core Count: %d\n",
720 ((s.uECX >> 0) & 0xff) + 1);
721 if ((s.uECX >> 12) & 0xf)
722 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
723 }
724
725 if (cExtFunctions >= 0x8000000a)
726 {
727 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
728 RTTestIPrintf(RTTESTLVL_ALWAYS,
729 "SVM Revision: %d (%#x)\n"
730 "Number of Address Space IDs: %d (%#x)\n",
731 s.uEAX & 0xff, s.uEAX & 0xff,
732 s.uEBX, s.uEBX);
733 }
734 if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
735 RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 8000001b): %#010x\n", ASMGetApicIdExt8000001E());
736}
737
738# if 0
/**
 * Brute-forces through all 2^32 CPUID leaf values via ASMCpuIdExSlow,
 * printing a line whenever any register output differs from the previous
 * leaf's (ignoring EAX echoing the leaf number and zero echoes), and at
 * every 128M-leaf boundary so progress is visible.  Compiled out (#if 0)
 * by default.
 */
static void bruteForceCpuId(void)
{
    RTTestISub("brute force CPUID leafs");
    uint32_t auPrevValues[4] = { 0, 0, 0, 0};
    uint32_t uLeaf = 0;
    do
    {
        uint32_t auValues[4];
        ASMCpuIdExSlow(uLeaf, 0, 0, 0, &auValues[0], &auValues[1], &auValues[2], &auValues[3]);
        /* Print if anything changed, or when uLeaf is a multiple of 0x08000000. */
        if ( (auValues[0] != auPrevValues[0] && auValues[0] != uLeaf)
            || (auValues[1] != auPrevValues[1] && auValues[1] != 0)
            || (auValues[2] != auPrevValues[2] && auValues[2] != 0)
            || (auValues[3] != auPrevValues[3] && auValues[3] != 0)
            || (uLeaf & (UINT32_C(0x08000000) - UINT32_C(1))) == 0)
        {
            RTTestIPrintf(RTTESTLVL_ALWAYS,
                          "%08x: %08x %08x %08x %08x\n", uLeaf,
                          auValues[0], auValues[1], auValues[2], auValues[3]);
        }
        auPrevValues[0] = auValues[0];
        auPrevValues[1] = auValues[1];
        auPrevValues[2] = auValues[2];
        auPrevValues[3] = auValues[3];

        //uint32_t uSubLeaf = 0;
        //do
        //{
        //
        //
        //} while (false);
    } while (uLeaf++ < UINT32_MAX);
}
771# endif
772
773#endif /* AMD64 || X86 */
774
/** Stores @a a_Val in *a_pVar, checks that @a a_Function reads it back
 *  unchanged, and that the variable still holds @a a_Val afterwards. */
#define TEST_READ(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { *a_pVar = a_Val; CHECKOP(a_Function(a_pVar), a_Val, a_Fmt, a_Type); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
777
778DECLINLINE(void) tstASMAtomicReadU8Worker(uint8_t volatile *pu8)
779{
780 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 0);
781 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 1);
782 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 2);
783 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 16);
784 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
785 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
786 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 127);
787 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 128);
788 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 169);
789 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 239);
790 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 254);
791 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 255);
792
793 int8_t volatile *pi8 = (int8_t volatile *)pu8;
794 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MAX);
795 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MIN);
796 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, 42);
797 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, -21);
798
799 bool volatile *pf = (bool volatile *)pu8;
800 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, true);
801 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, false);
802}
803
804
805DECLINLINE(void) tstASMAtomicUoReadU8Worker(uint8_t volatile *pu8)
806{
807 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 0);
808 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 1);
809 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 2);
810 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 16);
811 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
812 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
813 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 127);
814 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 128);
815 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 169);
816 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 239);
817 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 254);
818 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 255);
819
820 int8_t volatile *pi8 = (int8_t volatile *)pu8;
821 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MAX);
822 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MIN);
823 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, 42);
824 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, -21);
825
826 bool volatile *pf = (bool volatile *)pu8;
827 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, true);
828 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, false);
829}
830
831
832DECLINLINE(void) tstASMAtomicReadU16Worker(uint16_t volatile *pu16)
833{
834 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 0);
835 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 19983);
836 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, INT16_MAX);
837 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, UINT16_MAX);
838
839 int16_t volatile *pi16 = (int16_t volatile *)pu16;
840 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MAX);
841 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MIN);
842 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, 42);
843 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, -21);
844}
845
846
847DECLINLINE(void) tstASMAtomicUoReadU16Worker(uint16_t volatile *pu16)
848{
849 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 0);
850 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 19983);
851 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, INT16_MAX);
852 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, UINT16_MAX);
853
854 int16_t volatile *pi16 = (int16_t volatile *)pu16;
855 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MAX);
856 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MIN);
857 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, 42);
858 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, -21);
859}
860
861
/**
 * Worker exercising the ordered 32-bit atomic reads: U32, S32 and, on 32-bit
 * hosts only, the size_t / pointer / handle variants which then share the
 * very same 32-bit cell.
 *
 * @param   pu32    The 32-bit test cell.
 */
DECLINLINE(void) tstASMAtomicReadU32Worker(uint32_t volatile *pu32)
{
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 0);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 19983);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT32_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT32_MAX);

    /* The same cell viewed as a signed 32-bit integer. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MAX);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MIN);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, 42);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, -21);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t, void pointers and handles are all 32-bit, so
       they can be exercised on this same cell. */
    /* NOTE(review): "%#llz" looks like a typo for "%#zx" (the write workers
       use "%#zx"); only failure messages are affected -- verify. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
    TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);

    /* The two handle reads below rely on the ~42 value left in the cell by
       the preceding pointer test. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicReadHandle(phEvt, &hEvt);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
#endif
}
899
900
/**
 * Worker exercising the unordered 32-bit atomic reads: U32, S32 and, on
 * 32-bit hosts only, the size_t / pointer / handle variants which then share
 * the very same 32-bit cell.
 *
 * @param   pu32    The 32-bit test cell.
 */
DECLINLINE(void) tstASMAtomicUoReadU32Worker(uint32_t volatile *pu32)
{
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 0);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 19983);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT32_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT32_MAX);

    /* The same cell viewed as a signed 32-bit integer. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MAX);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MIN);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, 42);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, -21);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t, void pointers and handles are all 32-bit, so
       they can be exercised on this same cell. */
    /* NOTE(review): "%#llz" looks like a typo for "%#zx" (the write workers
       use "%#zx"); only failure messages are affected -- verify. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
    TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);

    /* The two handle reads below rely on the ~42 value left in the cell by
       the preceding pointer test. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicUoReadHandle(phEvt, &hEvt);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
#endif
}
938
939
940DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
941{
942 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 0);
943 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 19983);
944 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT16_MAX);
945 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT16_MAX);
946 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M-1);
947 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M+1);
948 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G-1);
949 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G+1);
950 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT32_MAX);
951 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT32_MAX);
952 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT64_MAX);
953 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_MAX);
954 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_C(0x450872549687134));
955
956 int64_t volatile *pi64 = (int64_t volatile *)pu64;
957 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MAX);
958 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MIN);
959 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, 42);
960 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, -21);
961
962#if ARCH_BITS == 64
963 size_t volatile *pcb = (size_t volatile *)pu64;
964 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
965 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
966 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
967
968 void * volatile *ppv = (void * volatile *)pu64;
969 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
970 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
971
972 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
973 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
974 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
975
976 ASMAtomicReadHandle(phEvt, &hEvt);
977 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
978#endif
979}
980
981
982DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
983{
984 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 0);
985 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 19983);
986 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT16_MAX);
987 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT16_MAX);
988 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M-1);
989 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M+1);
990 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G-1);
991 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G+1);
992 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT32_MAX);
993 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT32_MAX);
994 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT64_MAX);
995 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_MAX);
996 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_C(0x450872549687134));
997
998 int64_t volatile *pi64 = (int64_t volatile *)pu64;
999 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MAX);
1000 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MIN);
1001 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, 42);
1002 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, -21);
1003
1004#if ARCH_BITS == 64
1005 size_t volatile *pcb = (size_t volatile *)pu64;
1006 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
1007 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
1008 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
1009
1010 void * volatile *ppv = (void * volatile *)pu64;
1011 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
1012 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
1013
1014 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1015 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
1016 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
1017
1018 ASMAtomicUoReadHandle(phEvt, &hEvt);
1019 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
1020#endif
1021}
1022
1023
/**
 * Drives all the atomic-read workers above through DO_SIMPLE_TEST, which runs
 * each worker on variously aligned/placed instances of the given type.
 */
static void tstASMAtomicRead(void)
{
    DO_SIMPLE_TEST(ASMAtomicReadU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicReadU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicReadU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
}
1038
1039
/** Tests one atomic write accessor: writes a_Val through a_Function and then
 * verifies the variable actually holds that value.  (a_Type is unused but
 * kept for symmetry with TEST_READ.) */
#define TEST_WRITE(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { a_Function(a_pVar, a_Val); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
1042
1043DECLINLINE(void) tstASMAtomicWriteU8Worker(uint8_t volatile *pu8)
1044{
1045 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 0);
1046 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 1);
1047 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 2);
1048 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 16);
1049 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
1050 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
1051 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 127);
1052 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 128);
1053 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 169);
1054 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 239);
1055 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 254);
1056 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 255);
1057
1058 volatile int8_t *pi8 = (volatile int8_t *)pu8;
1059 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MIN);
1060 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MAX);
1061 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, 42);
1062 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, -41);
1063
1064 volatile bool *pf = (volatile bool *)pu8;
1065 TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, true);
1066 TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, false);
1067}
1068
1069
1070DECLINLINE(void) tstASMAtomicUoWriteU8Worker(uint8_t volatile *pu8)
1071{
1072 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 0);
1073 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 1);
1074 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 2);
1075 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 16);
1076 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
1077 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
1078 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 127);
1079 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 128);
1080 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 169);
1081 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 239);
1082 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 254);
1083 TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 255);
1084
1085 volatile int8_t *pi8 = (volatile int8_t *)pu8;
1086 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MIN);
1087 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MAX);
1088 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, 42);
1089 TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, -41);
1090
1091 volatile bool *pf = (volatile bool *)pu8;
1092 TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, true);
1093 TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, false);
1094}
1095
1096
1097DECLINLINE(void) tstASMAtomicWriteU16Worker(uint16_t volatile *pu16)
1098{
1099 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 0);
1100 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 19983);
1101 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, INT16_MAX);
1102 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, UINT16_MAX);
1103
1104 volatile int16_t *pi16 = (volatile int16_t *)pu16;
1105 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MIN);
1106 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MAX);
1107 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, 42);
1108 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, -41);
1109}
1110
1111
1112DECLINLINE(void) tstASMAtomicUoWriteU16Worker(uint16_t volatile *pu16)
1113{
1114 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 0);
1115 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 19983);
1116 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, INT16_MAX);
1117 TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, UINT16_MAX);
1118
1119 volatile int16_t *pi16 = (volatile int16_t *)pu16;
1120 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MIN);
1121 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MAX);
1122 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, 42);
1123 TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, -41);
1124}
1125
1126
/**
 * Worker exercising the ordered 32-bit atomic writes: U32, S32 and, on
 * 32-bit hosts only, the size_t / pointer / handle variants which then share
 * the very same 32-bit cell.
 *
 * @param   pu32    The 32-bit test cell.
 */
DECLINLINE(void) tstASMAtomicWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT32_MAX);

    /* The same cell written as a signed 32-bit integer. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t, void pointers and handles are all 32-bit, so
       the corresponding write variants can be exercised on this same cell. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1162
1163
/**
 * Worker exercising the unordered 32-bit atomic writes: U32, S32 and, on
 * 32-bit hosts only, the size_t / pointer / handle variants which then share
 * the very same 32-bit cell.
 *
 * @param   pu32    The 32-bit test cell.
 */
DECLINLINE(void) tstASMAtomicUoWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT32_MAX);

    /* The same cell written as a signed 32-bit integer. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t, void pointers and handles are all 32-bit, so
       the corresponding write variants can be exercised on this same cell. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1199
1200
1201DECLINLINE(void) tstASMAtomicWriteU64Worker(uint64_t volatile *pu64)
1202{
1203 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 0);
1204 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 19983);
1205 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT16_MAX);
1206 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT16_MAX);
1207 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M-1);
1208 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M+1);
1209 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G-1);
1210 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G+1);
1211 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT32_MAX);
1212 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT32_MAX);
1213 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT64_MAX);
1214 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_MAX);
1215 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_C(0x450872549687134));
1216
1217 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1218 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MIN);
1219 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MAX);
1220 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, 42);
1221
1222#if ARCH_BITS == 64
1223 size_t volatile *pcb = (size_t volatile *)pu64;
1224 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
1225 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);
1226
1227 void * volatile *ppv = (void * volatile *)pu64;
1228 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
1229 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);
1230
1231 ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1232 ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1233
1234 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1235 ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1236#endif
1237}
1238
1239
1240DECLINLINE(void) tstASMAtomicUoWriteU64Worker(uint64_t volatile *pu64)
1241{
1242 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 0);
1243 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 19983);
1244 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT16_MAX);
1245 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT16_MAX);
1246 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M-1);
1247 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M+1);
1248 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G-1);
1249 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G+1);
1250 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT32_MAX);
1251 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT32_MAX);
1252 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT64_MAX);
1253 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_MAX);
1254 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_C(0x450872549687134));
1255
1256 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1257 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MIN);
1258 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MAX);
1259 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, 42);
1260
1261#if ARCH_BITS == 64
1262 size_t volatile *pcb = (size_t volatile *)pu64;
1263 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
1264 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);
1265
1266 void * volatile *ppv = (void * volatile *)pu64;
1267 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
1268 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);
1269
1270 ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1271 ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1272
1273 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1274 ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1275#endif
1276}
1277
/**
 * Drives all the atomic-write workers above through DO_SIMPLE_TEST, which
 * runs each worker on variously aligned/placed instances of the given type.
 */
static void tstASMAtomicWrite(void)
{
    DO_SIMPLE_TEST(ASMAtomicWriteU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU64, uint64_t);
}
1292
1293
/**
 * Worker exercising the 8-bit atomic exchanges: U8, S8 and bool.
 *
 * The statements form a chain: each CHECK_OP_AND_VAL expects the exchange to
 * return the value left behind by the previous statement, so the order must
 * not be changed.
 *
 * @param   pu64    The 8-bit test cell (also viewed as int8_t and bool).
 */
DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0;
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, 1), 0, 1);
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xff)), 1, UINT8_C(0xff));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0x87)), UINT8_C(0xff), UINT8_C(0x87));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xfe)), UINT8_C(0x87), UINT8_C(0xfe));

    /* Signed view; 0xfe left above reads back as -2 here. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(-4)), INT8_C(-2), INT8_C(-4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(4)), INT8_C(-4), INT8_C(4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MAX), INT8_C(4), INT8_MAX);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MIN), INT8_MAX, INT8_MIN);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, 1), INT8_MIN, 1);

    /* Boolean view; the 1 left above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, true), false, true);
}
1314
1315
/**
 * Worker exercising the 16-bit atomic exchanges: U16 and S16.
 *
 * The statements form a chain: each CHECK_OP_AND_VAL expects the exchange to
 * return the value left behind by the previous statement, so the order must
 * not be changed.
 *
 * @param   pu16    The 16-bit test cell (also viewed as int16_t).
 */
DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
{
    *pu16 = 0;
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 1), 0, 1);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 0), 1, 0);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_MAX), 0, UINT16_MAX);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x7fff)), UINT16_MAX, UINT16_C(0x7fff));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x8765)), UINT16_C(0x7fff), UINT16_C(0x8765));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0xfffe)), UINT16_C(0x8765), UINT16_C(0xfffe));

    /* Signed view; 0xfffe left above reads back as -2 here. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MIN), INT16_C(-2), INT16_MIN);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MAX), INT16_MIN, INT16_MAX);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, -8), INT16_MAX, -8);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, 8), -8, 8);
}
1332
1333
1334DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
1335{
1336 *pu32 = 0;
1337 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 1), 0, 1);
1338 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 0), 1, 0);
1339 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_MAX), 0, UINT32_MAX);
1340 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0x87654321)), UINT32_MAX, UINT32_C(0x87654321));
1341 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0xfffffffe)), UINT32_C(0x87654321), UINT32_C(0xfffffffe));
1342
1343 int32_t volatile *pi32 = (int32_t volatile *)pu32;
1344 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MIN), INT32_C(-2), INT32_MIN);
1345 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MAX), INT32_MIN, INT32_MAX);
1346 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, -16), INT32_MAX, -16);
1347 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, 16), -16, 16);
1348
1349#if ARCH_BITS == 32
1350 size_t volatile *pcb = (size_t volatile *)pu32;
1351 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0x9481239b)), 0x10, UINT32_C(0x9481239b));
1352 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0xcdef1234)), UINT32_C(0x9481239b), UINT32_C(0xcdef1234));
1353#endif
1354
1355#if R0_ARCH_BITS == 32
1356 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu32;
1357 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT32_C(0x80341237)), UINT32_C(0xcdef1234), UINT32_C(0x80341237));
1358#endif
1359}
1360
1361
1362DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
1363{
1364 *pu64 = 0;
1365 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 1), 0, 1);
1366 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 0), 1, 0);
1367 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_MAX), 0, UINT64_MAX);
1368 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), UINT64_MAX, UINT64_C(0xfedcba0987654321));
1369 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfffffffffffffffe)), UINT64_C(0xfedcba0987654321), UINT64_C(0xfffffffffffffffe));
1370
1371 int64_t volatile *pi64 = (int64_t volatile *)pu64;
1372 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MAX), -2, INT64_MAX);
1373 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MIN), INT64_MAX, INT64_MIN);
1374 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, -32), INT64_MIN, -32);
1375 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, 32), -32, 32);
1376
1377#if ARCH_BITS == 64
1378 size_t volatile *pcb = (size_t volatile *)pu64;
1379 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0x94812396759)), 0x20, UINT64_C(0x94812396759));
1380 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0xcdef1234abdf7896)), UINT64_C(0x94812396759), UINT64_C(0xcdef1234abdf7896));
1381#endif
1382
1383#if R0_ARCH_BITS == 64
1384 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu64;
1385 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT64_C(0xfedc1234567890ab)), UINT64_C(0xcdef1234abdf7896), UINT64_C(0xfedc1234567890ab));
1386#endif
1387}
1388
1389
/**
 * Worker exercising the pointer/handle atomic exchanges.
 *
 * The statements form a chain: each check expects the exchange to return the
 * pointer left behind by the previous statement, so the order must not be
 * changed.
 *
 * @param   ppv     The pointer-sized test cell.
 */
DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
{
    *ppv = NULL;
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, (void *)(~(uintptr_t)0));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0x87654321)), (void *)(~(uintptr_t)0), (void *)(~(uintptr_t)0x87654321));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, NULL), (void *)(~(uintptr_t)0x87654321), NULL);

    /* Ring-3 pointer variant, using the cell's own address as a handy unique value. */
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgR3Ptr(ppv, (void *)ppv), NULL, (void *)ppv);

    /* Handle exchange; expects the ppv value stored just above. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)ppv;
    RTSEMEVENT hRet;
    ASMAtomicXchgHandle(phEvt, (RTSEMEVENT)(~(uintptr_t)12345), &hRet);
    CHECKVAL(hRet, (RTSEMEVENT)ppv, "%p");
    CHECKVAL(*phEvt, (RTSEMEVENT)(~(uintptr_t)12345), "%p");
}
1405
1406
/**
 * Drives all the atomic-exchange workers above through DO_SIMPLE_TEST.
 */
static void tstASMAtomicXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
}
1415
1416
/**
 * Worker exercising the 8-bit atomic compare-and-exchange: U8, S8 and bool.
 *
 * The statements form a chain: the "old" operand of each compare-exchange and
 * the expected success/failure result depend on the value left behind by the
 * previous statement, so the order must not be changed.
 *
 * @param   pu8     The 8-bit test cell (also viewed as int8_t and bool).
 */
DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0xff;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0), false, 0xff);      /* wrong old -> no change */
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), false, 0x97);   /* stale old -> no change */
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x7f, 0x97), true, 0x7f);

    /* Signed view; continues from the 0x7f left above. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, -2, 0x7f), true, -2);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, -2), true, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, INT8_MIN), false, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MIN, INT8_MAX), true, INT8_MIN);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, 1, INT8_MIN), true, 1);

    /* Boolean view; the 1 left above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, true, true), true, true);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, false), true, false);
}
1439
1440
/**
 * Exercises the 32-bit compare-and-exchange APIs, plus the pointer and
 * handle wrappers on 32-bit hosts (where a pointer fits in 32 bits).
 *
 * The CHECK_* lines verify both the boolean result and the resulting memory
 * value; expected values are cumulative, so line order matters.
 *
 * @param   pu32    Pointer to the 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    /* Unsigned variant: mismatching old value fails, matching succeeds. */
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, 0), false, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), UINT32_C(0x12345678)), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), 0), true, UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0xfffffffe), UINT32_C(0x80088efd)), true, UINT32_C(0xfffffffe));

    /* Same variable viewed as signed; 0xfffffffe above is -2 here. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, 2), false, -2);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, -2), true, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, -2), false, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), true, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), false, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, 19, -19), true, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, -234), false, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, 19), true, INT32_MAX);

#if ARCH_BITS == 32
    /* On 32-bit hosts a pointer fits here, so test the pointer/handle wrappers too. */
    *pu32 = 29;
    void * volatile *ppv = (void * volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    /* ASMAtomicCmpXchgHandle returns its status via the fRc output parameter. */
    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1479
1480
/**
 * Exercises the 64-bit compare-and-exchange APIs, plus the pointer and
 * handle wrappers on 64-bit hosts (where a pointer fits in 64 bits).
 *
 * The CHECK_* lines verify both the boolean result and the resulting memory
 * value; expected values are cumulative, so line order matters.
 *
 * @param   pu64    Pointer to the 64-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
{
    /* Note: deliberately a 56-bit pattern, not all-ones. */
    *pu64 = UINT64_C(0xffffffffffffff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, 0), false, UINT64_C(0xffffffffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 1), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), false, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0xfffffffffffffffd), UINT64_C(0x80040008008efd)), true, UINT64_C(0xfffffffffffffffd));

    /* Same variable viewed as signed; 0xfffffffffffffffd above is -3 here. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, 0), false, -3);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, -3), true, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MIN), false, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MAX), true, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, -29), false, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), true, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), false, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, 29, -29), true, 29);

#if ARCH_BITS == 64
    /* On 64-bit hosts a pointer fits here; the value 29 carries over from above. */
    void * volatile *ppv = (void * volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    /* ASMAtomicCmpXchgHandle returns its status via the fRc output parameter. */
    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1519
1520
#ifdef RTASM_HAVE_CMP_WRITE_U128
/**
 * Exercises the 128-bit compare-and-write APIs (write-only variant, i.e.
 * no old value is returned on failure).
 *
 * Checks that a mismatch in either the low or the high half makes the
 * operation fail, and that only a full 128-bit match succeeds.  Expected
 * values are cumulative, so line order matters.
 *
 * @param   pu128   Pointer to the 128-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpWriteU128Worker(RTUINT128U volatile *pu128)
{
    pu128->s.Lo = UINT64_C(0xffffffffffffff);
    pu128->s.Hi = UINT64_C(0xffffffffffffff);

    RTUINT128U u128A, u128B;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0, 0),
                                                                     u128B = RTUINT128_INIT_C(0, 0)),
                           false, 0xffffffffffffff, 0xffffffffffffff);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0, 0),
                                                                     u128B = RTUINT128_INIT_C(0xffffffffffffff, 0xffffffffffffff)),
                           true, 0, 0);

    /* A mismatch in just one half (low, then high) must fail. */
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                     u128B = RTUINT128_INIT_C(0, 1)),
                           false, 0, 0);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                     u128B = RTUINT128_INIT_C(1, 0)),
                           false, 0, 0);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                     u128B = RTUINT128_INIT_C(0, 0)),
                           true, 0x80040008008efd, 0x40080004004def);

    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                     u128B = RTUINT128_INIT_C(0x80040008008efd, 0)),
                           false, 0x80040008008efd, 0x40080004004def);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                     u128B = RTUINT128_INIT_C(0, 0x40080004004def)),
                           false, 0x80040008008efd, 0x40080004004def);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                     u128B = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def)),
                           true, 0xfff40ff8f08ef3, 0x4ee8ee04cc4de4);

    /* Make sure the v2 version works too (arm) */
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128v2(&pu128->u,
                                                                      UINT64_C(0x95487930069587), UINT64_C(0x89958490385964),
                                                                      UINT64_C(0xfff40ff8f08ef3), UINT64_C(0x4ee8ee04cc4de4)),
                           true, 0x95487930069587, 0x89958490385964);
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128v2(&pu128->u,
                                                                      UINT64_C(0x99969404869434), UINT64_C(0x11049309994567),
                                                                      UINT64_C(0x33f40ff8f08eff), UINT64_C(0x99e8ee04cc4dee)),
                           false, 0x95487930069587, 0x89958490385964);
}
#endif /* RTASM_HAVE_CMP_WRITE_U128 */
1574
1575
/**
 * Driver for the compare-and-exchange tests; runs each worker via the
 * DO_SIMPLE_TEST* machinery defined earlier in this file.
 */
static void tstASMAtomicCmpXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
#ifdef RTASM_HAVE_CMP_WRITE_U128
# ifdef RT_ARCH_AMD64
    /* 128-bit cmpxchg needs the CMPXCHG16B instruction on AMD64, so gate on CPUID. */
    if (ASMCpuId_ECX(1) & X86_CPUID_FEATURE_ECX_CX16)
# endif
    {
        RTTestISub("ASMAtomicCmpWriteU128U");
        DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMAtomicCmpWriteU128Worker, RTUINT128U);
    }
#endif
}
1591
1592
/**
 * Exercises the 8-bit "Ex" compare-and-exchange APIs, which additionally
 * return the previous value through an output parameter.
 *
 * Each CHECK_* line verifies the boolean result, the resulting memory value,
 * and the reported old value; expected values are cumulative, so line order
 * matters.
 *
 * @param   pu8     Pointer to the byte under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU8Worker(uint8_t volatile *pu8)
{
    *pu8 = UINT8_C(0xff);
    /* u8Old starts with a junk value to prove it gets written even on failure. */
    uint8_t u8Old = UINT8_C(0x11);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, 0, &u8Old), false, UINT8_C(0xff), UINT8_C(0xff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, UINT8_C(0xff), &u8Old), true, 0, UINT8_C(0xff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, UINT8_C(0xff), &u8Old), false, 0, UINT8_C(0x00));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xfd), 0, &u8Old), true, UINT8_C(0xfd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xfd), 0, &u8Old), false, UINT8_C(0xfd), UINT8_C(0xfd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xe0), UINT8_C(0xfd), &u8Old), true, UINT8_C(0xe0), UINT8_C(0xfd));

    /* Same byte viewed as signed; 0xe0 above is -32 here. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    int8_t i8Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 32, 32, &i8Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 32, -32, &i8Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MIN, 32, &i8Old), true, INT8_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MIN, 32, &i8Old), false, INT8_MIN, INT8_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MAX, INT8_MAX, &i8Old), false, INT8_MIN, INT8_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MAX, INT8_MIN, &i8Old), true, INT8_MAX, INT8_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 42, INT8_MAX, &i8Old), true, 42, INT8_MAX);
}
1614
1615
/**
 * Exercises the 16-bit "Ex" compare-and-exchange APIs, which additionally
 * return the previous value through an output parameter.
 *
 * Each CHECK_* line verifies the boolean result, the resulting memory value,
 * and the reported old value; expected values are cumulative, so line order
 * matters.
 *
 * @param   pu16    Pointer to the 16-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU16Worker(uint16_t volatile *pu16)
{
    *pu16 = UINT16_C(0xffff);
    /* u16Old starts with a junk value to prove it gets written even on failure. */
    uint16_t u16Old = UINT16_C(0x5111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, 0, &u16Old), false, UINT16_C(0xffff), UINT16_C(0xffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, UINT16_C(0xffff), &u16Old), true, 0, UINT16_C(0xffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, UINT16_C(0xffff), &u16Old), false, 0, UINT16_C(0x0000));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0x8efd), 0, &u16Old), true, UINT16_C(0x8efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0x8efd), 0, &u16Old), false, UINT16_C(0x8efd), UINT16_C(0x8efd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0xffe0), UINT16_C(0x8efd), &u16Old), true, UINT16_C(0xffe0), UINT16_C(0x8efd));

    /* Same variable viewed as signed; 0xffe0 above is -32 here. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    int16_t i16Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 32, 32, &i16Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 32, -32, &i16Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MIN, 32, &i16Old), true, INT16_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MIN, 32, &i16Old), false, INT16_MIN, INT16_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MAX, INT16_MAX, &i16Old), false, INT16_MIN, INT16_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MAX, INT16_MIN, &i16Old), true, INT16_MAX, INT16_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 42, INT16_MAX, &i16Old), true, 42, INT16_MAX);
}
1637
1638
/**
 * Exercises the 32-bit "Ex" compare-and-exchange APIs, which additionally
 * return the previous value through an output parameter, plus the handle
 * wrapper on 32-bit hosts.
 *
 * Each CHECK_* line verifies the boolean result, the resulting memory value,
 * and the reported old value; expected values are cumulative, so line order
 * matters.
 *
 * @param   pu32    Pointer to the 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    /* u32Old starts with a junk value to prove it gets written even on failure. */
    uint32_t u32Old = UINT32_C(0x80005111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, 0, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), false, 0, UINT32_C(0x00000000));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), true, UINT32_C(0x80088efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), false, UINT32_C(0x80088efd), UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0xffffffe0), UINT32_C(0x80088efd), &u32Old), true, UINT32_C(0xffffffe0), UINT32_C(0x80088efd));

    /* Same variable viewed as signed; 0xffffffe0 above is -32 here. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    int32_t i32Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, 32, &i32Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, -32, &i32Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), true, INT32_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MAX, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MIN, &i32Old), true, INT32_MAX, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 42, INT32_MAX, &i32Old), true, 42, INT32_MAX);

#if ARCH_BITS == 32
    /* On 32-bit hosts a handle fits here; the value 42 carries over from above.
       ASMAtomicCmpXchgExHandle returns status via fRc and the old value via hEvtOld. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
#endif
}
1675
1676
/**
 * Exercises the 64-bit "Ex" compare-and-exchange APIs, which additionally
 * return the previous value through an output parameter, plus the handle
 * and pointer wrappers on 64-bit hosts.
 *
 * Each CHECK_* line verifies the boolean result, the resulting memory value,
 * and the reported old value; expected values are cumulative, so line order
 * matters.
 *
 * @param   pu64    Pointer to the 64-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffffffffffff);
    /* u64Old starts with a junk value to prove it gets written even on failure. */
    uint64_t u64Old = UINT64_C(0x8000000051111111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, 0, UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0x342, &u64Old), false, 0, 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0, &u64Old), true, UINT64_C(0x0080040008008efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd), &u64Old), true, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd));

    /* Same variable viewed as signed; 0xffffffffffffffc0 above is -64 here. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    int64_t i64Old = -3;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, 64, &i64Old), false, -64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), true, 64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, 64, &i64Old), true, INT64_MIN, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MAX, INT64_MIN, &i64Old), true, INT64_MAX, INT64_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 42, INT64_MAX, &i64Old), true, 42, INT64_MAX);

#if ARCH_BITS == 64
    /* On 64-bit hosts a handle fits here; the value 42 carries over from above.
       ASMAtomicCmpXchgExHandle returns status via fRc and the old value via hEvtOld. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    /* The typed and void pointer "Ex" wrappers. */
    void * volatile *ppv = (void * volatile *)pu64;
    void *pvOld;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, NULL, &pvOld), false, (void *)~(uintptr_t)0x12380964, (void *)~(uintptr_t)0x12380964);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964, &pvOld), true, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964);

    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)~(uintptr_t)99, &pvOld), false, (void *)(intptr_t)12345678, (void *)(intptr_t)12345678);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)(intptr_t)12345678, &pvOld), true, (void *)~(intptr_t)99, (void *)(intptr_t)12345678);
#endif
}
1720
1721
#ifdef RTASM_HAVE_CMP_XCHG_U128
/**
 * Exercises the 128-bit compare-and-exchange APIs, which additionally return
 * the previous value through an output parameter.
 *
 * Checks that a mismatch in either half makes the operation fail, that only
 * a full 128-bit match succeeds, and that u128Old always receives the value
 * that was in memory before the call (it is re-seeded with a junk pattern
 * before each call to prove it gets overwritten).  Expected values are
 * cumulative, so line order matters.
 *
 * @param   pu128   Pointer to the 128-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU128Worker(RTUINT128U volatile *pu128)
{
    pu128->s.Lo = UINT64_C(0xffffffffffffff);
    pu128->s.Hi = UINT64_C(0xffffffffffffff);

    RTUINT128U u128A, u128B;
    RTUINT128U const u128OldInit = RTUINT128_INIT_C(0x4242424242424242, 0x2222222222222222);
    RTUINT128U u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0, 0),
                                                                    u128B = RTUINT128_INIT_C(0, 0),
                                                                    &u128Old),
                           false, 0xffffffffffffff, 0xffffffffffffff);
    CHECKVAL128_C(&u128Old, 0xffffffffffffff, 0xffffffffffffff);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0, 0),
                                                                    u128B = RTUINT128_INIT_C(0xffffffffffffff, 0xffffffffffffff),
                                                                    &u128Old),
                           true, 0, 0);
    CHECKVAL128_C(&u128Old, 0xffffffffffffff, 0xffffffffffffff);

    /* A mismatch in just one half (low, then high) must fail. */
    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                    u128B = RTUINT128_INIT_C(0, 1),
                                                                    &u128Old),
                           false, 0, 0);
    CHECKVAL128_C(&u128Old, 0, 0);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                    u128B = RTUINT128_INIT_C(1, 0),
                                                                    &u128Old),
                           false, 0, 0);
    CHECKVAL128_C(&u128Old, 0, 0);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                    u128B = RTUINT128_INIT_C(0, 0),
                                                                    &u128Old),
                           true, 0x80040008008efd, 0x40080004004def);
    CHECKVAL128_C(&u128Old, 0, 0);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                    u128B = RTUINT128_INIT_C(0x80040008008efd, 0),
                                                                    &u128Old),
                           false, 0x80040008008efd, 0x40080004004def);
    CHECKVAL128_C(&u128Old, 0x80040008008efd, 0x40080004004def);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                    u128B = RTUINT128_INIT_C(0, 0x40080004004def),
                                                                    &u128Old),
                           false, 0x80040008008efd, 0x40080004004def);
    CHECKVAL128_C(&u128Old, 0x80040008008efd, 0x40080004004def);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128U(pu128,
                                                                    u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
                                                                    u128B = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
                                                                    &u128Old),
                           true, 0xfff40ff8f08ef3, 0x4ee8ee04cc4de4);
    CHECKVAL128_C(&u128Old, 0x80040008008efd, 0x40080004004def);

    /* Make sure the v2 version works too (arm) */
    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128v2(&pu128->u,
                                                                     UINT64_C(0x78039485960543), UINT64_C(0x97058437294586),
                                                                     UINT64_C(0xfff40ff8f08ef3), UINT64_C(0x4ee8ee04cc4de4),
                                                                     &u128Old.u),
                           true, 0x78039485960543, 0x97058437294586);
    CHECKVAL128_C(&u128Old, 0xfff40ff8f08ef3, 0x4ee8ee04cc4de4);

    u128Old = u128OldInit;
    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpXchgU128v2(&pu128->u,
                                                                     UINT64_C(0x13495874560495), UINT64_C(0x12304896098597),
                                                                     UINT64_C(0xfff40ff8f08ef3), UINT64_C(0x4ee8ee04cc4de4),
                                                                     &u128Old.u),
                           false, 0x78039485960543, 0x97058437294586);
    CHECKVAL128_C(&u128Old, 0x78039485960543, 0x97058437294586);
}
#endif /* RTASM_HAVE_CMP_XCHG_U128 */
1812
1813
/**
 * Driver for the "Ex" compare-and-exchange tests; runs each worker via the
 * DO_SIMPLE_TEST* machinery defined earlier in this file.
 */
static void tstASMAtomicCmpXchgEx(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
#ifdef RTASM_HAVE_CMP_XCHG_U128
# ifdef RT_ARCH_AMD64
    /* 128-bit cmpxchg needs the CMPXCHG16B instruction on AMD64, so gate on CPUID. */
    if (ASMCpuId_ECX(1) & X86_CPUID_FEATURE_ECX_CX16)
# endif
    {
        RTTestISub("ASMAtomicCmpXchgU128");
        DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMAtomicCmpXchgU128Worker, RTUINT128U);
    }
#endif
}
1830
1831
/**
 * Checks an atomic operation that returns the previous value.
 *
 * Samples *a_pVar before the call, invokes a_Function(a_pVar, a_uVal), and
 * reports a test failure unless the return value equals the sampled old
 * value and *a_pVar now equals a_VarExpect.
 */
#define TEST_RET_OLD(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type const uOldExpect = *(a_pVar); \
        a_Type uOldRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uOldRet == (uOldExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s," a_Fmt ") -> " a_Fmt ", expected " a_Fmt "; %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uOldRet, uOldExpect, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1839
1840
/**
 * Exercises ASMAtomicAddU32 / ASMAtomicSubU32, including unsigned
 * wrap-around in both directions.
 *
 * Each TEST_RET_OLD line checks the returned old value and the new variable
 * value; expectations are cumulative, so line order matters.
 *
 * @param   pu32    Pointer to the 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicAddU32Worker(uint32_t *pu32)
{
    *pu32 = 10;
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, 11);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffffe), 9); /* adding -2 via wrap-around */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffff7), 0); /* adding -9 via wrap-around */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), 0); /* wraps back to zero */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 0, 0);

    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 0, 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 32, UINT32_C(0xffffffe0)); /* underflow wraps */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffe1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffde), UINT32_C(0x00000003));
}
1858
1859
/**
 * Exercises ASMAtomicAddS32 / ASMAtomicSubS32, including negative addends
 * and the INT32_MIN/INT32_MAX boundaries.
 *
 * Each TEST_RET_OLD line checks the returned old value and the new variable
 * value; expectations are cumulative, so line order matters.
 *
 * @param   pi32    Pointer to the signed 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
{
    *pi32 = 10;
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 1, 11);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -2, 9);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -9, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -0x7fffffff, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0x7fffffff, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, 0);

    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 0, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 1, -1);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, INT32_MIN, INT32_MAX); /* -1 - INT32_MIN wraps to INT32_MAX */
}
1875
1876
/**
 * Exercises ASMAtomicAddU64 / ASMAtomicSubU64, including unsigned
 * wrap-around in both directions.
 *
 * Each TEST_RET_OLD line checks the returned old value and the new variable
 * value; expectations are cumulative, so line order matters.
 *
 * @param   pu64    Pointer to the 64-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicAddU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 10;
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, 1, 11);
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffffe), UINT64_C(0x0000000000000009)); /* adding -2 via wrap-around */
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffff7), UINT64_C(0x0000000000000000)); /* adding -9 via wrap-around */
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0x7ffffffffffffff0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000000), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x000000000000001f), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000000)); /* wraps back to zero */

    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000020), UINT64_C(0xffffffffffffffe0)); /* underflow wraps */
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffff), UINT64_C(0x7fffffffffffffe1));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffdd), UINT64_C(0x0000000000000004));
}
1894
1895
1896DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
1897{
1898 *pi64 = 10;
1899 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 11);
1900 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -2, 9);
1901 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -9, 0);
1902 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -INT64_MAX, -INT64_MAX);
1903 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, -INT64_MAX);
1904 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -1, INT64_MIN);
1905 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, INT64_MAX, -1);
1906 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 0);
1907 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, 0);
1908
1909 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 0, 0);
1910 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 1, -1);
1911 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, INT64_MIN, INT64_MAX);
1912}
1913
1914
1915
/**
 * Exercises ASMAtomicAddZ / ASMAtomicSubZ (size_t width), including
 * unsigned wrap-around.
 *
 * Each TEST_RET_OLD line checks the returned old value and the new variable
 * value; expectations are cumulative, so line order matters.
 *
 * @param   pcb     Pointer to the size_t variable under test.
 */
DECLINLINE(void) tstASMAtomicAddZWorker(size_t volatile *pcb)
{
    *pcb = 10;
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, 1, 11);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)1, 9); /* adding -2 via wrap-around */
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)8, 0); /* adding -9 via wrap-around */

    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 0, 0);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 10, ~(size_t)9); /* underflow wraps */
}
1926
/**
 * Driver for the atomic add/subtract tests; runs each worker via the
 * DO_SIMPLE_TEST machinery defined earlier in this file.
 */
static void tstASMAtomicAdd(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicAddU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicAddZ, size_t);
}
1935
1936
/**
 * Checks an atomic operation that returns the new value and takes no value
 * argument (the inc/dec family).
 *
 * Invokes a_Function(a_pVar) and reports a test failure unless both the
 * return value and *a_pVar equal a_VarExpect.
 */
#define TEST_RET_NEW_NV(a_Type, a_Fmt, a_pVar, a_Function, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s) -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1943
1944
/**
 * Exercises ASMAtomicDecU32 / ASMAtomicIncU32, including wrap-around at
 * zero and UINT32_MAX.
 *
 * Each TEST_RET_NEW_NV line checks the returned new value and the variable;
 * expectations are cumulative, so line order matters.
 *
 * @param   pu32    Pointer to the 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX); /* decrement below zero wraps */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 0); /* increment past UINT32_MAX wraps */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M + 1);
}
1966
/**
 * Exercises the unordered variants ASMAtomicUoDecU32 / ASMAtomicUoIncU32;
 * same value sequence as tstASMAtomicDecIncU32Worker.
 *
 * Each TEST_RET_NEW_NV line checks the returned new value and the variable;
 * expectations are cumulative, so line order matters.
 *
 * @param   pu32    Pointer to the 32-bit variable under test.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX); /* decrement below zero wraps */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 0); /* increment past UINT32_MAX wraps */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M + 1);
}
1988
1989
/**
 * Exercises the signed 32-bit atomic dec/inc pair, ASMAtomicDecS32 and
 * ASMAtomicIncS32, verifying the returned new value and the memory content
 * after each call (TEST_RET_NEW_NV checks both).
 *
 * Covers crossing zero into negative values, repeated dec/inc round-trips,
 * and wraparound at the top of the signed range (INT32_MAX + 1 is expected
 * to produce INT32_MIN, i.e. two's complement wrap semantics).
 *
 * @param   pi32    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    /* Count down through zero into negative territory. */
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -2);
    /* Count back up past zero. */
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    /* Dec/inc round-trips. */
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    /* Top of the signed range: incrementing INT32_MAX wraps to INT32_MIN. */
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MIN);
}
2019
2020
#if 0
/**
 * Disabled: would exercise unordered signed 32-bit dec/inc
 * (ASMAtomicUoDecS32/ASMAtomicUoIncS32) the same way as the ordered S32
 * worker above.  NOTE(review): presumably disabled because the Uo S32
 * variants are not (yet) provided by iprt/asm.h -- confirm before enabling.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MIN);
}
#endif
2052
2053
2054DECLINLINE(void) tstASMAtomicDecIncU64Worker(uint64_t volatile *pu64)
2055{
2056 *pu64 = 3;
2057 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 2);
2058 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 1);
2059 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 0);
2060 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX);
2061 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 1);
2062 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 2);
2063 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX - 1);
2064 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX);
2065 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 0);
2066 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 1);
2067 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 2);
2068 *pu64 = _4G - 1;
2069 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G - 2);
2070 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G - 1);
2071 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G);
2072 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G + 1);
2073 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G);
2074}
2075
2076
#if 0
/**
 * Disabled: would exercise the unordered 64-bit dec/inc pair
 * (ASMAtomicUoDecU64/ASMAtomicUoIncU64) the same way as the ordered U64
 * worker above.  NOTE(review): presumably disabled because the Uo U64
 * variants are not (yet) provided by iprt/asm.h -- confirm before enabling.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 3;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 2);
    *pu64 = _4G - 1;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G + 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G);
}
#endif
2100
2101
/**
 * Exercises the signed 64-bit atomic dec/inc pair, ASMAtomicDecS64 and
 * ASMAtomicIncS64, verifying the returned new value and the memory content
 * after each call (TEST_RET_NEW_NV checks both).
 *
 * Covers crossing zero into negative values, dec/inc round-trips, and a
 * decrement at INT64_MAX.  (Unlike the S32 worker, no INT64_MAX -> INT64_MIN
 * wraparound increment is attempted here.)
 *
 * @param   pi64    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    /* Count down through zero into negative territory. */
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -2);
    /* Count back up past zero. */
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    /* Dec/inc round-trips. */
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    /* Decrement at the top of the signed range. */
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, INT64_MAX - 1);
}
2129
2130
#if 0
/**
 * Disabled: would exercise the unordered signed 64-bit dec/inc pair
 * (ASMAtomicUoDecS64/ASMAtomicUoIncS64) the same way as the ordered S64
 * worker above.  NOTE(review): presumably disabled because the Uo S64
 * variants are not (yet) provided by iprt/asm.h -- confirm before enabling.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, INT64_MAX - 1);
}
#endif
2160
2161
/**
 * Exercises the size_t atomic dec/inc pair, ASMAtomicDecZ and ASMAtomicIncZ,
 * verifying the returned new value and the memory content after each call
 * (TEST_RET_NEW_NV checks both).
 *
 * Uses a large base value (~0 >> 7, i.e. the top bits clear) so the test
 * exercises wide values without depending on whether size_t is 32 or 64
 * bits on the host.
 *
 * @param   pcb     Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncZWorker(size_t volatile *pcb)
{
    size_t const uBaseVal = ~(size_t)0 >> 7;
    *pcb = uBaseVal;
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 3);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal + 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
}
2177
2178
/**
 * Dispatches all the atomic dec/inc workers above via DO_SIMPLE_TEST,
 * which (defined earlier in this file) runs each worker against variously
 * placed/aligned variables.  The commented-out lines correspond to the
 * #if 0 workers for atomic ops that do not exist (yet).
 */
static void tstASMAtomicDecInc(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncZ, size_t);
}
2191
2192
/**
 * Invokes a_Function(a_pVar, a_uVal) -- a void-returning atomic op -- and
 * checks that *a_pVar afterwards equals a_VarExpect, reporting a test
 * failure via RTTestFailed otherwise.  a_Fmt is the format specifier used
 * for a_Type values in the failure message.  (No return value to check,
 * unlike TEST_RET_NEW below.)
 */
#define TEST_RET_VOID(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
2199
/**
 * Invokes a_Function(a_pVar, a_uVal) -- an atomic op returning the NEW
 * value -- and checks that both the return value and *a_pVar afterwards
 * equal a_VarExpect, reporting a test failure via RTTestFailed otherwise.
 * a_Fmt is the format specifier used for a_Type values in the message.
 */
#define TEST_RET_NEW(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
2206
2207
/**
 * Exercises the 32-bit atomic bitwise ops ASMAtomicOrU32, ASMAtomicAndU32
 * and ASMAtomicXorU32 (void-returning variants; TEST_RET_VOID only checks
 * the resulting memory content).  Each expected value is the previous
 * expected value combined with the operand via the respective operator.
 *
 * @param   pu32    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
2225
2226
/**
 * Exercises the unordered 32-bit atomic bitwise ops ASMAtomicUoOrU32,
 * ASMAtomicUoAndU32 and ASMAtomicUoXorU32 -- same operand/result sequence
 * as the ordered worker above, only the Uo variants are called.
 *
 * @param   pu32    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
2244
2245
/**
 * Exercises the "Ex" 32-bit atomic bitwise ops ASMAtomicOrExU32,
 * ASMAtomicAndExU32 and ASMAtomicXorExU32, which return the OLD value;
 * TEST_RET_OLD (defined earlier in this file) checks both the returned
 * old value and the resulting memory content.  Same operand sequence as
 * the void-returning worker above.
 *
 * @param   pu32    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
2263
2264
/**
 * Exercises the 64-bit atomic bitwise ops ASMAtomicOrU64 and ASMAtomicAndU64
 * (TEST_RET_VOID only checks the resulting memory content).  First runs the
 * same 32-bit-range pattern as the U32 worker, then repeats it with full
 * 64-bit patterns so both halves of the operand are exercised.
 * The ASMAtomicXorU64 lines are commented out -- NOTE(review): presumably
 * no such op exists in iprt/asm.h yet; confirm before enabling.
 *
 * @param   pu64    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2298
2299
/**
 * Exercises the unordered 64-bit atomic bitwise ops ASMAtomicUoOrU64 and
 * ASMAtomicUoAndU64 -- same operand/result sequence as the ordered U64
 * worker above, only the Uo variants are called.  The Xor lines are
 * likewise commented out (no ASMAtomicUoXorU64 -- NOTE(review): confirm).
 *
 * @param   pu64    Scratch variable to operate on; contents are overwritten.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2333
2334
#if 0
/**
 * Disabled: would exercise "Ex" (old-value-returning) 64-bit bitwise ops
 * (ASMAtomicOrExU64/ASMAtomicAndExU64/ASMAtomicXorExU64) with the same
 * operand sequence as the U64 workers above.  NOTE(review): presumably
 * disabled because the Ex U64 variants are not (yet) provided by
 * iprt/asm.h -- confirm before enabling.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
#endif
2370
2371
/**
 * Dispatches all the atomic and/or/xor workers above via DO_SIMPLE_TEST.
 * The commented-out line corresponds to the #if 0 Ex-U64 worker.
 */
static void tstASMAtomicAndOrXor(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicAndOrXorExU64, uint64_t);
}
2381
2382
/** A page-sized byte buffer for the ASMMemZeroPage/ASMMemIsZeroPage tests. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];
} TSTPAGE;
2387
2388
2389DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
2390{
2391 for (unsigned j = 0; j < 16; j++)
2392 {
2393 memset(pPage, 0x11 * j, sizeof(*pPage));
2394 ASMMemZeroPage(pPage);
2395 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
2396 if (pPage->ab[i])
2397 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
2398 if (ASMMemIsZeroPage(pPage) != true)
2399 RTTestFailed(g_hTest, "ASMMemIsZeroPage returns false after ASMMemZeroPage!\n");
2400 if (ASMMemFirstMismatchingU32(pPage, sizeof(pPage), 0) != NULL)
2401 RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,0) returns non-NULL after ASMMemZeroPage!\n");
2402 }
2403}
2404
2405
/**
 * Entry point for the ASMMemZeroPage test; runs the worker above via
 * DO_SIMPLE_TEST_NO_SUB_NO_STACK (page-sized buffer, so no stack copy).
 */
static void tstASMMemZeroPage(void)
{
    RTTestISub("ASMMemZeroPage");
    DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMMemZeroPageWorker, TSTPAGE);
}
2411
2412
/**
 * Tests ASMMemIsZeroPage against two guarded page allocations -- one with
 * the guard page before it and one with it after -- so any overrun by the
 * scan code faults immediately.
 *
 * Checks: an all-zero page is reported zero, an all-0xff page is not, and
 * a single non-zero byte (low bit 0x01 / high bit 0x80) at every offset is
 * detected.
 *
 * @param   hTest   The test handle for allocation and reporting.
 */
static void tstASMMemIsZeroPage(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemIsZeroPage");

    /* Guard page in front of page1, behind page2. */
    void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pvPage1 && pvPage2);

    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));

    memset(pvPage1, 0xff, PAGE_SIZE);
    memset(pvPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));

    /* A single set byte anywhere in the page must be detected; the byte is
       restored afterwards so each iteration starts from an all-zero page. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    for (unsigned off = 0; off < PAGE_SIZE; off++)
    {
        ((uint8_t *)pvPage1)[off] = 1;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
        ((uint8_t *)pvPage1)[off] = 0;

        ((uint8_t *)pvPage2)[off] = 0x80;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
        ((uint8_t *)pvPage2)[off] = 0;
    }

    RTTestSubDone(hTest);
}
2446
2447
/**
 * Tests ASMMemFirstMismatchingU8 (and the derived ASMMemIsZero /
 * ASMMemIsAllU8) against two guarded page allocations -- guard page in
 * front of page1, behind page2 -- so scan overruns fault immediately.
 *
 * Three phases:
 *   1. All-zero pages: no mismatch for 0, immediate mismatch for any other
 *      filler; repeated for small sub-buffers at both page ends.
 *   2. All-0xff pages: the inverse checks.
 *   3. A sweep over buffer sizes, start offsets and the position of a
 *      single mismatching byte, with a 0xff sentinel planted just past the
 *      end of each scanned range to catch over-reads.
 *
 * @param   hTest   The test handle for allocation and reporting.
 */
static void tstASMMemFirstMismatchingU8(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemFirstMismatchingU8");

    uint8_t *pbPage1 = (uint8_t *)RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    uint8_t *pbPage2 = (uint8_t *)RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pbPage1 && pbPage2);

    /* Phase 1: all zero. */
    memset(pbPage1, 0, PAGE_SIZE);
    memset(pbPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 1) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 1) == pbPage2);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0x87) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0x87) == pbPage2);
    RTTESTI_CHECK(ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0x34));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0x88));
    /* Small (sub-stride) buffer sizes at both ends of the pages; for cbSub
       of 0 any filler "matches" (NULL / the "|| !cbSub" escape below). */
    unsigned cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0x34) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0x99) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0x42) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0x88) == pbPage2 || !cbSub);
    }

    /* Phase 2: all 0xff -- inverse of phase 1. */
    memset(pbPage1, 0xff, PAGE_SIZE);
    memset(pbPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xfe) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xfe) == pbPage2);
    RTTESTI_CHECK(!ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(!ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xff) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xfe) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xfe) == pbPage2 || !cbSub);
    }


    /*
     * Various alignments and sizes.
     */
    uint8_t const   bFiller1 = 0x00;
    uint8_t const   bFiller2 = 0xf6;
    size_t const    cbBuf    = 128;
    uint8_t        *pbBuf1   = pbPage1;
    uint8_t        *pbBuf2   = &pbPage2[PAGE_SIZE - cbBuf];  /* Put it up against the tail guard */
    /* Surround each buffer with the inverted filler so a scan that runs
       past cbBuf (but stays on the page) is still detected. */
    memset(pbPage1, ~bFiller1, PAGE_SIZE);
    memset(pbPage2, ~bFiller2, PAGE_SIZE);
    memset(pbBuf1, bFiller1, cbBuf);
    memset(pbBuf2, bFiller2, cbBuf);
    for (size_t offNonZero = 0; offNonZero < cbBuf; offNonZero++)
    {
        /* Plant one byte guaranteed to differ from the filler (low bit set
           vs bFiller1=0x00; high bit forced and xored vs bFiller2=0xf6). */
        uint8_t bRand = (uint8_t)RTRandU32();
        pbBuf1[offNonZero] = bRand | 1;
        pbBuf2[offNonZero] = (0x80 | bRand) ^ 0xf6;

        for (size_t offStart = 0; offStart < 32; offStart++)
        {
            size_t const cbMax = cbBuf - offStart;
            for (size_t cb = 0; cb < cbMax; cb++)
            {
                size_t const offEnd = offStart + cb;
                uint8_t bSaved1, bSaved2;
                /* Temporarily place a 0xff sentinel just past the scanned
                   range to catch over-reads being reported as mismatches. */
                if (offEnd < PAGE_SIZE)
                {
                    bSaved1 = pbBuf1[offEnd];
                    bSaved2 = pbBuf2[offEnd];
                    pbBuf1[offEnd] = 0xff;
                    pbBuf2[offEnd] = 0xff;
                }
#ifdef _MSC_VER                 /* simple stupid compiler warnings */
                else
                    bSaved1 = bSaved2 = 0;
#endif

                /* Note: offNonZero - offStart wraps (size_t) when the planted
                   byte is before offStart, correctly selecting the NULL arm. */
                uint8_t *pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf1 + offStart, cb, bFiller1);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf1[offNonZero] : pbRet == NULL);

                pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf2 + offStart, cb, bFiller2);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf2[offNonZero] : pbRet == NULL);

                if (offEnd < PAGE_SIZE)
                {
                    pbBuf1[offEnd] = bSaved1;
                    pbBuf2[offEnd] = bSaved2;
                }
            }
        }

        /* Restore the filler byte before moving the mismatch position. */
        pbBuf1[offNonZero] = 0;
        pbBuf2[offNonZero] = 0xf6;
    }

    RTTestSubDone(hTest);
}
2568
2569
/** 1536 byte (384 dword) test buffer used by the ASMMemZero32/ASMMemFill32 workers. */
typedef struct TSTBUF32 { uint32_t au32[384]; } TSTBUF32;
2571
2572DECLINLINE(void) tstASMMemZero32Worker(TSTBUF32 *pBuf)
2573{
2574 ASMMemZero32(pBuf, sizeof(*pBuf));
2575 for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
2576 if (pBuf->au32[i])
2577 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
2578 if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
2579 RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
2580 if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
2581 RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");
2582
2583 memset(pBuf, 0xfe, sizeof(*pBuf));
2584 ASMMemZero32(pBuf, sizeof(*pBuf));
2585 for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
2586 if (pBuf->au32[i])
2587 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
2588 if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
2589 RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
2590 if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
2591 RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");
2592}
2593
2594
2595static void tstASMMemZero32(void)
2596{
2597 RTTestSub(g_hTest, "ASMMemZero32");
2598
2599 struct
2600 {
2601 uint64_t u64Magic1;
2602 uint8_t abPage[PAGE_SIZE - 32];
2603 uint64_t u64Magic2;
2604 } Buf1, Buf2, Buf3;
2605
2606 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2607 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
2608 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2609 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2610 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
2611 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2612 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2613 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
2614 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2615 ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
2616 ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
2617 ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
2618 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2619 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2620 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2621 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2622 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2623 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2624 {
2625 RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
2626 }
2627 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
2628 if (Buf1.abPage[i])
2629 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2630 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
2631 if (Buf2.abPage[i])
2632 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2633 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
2634 if (Buf3.abPage[i])
2635 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2636
2637 DO_SIMPLE_TEST_NO_SUB(tstASMMemZero32Worker, TSTBUF32);
2638}
2639
2640
2641DECLINLINE(void) tstASMMemFill32Worker(TSTBUF32 *pBuf)
2642{
2643 ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1));
2644 for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
2645 if (pBuf->au32[i] != UINT32_C(0xf629bce1))
2646 RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
2647 if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1)) != NULL)
2648 RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0xf629bce1)) returns non-NULL after ASMMemFill32!\n");
2649
2650 memset(pBuf, 0xfe, sizeof(*pBuf));
2651 ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678));
2652 for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
2653 if (pBuf->au32[i] != UINT32_C(0x12345678))
2654 RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
2655 if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678)) != NULL)
2656 RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0x12345678)) returns non-NULL after ASMMemFill32!\n");
2657}
2658
2659static void tstASMMemFill32(void)
2660{
2661 RTTestSub(g_hTest, "ASMMemFill32");
2662
2663 struct
2664 {
2665 uint64_t u64Magic1;
2666 uint32_t au32Page[PAGE_SIZE / 4];
2667 uint64_t u64Magic2;
2668 } Buf1;
2669 struct
2670 {
2671 uint64_t u64Magic1;
2672 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
2673 uint64_t u64Magic2;
2674 } Buf2;
2675 struct
2676 {
2677 uint64_t u64Magic1;
2678 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
2679 uint64_t u64Magic2;
2680 } Buf3;
2681
2682 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2683 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
2684 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2685 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2686 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
2687 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2688 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2689 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
2690 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2691 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
2692 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
2693 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
2694 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2695 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2696 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2697 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2698 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2699 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2700 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
2701 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
2702 if (Buf1.au32Page[i] != 0xdeadbeef)
2703 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
2704 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
2705 if (Buf2.au32Page[i] != 0xcafeff01)
2706 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
2707 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
2708 if (Buf3.au32Page[i] != 0xf00dd00f)
2709 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
2710
2711 DO_SIMPLE_TEST_NO_SUB(tstASMMemFill32Worker, TSTBUF32);
2712}
2713
2714
2715static void tstASMProbe(RTTEST hTest)
2716{
2717 RTTestSub(hTest, "ASMProbeReadByte/Buffer");
2718
2719 uint8_t b = 42;
2720 RTTESTI_CHECK(ASMProbeReadByte(&b) == 42);
2721 ASMProbeReadBuffer(&b, sizeof(b));
2722
2723 for (uint32_t cPages = 1; cPages < 16; cPages++)
2724 {
2725 uint8_t *pbBuf1 = (uint8_t *)RTTestGuardedAllocHead(hTest, cPages * PAGE_SIZE);
2726 uint8_t *pbBuf2 = (uint8_t *)RTTestGuardedAllocTail(hTest, cPages * PAGE_SIZE);
2727 RTTESTI_CHECK_RETV(pbBuf1 && pbBuf2);
2728
2729 memset(pbBuf1, 0xf6, cPages * PAGE_SIZE);
2730 memset(pbBuf2, 0x42, cPages * PAGE_SIZE);
2731
2732 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[cPages * PAGE_SIZE - 1]) == 0xf6);
2733 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[cPages * PAGE_SIZE - 1]) == 0x42);
2734 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[0]) == 0xf6);
2735 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[0]) == 0x42);
2736
2737 ASMProbeReadBuffer(pbBuf1, cPages * PAGE_SIZE);
2738 ASMProbeReadBuffer(pbBuf2, cPages * PAGE_SIZE);
2739 }
2740}
2741
2742
2743static void tstASMMisc(void)
2744{
2745 RTTestSub(g_hTest, "Misc");
2746 for (uint32_t i = 0; i < 20; i++)
2747 {
2748 ASMWriteFence();
2749 ASMCompilerBarrier();
2750 ASMReadFence();
2751 ASMNopPause();
2752 ASMSerializeInstruction();
2753 ASMMemoryFence();
2754 }
2755}
2756
2757
/**
 * Tests the bit scanning helpers: ASMBitFirstSetU16/32/64,
 * ASMBitLastSetU16/32/64, ASMCountLeadingZerosU16/32/64 and
 * ASMCountTrailingZerosU16/32/64.
 *
 * Conventions verified by the zero-input checks below: the FirstSet/LastSet
 * functions return a 1-based bit index and 0 for a zero input, while the
 * CountLeading/TrailingZeros functions return the full bit width for zero.
 */
static void tstASMBit(void)
{
    /* Lowest set bit, 1-based; 0 when no bit is set. */
    RTTestSub(g_hTest, "ASMBitFirstSetU16");
    RTTESTI_CHECK(ASMBitFirstSetU16(0x0000) == 0);
    RTTESTI_CHECK(ASMBitFirstSetU16(0x0001) == 1);
    RTTESTI_CHECK(ASMBitFirstSetU16(0x8000) == 16);
    RTTESTI_CHECK(ASMBitFirstSetU16(0x0ef0) == 5);
    /* Walk a single bit and an ascending mask across all positions. */
    for (unsigned iBit = 0; iBit < 16; iBit++)
    {
        RTTESTI_CHECK(ASMBitFirstSetU16((uint16_t)1 << iBit) == iBit + 1);
        RTTESTI_CHECK(ASMBitFirstSetU16(UINT16_MAX << iBit) == iBit + 1);
    }

    RTTestSub(g_hTest, "ASMBitFirstSetU32");
    RTTESTI_CHECK(ASMBitFirstSetU32(UINT32_C(0x00000000)) == 0);
    RTTESTI_CHECK(ASMBitFirstSetU32(UINT32_C(0x00000001)) == 1);
    RTTESTI_CHECK(ASMBitFirstSetU32(UINT32_C(0x80000000)) == 32);
    RTTESTI_CHECK(ASMBitFirstSetU32(UINT32_C(0x0efff0f0)) == 5);
    for (unsigned iBit = 0; iBit < 32; iBit++)
    {
        RTTESTI_CHECK(ASMBitFirstSetU32((uint32_t)1 << iBit) == iBit + 1);
        RTTESTI_CHECK(ASMBitFirstSetU32(UINT32_MAX << iBit) == iBit + 1);
    }

    RTTestSub(g_hTest, "ASMBitFirstSetU64");
    RTTESTI_CHECK(ASMBitFirstSetU64(UINT64_C(0x0000000000000000)) == 0);
    RTTESTI_CHECK(ASMBitFirstSetU64(UINT64_C(0x0000000000000001)) == 1);
    RTTESTI_CHECK(ASMBitFirstSetU64(UINT64_C(0x8000000000000000)) == 64);
    RTTESTI_CHECK(ASMBitFirstSetU64(UINT64_C(0x0effffff0ffff0f0)) == 5);
    for (unsigned iBit = 0; iBit < 64; iBit++)
    {
        RTTESTI_CHECK(ASMBitFirstSetU64((uint64_t)1 << iBit) == iBit + 1);
        RTTESTI_CHECK(ASMBitFirstSetU64(UINT64_MAX << iBit) == iBit + 1);
    }

    /* Highest set bit, 1-based; 0 when no bit is set. */
    RTTestSub(g_hTest, "ASMBitLastSetU16");
    RTTESTI_CHECK(ASMBitLastSetU16(0x0000) == 0);
    RTTESTI_CHECK(ASMBitLastSetU16(0x0001) == 1);
    RTTESTI_CHECK(ASMBitLastSetU16(0x8000) == 16);
    RTTESTI_CHECK(ASMBitLastSetU16(0x0fe0) == 12);
    /* Walk a single bit and a right-aligned mask across all positions. */
    for (unsigned iBit = 0; iBit < 16; iBit++)
    {
        RTTESTI_CHECK(ASMBitLastSetU16(UINT16_C(0x8000) >> (15 - iBit)) == iBit + 1);
        RTTESTI_CHECK(ASMBitLastSetU16(UINT16_MAX >> (15 - iBit)) == iBit + 1);
    }

    RTTestSub(g_hTest, "ASMBitLastSetU32");
    RTTESTI_CHECK(ASMBitLastSetU32(UINT32_C(0x00000000)) == 0);
    RTTESTI_CHECK(ASMBitLastSetU32(UINT32_C(0x00000001)) == 1);
    RTTESTI_CHECK(ASMBitLastSetU32(UINT32_C(0x80000000)) == 32);
    RTTESTI_CHECK(ASMBitLastSetU32(UINT32_C(0x0fffffe0)) == 28);
    for (unsigned iBit = 0; iBit < 32; iBit++)
    {
        RTTESTI_CHECK(ASMBitLastSetU32(UINT32_C(0x80000000) >> (31 - iBit)) == iBit + 1);
        RTTESTI_CHECK(ASMBitLastSetU32(UINT32_MAX >> (31 - iBit)) == iBit + 1);
    }

    RTTestSub(g_hTest, "ASMBitLastSetU64");
    RTTESTI_CHECK(ASMBitLastSetU64(UINT64_C(0x0000000000000000)) == 0);
    RTTESTI_CHECK(ASMBitLastSetU64(UINT64_C(0x0000000000000001)) == 1);
    RTTESTI_CHECK(ASMBitLastSetU64(UINT64_C(0x8000000000000000)) == 64);
    RTTESTI_CHECK(ASMBitLastSetU64(UINT64_C(0x0ffffefff0ffffe0)) == 60);
    for (unsigned iBit = 0; iBit < 64; iBit++)
    {
        RTTESTI_CHECK(ASMBitLastSetU64(UINT64_C(0x8000000000000000) >> (63 - iBit)) == iBit + 1);
        RTTESTI_CHECK(ASMBitLastSetU64(UINT64_MAX >> (63 - iBit)) == iBit + 1);
    }

    /* Leading zero count; returns the bit width for a zero input. */
    RTTestSub(g_hTest, "ASMCountLeadingZerosU16");
    RTTESTI_CHECK(ASMCountLeadingZerosU16(0x0000) == 16);
    RTTESTI_CHECK(ASMCountLeadingZerosU16(0x0001) == 15);
    RTTESTI_CHECK(ASMCountLeadingZerosU16(0x8000) == 0);
    RTTESTI_CHECK(ASMCountLeadingZerosU16(0x0fe0) == 4);
    for (unsigned iBit = 0; iBit < 16; iBit++)
    {
        RTTESTI_CHECK(ASMCountLeadingZerosU16(UINT16_C(0x8000) >> iBit) == iBit);
        RTTESTI_CHECK(ASMCountLeadingZerosU16(UINT16_MAX >> iBit) == iBit);
    }

    RTTestSub(g_hTest, "ASMCountLeadingZerosU32");
    RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_C(0x00000000)) == 32);
    RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_C(0x00000001)) == 31);
    RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_C(0x80000000)) == 0);
    RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_C(0x0fffffe0)) == 4);
    for (unsigned iBit = 0; iBit < 32; iBit++)
    {
        RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_C(0x80000000) >> iBit) == iBit);
        RTTESTI_CHECK(ASMCountLeadingZerosU32(UINT32_MAX >> iBit) == iBit);
    }

    RTTestSub(g_hTest, "ASMCountLeadingZerosU64");
    RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_C(0x0000000000000000)) == 64);
    RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_C(0x0000000000000001)) == 63);
    RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_C(0x8000000000000000)) == 0);
    RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_C(0x0fffffff0f0fffe0)) == 4);
    for (unsigned iBit = 0; iBit < 64; iBit++)
    {
        RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_C(0x8000000000000000) >> iBit) == iBit);
        RTTESTI_CHECK(ASMCountLeadingZerosU64(UINT64_MAX >> iBit) == iBit);
    }

    /* Trailing zero count; returns the bit width for a zero input. */
    RTTestSub(g_hTest, "ASMCountTrailingZerosU16");
    RTTESTI_CHECK(ASMCountTrailingZerosU16(0x0000) == 16);
    RTTESTI_CHECK(ASMCountTrailingZerosU16(0x0001) == 0);
    RTTESTI_CHECK(ASMCountTrailingZerosU16(0x8000) == 15);
    RTTESTI_CHECK(ASMCountTrailingZerosU16(0x0ef0) == 4);
    for (unsigned iBit = 0; iBit < 16; iBit++)
    {
        RTTESTI_CHECK(ASMCountTrailingZerosU16((uint16_t)1 << iBit) == iBit);
        RTTESTI_CHECK(ASMCountTrailingZerosU16(UINT16_MAX << iBit) == iBit);
    }

    RTTestSub(g_hTest, "ASMCountTrailingZerosU32");
    RTTESTI_CHECK(ASMCountTrailingZerosU32(UINT32_C(0x00000000)) == 32);
    RTTESTI_CHECK(ASMCountTrailingZerosU32(UINT32_C(0x00000001)) == 0);
    RTTESTI_CHECK(ASMCountTrailingZerosU32(UINT32_C(0x80000000)) == 31);
    RTTESTI_CHECK(ASMCountTrailingZerosU32(UINT32_C(0x0efffff0)) == 4);
    for (unsigned iBit = 0; iBit < 32; iBit++)
    {
        RTTESTI_CHECK(ASMCountTrailingZerosU32((uint32_t)1 << iBit) == iBit);
        RTTESTI_CHECK(ASMCountTrailingZerosU32(UINT32_MAX << iBit) == iBit);
    }

    RTTestSub(g_hTest, "ASMCountTrailingZerosU64");
    RTTESTI_CHECK(ASMCountTrailingZerosU64(UINT64_C(0x0000000000000000)) == 64);
    RTTESTI_CHECK(ASMCountTrailingZerosU64(UINT64_C(0x0000000000000001)) == 0);
    RTTESTI_CHECK(ASMCountTrailingZerosU64(UINT64_C(0x8000000000000000)) == 63);
    RTTESTI_CHECK(ASMCountTrailingZerosU64(UINT64_C(0x0effff0fefef0ff0)) == 4);
    for (unsigned iBit = 0; iBit < 64; iBit++)
    {
        RTTESTI_CHECK(ASMCountTrailingZerosU64((uint64_t)1 << iBit) == iBit);
        RTTESTI_CHECK(ASMCountTrailingZerosU64(UINT64_MAX << iBit) == iBit);
    }
}
2892
2893
2894static void tstASMMath(void)
2895{
2896 RTTestSub(g_hTest, "Math");
2897
2898 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
2899 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
2900
2901 uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
2902 CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");
2903
2904 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x00000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2905 CHECKVAL(u32, UINT32_C(0x00000001), "%#018RX32");
2906 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10000000), UINT32_C(0x80000000), UINT32_C(0x20000000));
2907 CHECKVAL(u32, UINT32_C(0x40000000), "%#018RX32");
2908 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x76543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2909 CHECKVAL(u32, UINT32_C(0x76543210), "%#018RX32");
2910 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2911 CHECKVAL(u32, UINT32_C(0xffffffff), "%#018RX32");
2912 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2913 CHECKVAL(u32, UINT32_C(0xfffffff0), "%#018RX32");
2914 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2915 CHECKVAL(u32, UINT32_C(0x05c584ce), "%#018RX32");
2916 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2917 CHECKVAL(u32, UINT32_C(0x2d860795), "%#018RX32");
2918
2919#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2920 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2921 CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
2922 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
2923 CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
2924 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2925 CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
2926 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2927 CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
2928 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2929 CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
2930 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2931 CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
2932 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2933 CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");
2934
2935# if 0 /* bird: question is whether this should trap or not:
2936 *
2937 * frank: Of course it must trap:
2938 *
2939 * 0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
2940 *
2941 * During the following division, the quotient must fit into a 32-bit register.
2942 * Therefore the smallest valid divisor is
2943 *
2944 * (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
2945 *
2946 * which is definitely greater than 0x3b9aca00.
2947 *
2948 * bird: No, the C version does *not* crash. So, the question is whether there's any
2949 * code depending on it not crashing.
2950 *
2951 * Of course the assembly versions of the code crash right now for the reasons you've
2952 * given, but the 32-bit MSC version does not crash.
2953 *
2954 * frank: The C version does not crash but delivers incorrect results for this case.
2955 * The reason is
2956 *
2957 * u.s.Hi = (unsigned long)(u64Hi / u32C);
2958 *
2959 * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
2960 * to 32 bit. If using this (optimized and fast) function we should just be sure that
2961 * the operands are in a valid range.
2962 */
2963 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
2964 CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
2965# endif
2966#endif /* AMD64 || X86 */
2967
2968 u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
2969 CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");
2970
2971 int32_t i32;
2972 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
2973 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2974 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
2975 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2976 i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
2977 CHECKVAL(i32, INT32_C(1), "%010RI32");
2978
2979 i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
2980 CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
2981 i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
2982 CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
2983}
2984
2985
2986static void tstASMByteSwap(void)
2987{
2988 RTTestSub(g_hTest, "ASMByteSwap*");
2989
2990 uint64_t u64In = UINT64_C(0x0011223344556677);
2991 uint64_t u64Out = ASMByteSwapU64(u64In);
2992 CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
2993 CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
2994 u64Out = ASMByteSwapU64(u64Out);
2995 CHECKVAL(u64Out, u64In, "%#018RX64");
2996 u64In = UINT64_C(0x0123456789abcdef);
2997 u64Out = ASMByteSwapU64(u64In);
2998 CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
2999 CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
3000 u64Out = ASMByteSwapU64(u64Out);
3001 CHECKVAL(u64Out, u64In, "%#018RX64");
3002 u64In = 0;
3003 u64Out = ASMByteSwapU64(u64In);
3004 CHECKVAL(u64Out, u64In, "%#018RX64");
3005 u64In = UINT64_MAX;
3006 u64Out = ASMByteSwapU64(u64In);
3007 CHECKVAL(u64Out, u64In, "%#018RX64");
3008
3009 uint32_t u32In = UINT32_C(0x00112233);
3010 uint32_t u32Out = ASMByteSwapU32(u32In);
3011 CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
3012 CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
3013 u32Out = ASMByteSwapU32(u32Out);
3014 CHECKVAL(u32Out, u32In, "%#010RX32");
3015 u32In = UINT32_C(0x12345678);
3016 u32Out = ASMByteSwapU32(u32In);
3017 CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
3018 CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
3019 u32Out = ASMByteSwapU32(u32Out);
3020 CHECKVAL(u32Out, u32In, "%#010RX32");
3021 u32In = 0;
3022 u32Out = ASMByteSwapU32(u32In);
3023 CHECKVAL(u32Out, u32In, "%#010RX32");
3024 u32In = UINT32_MAX;
3025 u32Out = ASMByteSwapU32(u32In);
3026 CHECKVAL(u32Out, u32In, "%#010RX32");
3027
3028 uint16_t u16In = UINT16_C(0x0011);
3029 uint16_t u16Out = ASMByteSwapU16(u16In);
3030 CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
3031 CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
3032 u16Out = ASMByteSwapU16(u16Out);
3033 CHECKVAL(u16Out, u16In, "%#06RX16");
3034 u16In = UINT16_C(0x1234);
3035 u16Out = ASMByteSwapU16(u16In);
3036 CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
3037 CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
3038 u16Out = ASMByteSwapU16(u16Out);
3039 CHECKVAL(u16Out, u16In, "%#06RX16");
3040 u16In = 0;
3041 u16Out = ASMByteSwapU16(u16In);
3042 CHECKVAL(u16Out, u16In, "%#06RX16");
3043 u16In = UINT16_MAX;
3044 u16Out = ASMByteSwapU16(u16In);
3045 CHECKVAL(u16Out, u16In, "%#06RX16");
3046}
3047
3048
3049static void tstASMBench(void)
3050{
3051 /*
3052 * Make this static. We don't want to have this located on the stack.
3053 */
3054 static uint8_t volatile s_u8;
3055 static int8_t volatile s_i8;
3056 static uint16_t volatile s_u16;
3057 static int16_t volatile s_i16;
3058 static uint32_t volatile s_u32;
3059 static int32_t volatile s_i32;
3060 static uint64_t volatile s_u64;
3061 static int64_t volatile s_i64;
3062#if defined(RTASM_HAVE_CMP_WRITE_U128) || defined(RTASM_HAVE_CMP_XCHG_U128)
3063 static RTUINT128U volatile s_u128;
3064#endif
3065 static uint8_t s_u8Old;
3066 static int8_t s_i8Old;
3067 static uint16_t s_u16Old;
3068 static int16_t s_i16Old;
3069 static uint32_t s_u32Old;
3070 static int32_t s_i32Old;
3071 static uint64_t s_u64Old;
3072 static int64_t s_i64Old;
3073#if defined(RTASM_HAVE_CMP_WRITE_U128) || defined(RTASM_HAVE_CMP_XCHG_U128)
3074 static RTUINT128U s_u128Old;
3075 RTUINT128U u128Tmp1, u128Tmp2;
3076# ifdef RT_ARCH_AMD64
3077 bool const fHaveCmpXchg128 = RT_BOOL(ASMCpuId_ECX(1) & X86_CPUID_FEATURE_ECX_CX16);
3078# else
3079 bool const fHaveCmpXchg128 = true;
3080# endif
3081#endif
3082 unsigned i;
3083 const unsigned cRounds = _16M; /* Must be multiple of 8 */
3084 uint64_t u64Elapsed;
3085
3086 RTTestSub(g_hTest, "Benchmarking");
3087
3088#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32))
3089# define BENCH(op, str) \
3090 do { \
3091 RTThreadYield(); \
3092 u64Elapsed = ASMReadTSC(); \
3093 for (i = cRounds; i > 0; i--) \
3094 op; \
3095 u64Elapsed = ASMReadTSC() - u64Elapsed; \
3096 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
3097 } while (0)
3098#else
3099# define BENCH(op, str) \
3100 do { \
3101 RTThreadYield(); \
3102 u64Elapsed = RTTimeNanoTS(); \
3103 for (i = cRounds / 8; i > 0; i--) \
3104 { \
3105 op; \
3106 op; \
3107 op; \
3108 op; \
3109 op; \
3110 op; \
3111 op; \
3112 op; \
3113 } \
3114 u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
3115 RTTestValue(g_hTest, str, u64Elapsed * 1000 / cRounds, RTTESTUNIT_PS_PER_CALL); \
3116 } while (0)
3117#endif
3118#if (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)) && !defined(GCC44_32BIT_PIC)
3119# define BENCH_TSC(op, str) \
3120 do { \
3121 RTThreadYield(); \
3122 u64Elapsed = ASMReadTSC(); \
3123 for (i = cRounds / 8; i > 0; i--) \
3124 { \
3125 op; \
3126 op; \
3127 op; \
3128 op; \
3129 op; \
3130 op; \
3131 op; \
3132 op; \
3133 } \
3134 u64Elapsed = ASMReadTSC() - u64Elapsed; \
3135 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
3136 } while (0)
3137#else
3138# define BENCH_TSC(op, str) BENCH(op, str)
3139#endif
3140
3141 BENCH(s_u32 = 0, "s_u32 = 0");
3142 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
3143 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
3144 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
3145 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
3146 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
3147 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
3148 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
3149 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
3150 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
3151 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
3152 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
3153 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
3154 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
3155 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
3156 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
3157 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
3158 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
3159 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
3160 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
3161 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
3162 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
3163 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
3164 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
3165 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
3166 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
3167 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
3168 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
3169 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
3170 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
3171 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
3172 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
3173 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
3174 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
3175 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
3176 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
3177 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
3178 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
3179 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
3180 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
3181 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
3182 BENCH(ASMAtomicCmpXchgU8(&s_u8, 0, 0), "ASMAtomicCmpXchgU8");
3183 BENCH(ASMAtomicCmpXchgS8(&s_i8, 0, 0), "ASMAtomicCmpXchgS8");
3184 //BENCH(ASMAtomicCmpXchgU16(&s_u16, 0, 0), "ASMAtomicCmpXchgU16");
3185 //BENCH(ASMAtomicCmpXchgS16(&s_i16, 0, 0), "ASMAtomicCmpXchgS16");
3186 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
3187 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
3188 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
3189 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
3190#ifdef RTASM_HAVE_CMP_WRITE_U128
3191 if (fHaveCmpXchg128)
3192 BENCH(ASMAtomicCmpWriteU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0), u128Tmp2 = RTUINT128_INIT_C(0, 0)),
3193 "ASMAtomicCmpWriteU128U");
3194#endif
3195 BENCH(ASMAtomicCmpXchgU8(&s_u8, 0, 1), "ASMAtomicCmpXchgU8/neg");
3196 BENCH(ASMAtomicCmpXchgS8(&s_i8, 0, 1), "ASMAtomicCmpXchgS8/neg");
3197 //BENCH(ASMAtomicCmpXchgU16(&s_u16, 0, 1), "ASMAtomicCmpXchgU16/neg");
3198 //BENCH(ASMAtomicCmpXchgS16(&s_s16, 0, 1), "ASMAtomicCmpXchgS16/neg");
3199 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
3200 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
3201 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
3202 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
3203#ifdef RTASM_HAVE_CMP_WRITE_U128
3204 if (fHaveCmpXchg128)
3205 BENCH(ASMAtomicCmpWriteU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0), u128Tmp2 = RTUINT128_INIT_C(0, 1)),
3206 "ASMAtomicCmpWriteU128U/neg");
3207#endif
3208 BENCH(ASMAtomicCmpXchgExU8(&s_u8, 0, 0, &s_u8Old), "ASMAtomicCmpXchgExU8");
3209 BENCH(ASMAtomicCmpXchgExS8(&s_i8, 0, 0, &s_i8Old), "ASMAtomicCmpXchgExS8");
3210 BENCH(ASMAtomicCmpXchgExU16(&s_u16, 0, 0, &s_u16Old), "ASMAtomicCmpXchgExU16");
3211 BENCH(ASMAtomicCmpXchgExS16(&s_i16, 0, 0, &s_i16Old), "ASMAtomicCmpXchgExS16");
3212 BENCH(ASMAtomicCmpXchgExU32(&s_u32, 0, 0, &s_u32Old), "ASMAtomicCmpXchgExU32");
3213 BENCH(ASMAtomicCmpXchgExS32(&s_i32, 0, 0, &s_i32Old), "ASMAtomicCmpXchgExS32");
3214 BENCH(ASMAtomicCmpXchgExU64(&s_u64, 0, 0, &s_u64Old), "ASMAtomicCmpXchgExU64");
3215 BENCH(ASMAtomicCmpXchgExS64(&s_i64, 0, 0, &s_i64Old), "ASMAtomicCmpXchgExS64");
3216#ifdef RTASM_HAVE_CMP_XCHG_U128
3217 if (fHaveCmpXchg128)
3218 BENCH(ASMAtomicCmpXchgU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0), u128Tmp2 = RTUINT128_INIT_C(0, 0), &s_u128Old),
3219 "ASMAtomicCmpXchgU128U");
3220#endif
3221 BENCH(ASMAtomicCmpXchgExU8(&s_u8, 0, 1, &s_u8Old), "ASMAtomicCmpXchgExU8/neg");
3222 BENCH(ASMAtomicCmpXchgExS8(&s_i8, 0, 1, &s_i8Old), "ASMAtomicCmpXchgExS8/neg");
3223 BENCH(ASMAtomicCmpXchgExU16(&s_u16, 0, 1, &s_u16Old), "ASMAtomicCmpXchgExU16/neg");
3224 BENCH(ASMAtomicCmpXchgExS16(&s_i16, 0, 1, &s_i16Old), "ASMAtomicCmpXchgExS16/neg");
3225 BENCH(ASMAtomicCmpXchgExU32(&s_u32, 0, 1, &s_u32Old), "ASMAtomicCmpXchgExU32/neg");
3226 BENCH(ASMAtomicCmpXchgExS32(&s_i32, 0, 1, &s_i32Old), "ASMAtomicCmpXchgExS32/neg");
3227 BENCH(ASMAtomicCmpXchgExU64(&s_u64, 0, 1, &s_u64Old), "ASMAtomicCmpXchgExU64/neg");
3228 BENCH(ASMAtomicCmpXchgExS64(&s_i64, 0, 1, &s_i64Old), "ASMAtomicCmpXchgExS64/neg");
3229#ifdef RTASM_HAVE_CMP_XCHG_U128
3230 if (fHaveCmpXchg128)
3231 BENCH(ASMAtomicCmpXchgU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0), u128Tmp2 = RTUINT128_INIT_C(0, 1), &s_u128Old),
3232 "ASMAtomicCmpXchgU128U/neg");
3233#endif
3234 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
3235 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
3236 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
3237 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
3238 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
3239 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
3240 BENCH(ASMAtomicUoIncU32(&s_u32), "ASMAtomicUoIncU32");
3241 BENCH(ASMAtomicUoDecU32(&s_u32), "ASMAtomicUoDecU32");
3242 BENCH(ASMAtomicUoAndU32(&s_u32, 0xffffffff), "ASMAtomicUoAndU32");
3243 BENCH(ASMAtomicUoOrU32(&s_u32, 0xffffffff), "ASMAtomicUoOrU32");
3244#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
3245 BENCH_TSC(ASMSerializeInstructionCpuId(), "ASMSerializeInstructionCpuId");
3246 BENCH_TSC(ASMSerializeInstructionIRet(), "ASMSerializeInstructionIRet");
3247#endif
3248 BENCH(ASMReadFence(), "ASMReadFence");
3249 BENCH(ASMWriteFence(), "ASMWriteFence");
3250 BENCH(ASMMemoryFence(), "ASMMemoryFence");
3251 BENCH(ASMSerializeInstruction(), "ASMSerializeInstruction");
3252 BENCH(ASMNopPause(), "ASMNopPause");
3253
3254 BENCH(ASMBitFirstSetU16(s_u16), "ASMBitFirstSetU16");
3255 BENCH(ASMBitFirstSetU32(s_u32), "ASMBitFirstSetU32");
3256 BENCH(ASMBitFirstSetU64(s_u32), "ASMBitFirstSetU64");
3257 BENCH(ASMBitLastSetU16(s_u16), "ASMBitLastSetU16");
3258 BENCH(ASMBitLastSetU32(s_u32), "ASMBitLastSetU32");
3259 BENCH(ASMBitLastSetU64(s_u32), "ASMBitLastSetU64");
3260 BENCH(ASMCountLeadingZerosU16(s_u16), "ASMCountLeadingZerosU16");
3261 BENCH(ASMCountLeadingZerosU32(s_u32), "ASMCountLeadingZerosU32");
3262 BENCH(ASMCountLeadingZerosU64(s_u64), "ASMCountLeadingZerosU64");
3263 BENCH(ASMCountTrailingZerosU16(s_u16), "ASMCountTrailingZerosU16");
3264 BENCH(ASMCountTrailingZerosU32(s_u32), "ASMCountTrailingZerosU32");
3265 BENCH(ASMCountTrailingZerosU64(s_u64), "ASMCountTrailingZerosU64");
3266
3267 /* The Darwin gcc does not like this ... */
3268#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
3269 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
3270 BENCH(s_u32 = ASMGetApicIdExt0B(), "ASMGetApicIdExt0B");
3271 BENCH(s_u32 = ASMGetApicIdExt8000001E(), "ASMGetApicIdExt8000001E");
3272#endif
3273#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32))
3274 BENCH(s_u64 = ASMReadTSC(), "ASMReadTSC");
3275#endif
3276#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
3277 uint32_t uAux;
3278 if ( ASMHasCpuId()
3279 && RTX86IsValidExtRange(ASMCpuId_EAX(0x80000000))
3280 && (ASMCpuId_EDX(0x80000001) & X86_CPUID_EXT_FEATURE_EDX_RDTSCP) )
3281 {
3282 BENCH_TSC(ASMSerializeInstructionRdTscp(), "ASMSerializeInstructionRdTscp");
3283 BENCH(s_u64 = ASMReadTscWithAux(&uAux), "ASMReadTscWithAux");
3284 }
3285 union
3286 {
3287 uint64_t u64[2];
3288 RTIDTR Unaligned;
3289 struct
3290 {
3291 uint16_t abPadding[3];
3292 RTIDTR Aligned;
3293 } s;
3294 } uBuf;
3295 Assert(((uintptr_t)&uBuf.Unaligned.pIdt & (sizeof(uintptr_t) - 1)) != 0);
3296 BENCH(ASMGetIDTR(&uBuf.Unaligned), "ASMGetIDTR/unaligned");
3297 Assert(((uintptr_t)&uBuf.s.Aligned.pIdt & (sizeof(uintptr_t) - 1)) == 0);
3298 BENCH(ASMGetIDTR(&uBuf.s.Aligned), "ASMGetIDTR/aligned");
3299#endif
3300
3301#undef BENCH
3302}
3303
3304
3305int main(int argc, char **argv)
3306{
3307 RT_NOREF_PV(argc); RT_NOREF_PV(argv);
3308
3309 int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
3310 if (rc)
3311 return rc;
3312 RTTestBanner(g_hTest);
3313
3314 /*
3315 * Execute the tests.
3316 */
3317#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
3318 tstASMCpuId();
3319 //bruteForceCpuId();
3320#endif
3321#if 1
3322 tstASMAtomicRead();
3323 tstASMAtomicWrite();
3324 tstASMAtomicXchg();
3325 tstASMAtomicCmpXchg();
3326 tstASMAtomicCmpXchgEx();
3327
3328 tstASMAtomicAdd();
3329 tstASMAtomicDecInc();
3330 tstASMAtomicAndOrXor();
3331
3332 tstASMMemZeroPage();
3333 tstASMMemIsZeroPage(g_hTest);
3334 tstASMMemFirstMismatchingU8(g_hTest);
3335 tstASMMemZero32();
3336 tstASMMemFill32();
3337 tstASMProbe(g_hTest);
3338
3339 tstASMMisc();
3340
3341 tstASMBit();
3342
3343 tstASMMath();
3344
3345 tstASMByteSwap();
3346
3347 tstASMBench();
3348#endif
3349
3350 /*
3351 * Show the result.
3352 */
3353 return RTTestSummaryAndDestroy(g_hTest);
3354}
3355
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette