VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstInlineAsm.cpp@ 22509

Last change on this file since 22509 was 22004, checked in by vboxsync, 15 years ago

iprt/asm.h: Added ASMMemIsZeroPage.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 44.5 KB
Line 
1/* $Id: tstInlineAsm.cpp 22004 2009-08-05 18:26:53Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2007 Sun Microsystems, Inc.
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 *
26 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
27 * Clara, CA 95054 USA or visit http://www.sun.com if you need
28 * additional information or have any questions.
29 */
30
31/*******************************************************************************
32* Header Files *
33*******************************************************************************/
34#include <iprt/asm.h>
35#include <iprt/stream.h>
36#include <iprt/string.h>
37#include <iprt/initterm.h>
38#include <iprt/param.h>
39#include <iprt/thread.h>
40#include <iprt/test.h>
41
42
43
44/*******************************************************************************
45* Defined Constants And Macros *
46*******************************************************************************/
/** @def CHECKVAL
 * Checks that the lvalue @a val equals @a expect; on mismatch the RTTest
 * error count is bumped and both values are printed using @a fmt.
 * Note: @a fmt must match the actual types of @a val and @a expect, since
 *       both are passed straight through RTPrintf's varargs. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestIErrorInc(); \
            RTPrintf("%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)

/** @def CHECKOP
 * Evaluates the expression @a op exactly once into a local of type @a type
 * and checks it equals @a expect cast to @a type; on mismatch the RTTest
 * error count is bumped and expected/actual are printed using @a fmt. */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestIErrorInc(); \
            RTPrintf("%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)
67
68
69#if !defined(PIC) || !defined(RT_ARCH_X86)
70const char *getCacheAss(unsigned u)
71{
72 if (u == 0)
73 return "res0 ";
74 if (u == 1)
75 return "direct";
76 if (u >= 256)
77 return "???";
78
79 char *pszRet;
80 RTStrAPrintf(&pszRet, "%d way", u); /* intentional leak! */
81 return pszRet;
82}
83
84
/**
 * Translates the 4-bit L2 cache associativity field (CPUID 0x80000006) into
 * a display string.
 *
 * @returns Read-only string; "????" for out-of-range input.
 * @param   u   The raw associativity field value.
 */
const char *getL2CacheAss(unsigned u)
{
    /* One entry per possible 4-bit field value. */
    static const char * const s_apszAss[] =
    {
        "off ",   "direct", "2 way ", "res3 ",
        "4 way ", "res5 ",  "8 way ", "res7 ",
        "16 way", "res9 ",  "res10 ", "res11 ",
        "res12 ", "res13 ", "res14 ", "fully "
    };
    if (u < sizeof(s_apszAss) / sizeof(s_apszAss[0]))
        return s_apszAss[u];
    return "????";
}
109
110
/**
 * Test and dump all possible info from the CPUID instruction.
 *
 * First cross-checks the ASMCpuId_* convenience variants against the full
 * ASMCpuId() result for leaf 0, then dumps the standard and extended leaves
 * both raw and decoded (extended decoding follows the AMD specs).
 *
 * @remark  Bits shared with the libc cpuid.c program. This all written by me, so no worries.
 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
 */
void tstASMCpuId(void)
{
    unsigned iBit;
    /* Receives the four registers returned by CPUID. */
    struct
    {
        uint32_t uEBX, uEAX, uEDX, uECX;
    } s;
    if (!ASMHasCpuId())
    {
        RTPrintf("tstInlineAsm: warning! CPU doesn't support CPUID\n");
        return;
    }

    /*
     * Try the 0 function and use that for checking the ASMCpuId_* variants.
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);

    /* The single-register variants must agree with the full query. */
    uint32_t u32 = ASMCpuId_ECX(0);
    CHECKVAL(u32, s.uECX, "%x");

    u32 = ASMCpuId_EDX(0);
    CHECKVAL(u32, s.uEDX, "%x");

    /* Seed with wrong values so we notice if the call doesn't write them. */
    uint32_t uECX2 = s.uECX - 1;
    uint32_t uEDX2 = s.uEDX - 1;
    ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);

    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    /*
     * Done testing, dump the information.
     */
    RTPrintf("tstInlineAsm: CPUID Dump\n");
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    const uint32_t cFunctions = s.uEAX; /* EAX of leaf 0 = highest std leaf */

    /* raw dump - deliberately probes 3 leaves past the advertised maximum;
       those rows are marked with a trailing '*'. */
    RTPrintf("\n"
             " RAW Standard CPUIDs\n"
             "Function eax ebx ecx edx\n");
    for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
    {
        ASMCpuId(iStd, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    /* The 12-char vendor string lives in EBX, EDX, ECX (in that order);
       each register's 4 raw bytes are printed as text via %.04s. */
    RTPrintf("Name: %.04s%.04s%.04s\n"
             "Support: 0-%u\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
    bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);

    /*
     * Get Features.
     */
    if (cFunctions >= 1)
    {
        ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n"
                 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                 "Stepping: %d\n"
                 "APIC ID: %#04x\n"
                 "Logical CPUs: %d\n"
                 "CLFLUSH Size: %d\n"
                 "Brand ID: %#04x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                 ASMGetCpuStepping(s.uEAX),
                 (s.uEBX >> 24) & 0xff,
                 (s.uEBX >> 16) & 0xff,
                 (s.uEBX >> 8) & 0xff,
                 (s.uEBX >> 0) & 0xff);

        /* Standard feature flags; unknown bits print their bit number. */
        RTPrintf("Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & RT_BIT(1)) RTPrintf(" VME");
        if (s.uEDX & RT_BIT(2)) RTPrintf(" DE");
        if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & RT_BIT(8)) RTPrintf(" CX8");
        if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & RT_BIT(10)) RTPrintf(" 10");
        if (s.uEDX & RT_BIT(11)) RTPrintf(" SEP");
        if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & RT_BIT(18)) RTPrintf(" PSN");
        if (s.uEDX & RT_BIT(19)) RTPrintf(" CLFSH");
        if (s.uEDX & RT_BIT(20)) RTPrintf(" 20");
        if (s.uEDX & RT_BIT(21)) RTPrintf(" DS");
        if (s.uEDX & RT_BIT(22)) RTPrintf(" ACPI");
        if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & RT_BIT(25)) RTPrintf(" SSE");
        if (s.uEDX & RT_BIT(26)) RTPrintf(" SSE2");
        if (s.uEDX & RT_BIT(27)) RTPrintf(" SS");
        if (s.uEDX & RT_BIT(28)) RTPrintf(" HTT");
        if (s.uEDX & RT_BIT(29)) RTPrintf(" 29");
        if (s.uEDX & RT_BIT(30)) RTPrintf(" 30");
        if (s.uEDX & RT_BIT(31)) RTPrintf(" 31");
        RTPrintf("\n");

        /** @todo check intel docs. */
        /* Only SSE3 and CX16 are decoded here; other bits dump as numbers. */
        RTPrintf("Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTPrintf(" SSE3");
        for (iBit = 1; iBit < 13; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTPrintf(" %d", iBit);
        if (s.uECX & RT_BIT(13)) RTPrintf(" CX16");
        for (iBit = 14; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    /*
     * Extended.
     * Implemented after AMD specs.
     */
    /** @todo check out the intel specs. */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    /* All-zero registers means no extended leaves at all. */
    if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
    {
        RTPrintf("No extended CPUID info? Check the manual on how to detect this...\n");
        return;
    }
    /* EAX holds the max extended leaf; OR in the base in case the CPU only
       reports the low bits. */
    const uint32_t cExtFunctions = s.uEAX | 0x80000000;

    /* raw dump - again probing 3 leaves past the maximum (marked '*'). */
    RTPrintf("\n"
             " RAW Extended CPUIDs\n"
             "Function eax ebx ecx edx\n");
    for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
    {
        ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("%08x %08x %08x %08x %08x%s\n",
                 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTPrintf("Ext Name: %.4s%.4s%.4s\n"
             "Ext Supports: 0x80000000-%#010x\n",
             &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    if (cExtFunctions >= 0x80000001)
    {
        ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n"
                 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                 "Stepping: %d\n"
                 "Brand ID: %#05x\n",
                 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                 ASMGetCpuStepping(s.uEAX),
                 s.uEBX & 0xfff);

        /* Extended feature flags (AMD naming). */
        RTPrintf("Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU");
        if (s.uEDX & RT_BIT(1)) RTPrintf(" VME");
        if (s.uEDX & RT_BIT(2)) RTPrintf(" DE");
        if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE");
        if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC");
        if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR");
        if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE");
        if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE");
        if (s.uEDX & RT_BIT(8)) RTPrintf(" CMPXCHG8B");
        if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC");
        if (s.uEDX & RT_BIT(10)) RTPrintf(" 10");
        if (s.uEDX & RT_BIT(11)) RTPrintf(" SysCallSysRet");
        if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR");
        if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE");
        if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA");
        if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV");
        if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT");
        if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36");
        if (s.uEDX & RT_BIT(18)) RTPrintf(" 18");
        if (s.uEDX & RT_BIT(19)) RTPrintf(" 19");
        if (s.uEDX & RT_BIT(20)) RTPrintf(" NX");
        if (s.uEDX & RT_BIT(21)) RTPrintf(" 21");
        if (s.uEDX & RT_BIT(22)) RTPrintf(" MmxExt");
        if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX");
        if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR");
        if (s.uEDX & RT_BIT(25)) RTPrintf(" FastFXSR");
        if (s.uEDX & RT_BIT(26)) RTPrintf(" 26");
        if (s.uEDX & RT_BIT(27)) RTPrintf(" RDTSCP");
        if (s.uEDX & RT_BIT(28)) RTPrintf(" 28");
        if (s.uEDX & RT_BIT(29)) RTPrintf(" LongMode");
        if (s.uEDX & RT_BIT(30)) RTPrintf(" 3DNowExt");
        if (s.uEDX & RT_BIT(31)) RTPrintf(" 3DNow");
        RTPrintf("\n");

        RTPrintf("Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTPrintf(" LahfSahf");
        if (s.uECX & RT_BIT(1)) RTPrintf(" CmpLegacy");
        if (s.uECX & RT_BIT(2)) RTPrintf(" SVM");
        if (s.uECX & RT_BIT(3)) RTPrintf(" 3");
        if (s.uECX & RT_BIT(4)) RTPrintf(" AltMovCr8");
        for (iBit = 5; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    /* The processor name string: 3 leaves x 4 registers x 4 chars, plus a
       guaranteed terminator from the zero initializer. */
    char szString[4*4*3+1] = {0};
    if (cExtFunctions >= 0x80000002)
        ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
    if (cExtFunctions >= 0x80000003)
        ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
    if (cExtFunctions >= 0x80000004)
        ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
    if (cExtFunctions >= 0x80000002)
        RTPrintf("Full Name: %s\n", szString);

    /* L1 cache / TLB info. */
    if (cExtFunctions >= 0x80000005)
    {
        ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("TLB 2/4M Instr/Uni: %s %3d entries\n"
                 "TLB 2/4M Data: %s %3d entries\n",
                 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
                 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
        RTPrintf("TLB 4K Instr/Uni: %s %3d entries\n"
                 "TLB 4K Data: %s %3d entries\n",
                 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
                 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
        RTPrintf("L1 Instr Cache Line Size: %d bytes\n"
                 "L1 Instr Cache Lines Per Tag: %d\n"
                 "L1 Instr Cache Associativity: %s\n"
                 "L1 Instr Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xff,
                 getCacheAss((s.uEDX >> 16) & 0xff),
                 (s.uEDX >> 24) & 0xff);
        RTPrintf("L1 Data Cache Line Size: %d bytes\n"
                 "L1 Data Cache Lines Per Tag: %d\n"
                 "L1 Data Cache Associativity: %s\n"
                 "L1 Data Cache Size: %d KB\n",
                 (s.uECX >> 0) & 0xff,
                 (s.uECX >> 8) & 0xff,
                 getCacheAss((s.uECX >> 16) & 0xff),
                 (s.uECX >> 24) & 0xff);
    }

    /* L2 cache / TLB info - note the 4-bit associativity fields here. */
    if (cExtFunctions >= 0x80000006)
    {
        ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
                 "L2 TLB 2/4M Data: %s %4d entries\n",
                 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
                 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
        RTPrintf("L2 TLB 4K Instr/Uni: %s %4d entries\n"
                 "L2 TLB 4K Data: %s %4d entries\n",
                 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
                 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
        RTPrintf("L2 Cache Line Size: %d bytes\n"
                 "L2 Cache Lines Per Tag: %d\n"
                 "L2 Cache Associativity: %s\n"
                 "L2 Cache Size: %d KB\n",
                 (s.uEDX >> 0) & 0xff,
                 (s.uEDX >> 8) & 0xf,
                 getL2CacheAss((s.uEDX >> 12) & 0xf),
                 (s.uEDX >> 16) & 0xffff);
    }

    /* Advanced power management features. */
    if (cExtFunctions >= 0x80000007)
    {
        ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("APM Features: ");
        if (s.uEDX & RT_BIT(0)) RTPrintf(" TS");
        if (s.uEDX & RT_BIT(1)) RTPrintf(" FID");
        if (s.uEDX & RT_BIT(2)) RTPrintf(" VID");
        if (s.uEDX & RT_BIT(3)) RTPrintf(" TTP");
        if (s.uEDX & RT_BIT(4)) RTPrintf(" TM");
        if (s.uEDX & RT_BIT(5)) RTPrintf(" STC");
        if (s.uEDX & RT_BIT(6)) RTPrintf(" 6");
        if (s.uEDX & RT_BIT(7)) RTPrintf(" 7");
        if (s.uEDX & RT_BIT(8)) RTPrintf(" TscInvariant");
        for (iBit = 9; iBit < 32; iBit++)
            if (s.uEDX & RT_BIT(iBit))
                RTPrintf(" %d", iBit);
        RTPrintf("\n");
    }

    /* Address sizes and core count. */
    if (cExtFunctions >= 0x80000008)
    {
        ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("Physical Address Width: %d bits\n"
                 "Virtual Address Width: %d bits\n",
                 (s.uEAX >> 0) & 0xff,
                 (s.uEAX >> 8) & 0xff);
        RTPrintf("Physical Core Count: %d\n",
                 ((s.uECX >> 0) & 0xff) + 1); /* field is count - 1 */
        if ((s.uECX >> 12) & 0xf)
            RTPrintf("ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
    }

    /* SVM (AMD-V) leaf. */
    if (cExtFunctions >= 0x8000000a)
    {
        ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTPrintf("SVM Revision: %d (%#x)\n"
                 "Number of Address Space IDs: %d (%#x)\n",
                 s.uEAX & 0xff, s.uEAX & 0xff,
                 s.uEBX, s.uEBX);
    }
}
436#endif /* !PIC || !X86 */
437
438
439static void tstASMAtomicXchgU8(void)
440{
441 struct
442 {
443 uint8_t u8Dummy0;
444 uint8_t u8;
445 uint8_t u8Dummy1;
446 } s;
447
448 s.u8 = 0;
449 s.u8Dummy0 = s.u8Dummy1 = 0x42;
450 CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t);
451 CHECKVAL(s.u8, 1, "%#x");
452
453 CHECKOP(ASMAtomicXchgU8(&s.u8, 0), 1, "%#x", uint8_t);
454 CHECKVAL(s.u8, 0, "%#x");
455
456 CHECKOP(ASMAtomicXchgU8(&s.u8, 0xff), 0, "%#x", uint8_t);
457 CHECKVAL(s.u8, 0xff, "%#x");
458
459 CHECKOP(ASMAtomicXchgU8(&s.u8, 0x87), 0xffff, "%#x", uint8_t);
460 CHECKVAL(s.u8, 0x87, "%#x");
461 CHECKVAL(s.u8Dummy0, 0x42, "%#x");
462 CHECKVAL(s.u8Dummy1, 0x42, "%#x");
463}
464
465
/**
 * Tests ASMAtomicXchgU16: each exchange must return the previous value and
 * store the new one, without touching the adjacent words.
 */
static void tstASMAtomicXchgU16(void)
{
    /* Guard words on either side of the target catch neighbouring writes. */
    struct
    {
        uint16_t u16Dummy0;
        uint16_t u16;
        uint16_t u16Dummy1;
    } s;

    s.u16 = 0;
    s.u16Dummy0 = s.u16Dummy1 = 0x1234;
    CHECKOP(ASMAtomicXchgU16(&s.u16, 1), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 1, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0), 1, "%#x", uint16_t);
    CHECKVAL(s.u16, 0, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0xffff), 0, "%#x", uint16_t);
    CHECKVAL(s.u16, 0xffff, "%#x");

    CHECKOP(ASMAtomicXchgU16(&s.u16, 0x8765), 0xffff, "%#x", uint16_t);
    CHECKVAL(s.u16, 0x8765, "%#x");
    /* The guards must have survived all of the above. */
    CHECKVAL(s.u16Dummy0, 0x1234, "%#x");
    CHECKVAL(s.u16Dummy1, 0x1234, "%#x");
}
491
492
/**
 * Tests ASMAtomicXchgU32: each exchange must return the previous value and
 * store the new one, without touching the adjacent dwords.
 */
static void tstASMAtomicXchgU32(void)
{
    /* Guard dwords on either side of the target catch neighbouring writes. */
    struct
    {
        uint32_t u32Dummy0;
        uint32_t u32;
        uint32_t u32Dummy1;
    } s;

    s.u32 = 0;
    s.u32Dummy0 = s.u32Dummy1 = 0x11223344;

    CHECKOP(ASMAtomicXchgU32(&s.u32, 1), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, 1, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0), 1, "%#x", uint32_t);
    CHECKVAL(s.u32, 0, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, ~0U), 0, "%#x", uint32_t);
    CHECKVAL(s.u32, ~0U, "%#x");

    CHECKOP(ASMAtomicXchgU32(&s.u32, 0x87654321), ~0U, "%#x", uint32_t);
    CHECKVAL(s.u32, 0x87654321, "%#x");

    /* The guards must have survived all of the above. */
    CHECKVAL(s.u32Dummy0, 0x11223344, "%#x");
    CHECKVAL(s.u32Dummy1, 0x11223344, "%#x");
}
520
521
/**
 * Tests ASMAtomicXchgU64: each exchange must return the previous value and
 * store the new one, without touching the adjacent qwords.
 */
static void tstASMAtomicXchgU64(void)
{
    /* Guard qwords on either side of the target catch neighbouring writes. */
    struct
    {
        uint64_t u64Dummy0;
        uint64_t u64;
        uint64_t u64Dummy1;
    } s;

    s.u64 = 0;
    s.u64Dummy0 = s.u64Dummy1 = 0x1122334455667788ULL;

    CHECKOP(ASMAtomicXchgU64(&s.u64, 1), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 1ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0), 1ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, ~0ULL), 0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, ~0ULL, "%#llx");

    CHECKOP(ASMAtomicXchgU64(&s.u64, 0xfedcba0987654321ULL), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(s.u64, 0xfedcba0987654321ULL, "%#llx");

    /* The guards must have survived all of the above. */
    CHECKVAL(s.u64Dummy0, 0x1122334455667788ULL, "%#llx");
    CHECKVAL(s.u64Dummy1, 0x1122334455667788ULL, "%#llx");
}
549
550
/**
 * Tests ASMAtomicXchgPtr: each exchange must return the previous pointer and
 * store the new one.  Exercises NULL, all-bits-set and an arbitrary value.
 */
static void tstASMAtomicXchgPtr(void)
{
    void *pv = NULL;

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)(~(uintptr_t)0)), NULL, "%p", void *);
    CHECKVAL(pv, (void *)(~(uintptr_t)0), "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *);
    CHECKVAL(pv, (void *)0x87654321, "%p");

    CHECKOP(ASMAtomicXchgPtr(&pv, NULL), (void *)0x87654321, "%p", void *);
    CHECKVAL(pv, NULL, "%p");
}
564
565
/**
 * Tests ASMAtomicCmpXchgU32: the write must only happen when the current
 * value equals the comparand, and the bool return must say whether it did.
 */
static void tstASMAtomicCmpXchgU32(void)
{
    uint32_t u32 = 0xffffffff;

    /* Comparand mismatch: value unchanged, returns false. */
    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0), false, "%d", bool);
    CHECKVAL(u32, 0xffffffff, "%x");

    /* Comparand match: value swapped, returns true. */
    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0xffffffff), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0xffffffff), false, "%d", bool);
    CHECKVAL(u32, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0), true, "%d", bool);
    CHECKVAL(u32, 0x8008efd, "%x");
}
582
583
584static void tstASMAtomicCmpXchgU64(void)
585{
586 uint64_t u64 = 0xffffffffffffffULL;
587
588 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0), false, "%d", bool);
589 CHECKVAL(u64, 0xffffffffffffffULL, "%#llx");
590
591 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0xffffffffffffffULL), true, "%d", bool);
592 CHECKVAL(u64, 0, "%x");
593
594 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff), false, "%d", bool);
595 CHECKVAL(u64, 0, "%x");
596
597 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL), false, "%d", bool);
598 CHECKVAL(u64, 0, "%x");
599
600 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool);
601 CHECKVAL(u64, 0x80040008008efdULL, "%#llx");
602}
603
604
/**
 * Tests ASMAtomicCmpXchgExU32: like ASMAtomicCmpXchgU32, but the old value
 * must always be written to the fourth argument, whether the exchange
 * happened or not.
 */
static void tstASMAtomicCmpXchgExU32(void)
{
    uint32_t u32 = 0xffffffff;
    uint32_t u32Old = 0x80005111; /* seeded with garbage to prove it gets written */

    /* Mismatch: no exchange, but u32Old still receives the current value. */
    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0, &u32Old), false, "%d", bool);
    CHECKVAL(u32, 0xffffffff, "%x");
    CHECKVAL(u32Old, 0xffffffff, "%x");

    /* Match: exchange performed, u32Old holds the pre-exchange value. */
    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0xffffffff, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0xffffffff, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0xffffffff, &u32Old), false, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0x8008efd, "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0x8008efd, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0x8008efd, "%x");
}
630
631
/**
 * Tests ASMAtomicCmpXchgExU64: like ASMAtomicCmpXchgU64, but the old value
 * must always be written to the fourth argument, whether the exchange
 * happened or not.  Half-matching comparands must fail.
 */
static void tstASMAtomicCmpXchgExU64(void)
{
    uint64_t u64 = 0xffffffffffffffffULL;
    uint64_t u64Old = 0x8000000051111111ULL; /* seeded with garbage to prove it gets written */

    /* Mismatch: no exchange, but u64Old still receives the current value. */
    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0xffffffffffffffffULL, "%llx");
    CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx");

    /* Match: exchange performed, u64Old holds the pre-exchange value. */
    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0xffffffffffffffffULL, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx");

    /* Only the low dword of the comparand matches: must fail. */
    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    /* Only the high dword of the comparand matches: must fail. */
    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0x80040008008efdULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0x80040008008efdULL, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0x80040008008efdULL, "%llx");
}
661
662
/**
 * Tests ASMAtomicReadU64: the read must return the stored value without
 * modifying it.  Exercises zero, all-bits-set and a mixed bit pattern.
 */
static void tstASMAtomicReadU64(void)
{
    uint64_t u64 = 0;

    CHECKOP(ASMAtomicReadU64(&u64), 0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0ULL, "%#llx");

    u64 = ~0ULL;
    CHECKOP(ASMAtomicReadU64(&u64), ~0ULL, "%#llx", uint64_t);
    CHECKVAL(u64, ~0ULL, "%#llx");

    u64 = 0xfedcba0987654321ULL;
    CHECKOP(ASMAtomicReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t);
    CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx");
}
678
679
/**
 * Tests ASMAtomicAddS32: the add must return the OLD value (per the checks
 * below) and leave the operand at old + addend.
 */
static void tstASMAtomicAddS32(void)
{
    int32_t i32Rc;
    int32_t i32 = 10;
/** Runs @a op, checking that it returns @a rc and that i32 ends up as @a val. */
#define MYCHECK(op, rc, val) \
    do { \
        i32Rc = op; \
        if (i32Rc != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
            RTTestIErrorInc(); \
        } \
        if (i32 != (val)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, val); \
            RTTestIErrorInc(); \
        } \
    } while (0)
    MYCHECK(ASMAtomicAddS32(&i32, 1), 10, 11);
    MYCHECK(ASMAtomicAddS32(&i32, -2), 11, 9);
    MYCHECK(ASMAtomicAddS32(&i32, -9), 9, 0);
    MYCHECK(ASMAtomicAddS32(&i32, -0x7fffffff), 0, -0x7fffffff);
    MYCHECK(ASMAtomicAddS32(&i32, 0), -0x7fffffff, -0x7fffffff);
    MYCHECK(ASMAtomicAddS32(&i32, 0x7fffffff), -0x7fffffff, 0);
    MYCHECK(ASMAtomicAddS32(&i32, 0), 0, 0);
#undef MYCHECK
}
707
708
/**
 * Tests ASMAtomicDecS32 and ASMAtomicIncS32: both must return the NEW value
 * (per the checks below).  Walks down through zero into negatives and back
 * up, ending with alternating dec/inc.
 */
static void tstASMAtomicDecIncS32(void)
{
    int32_t i32Rc;
    int32_t i32 = 10;
/** Runs @a op, checking that it returns @a rc and that i32 equals @a rc too. */
#define MYCHECK(op, rc) \
    do { \
        i32Rc = op; \
        if (i32Rc != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
            RTTestIErrorInc(); \
        } \
        if (i32 != (rc)) \
        { \
            RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, rc); \
            RTTestIErrorInc(); \
        } \
    } while (0)
    MYCHECK(ASMAtomicDecS32(&i32), 9);
    MYCHECK(ASMAtomicDecS32(&i32), 8);
    MYCHECK(ASMAtomicDecS32(&i32), 7);
    MYCHECK(ASMAtomicDecS32(&i32), 6);
    MYCHECK(ASMAtomicDecS32(&i32), 5);
    MYCHECK(ASMAtomicDecS32(&i32), 4);
    MYCHECK(ASMAtomicDecS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicDecS32(&i32), 1);
    MYCHECK(ASMAtomicDecS32(&i32), 0);
    MYCHECK(ASMAtomicDecS32(&i32), -1);
    MYCHECK(ASMAtomicDecS32(&i32), -2);
    MYCHECK(ASMAtomicIncS32(&i32), -1);
    MYCHECK(ASMAtomicIncS32(&i32), 0);
    MYCHECK(ASMAtomicIncS32(&i32), 1);
    MYCHECK(ASMAtomicIncS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
    MYCHECK(ASMAtomicDecS32(&i32), 2);
    MYCHECK(ASMAtomicIncS32(&i32), 3);
#undef MYCHECK
}
750
751
/**
 * Tests ASMAtomicAndU32 and ASMAtomicOrU32 by applying a sequence of masks
 * and checking the accumulated value after each operation.
 */
static void tstASMAtomicAndOrU32(void)
{
    uint32_t u32 = 0xffffffff;

    /* OR with all bits set: no change. */
    ASMAtomicOrU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    /* AND with all bits set: no change. */
    ASMAtomicAndU32(&u32, 0xffffffff);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 0x8f8f8f8f);
    CHECKVAL(u32, 0x8f8f8f8f, "%x");

    /* OR in the complementary nibbles: back to all bits set. */
    ASMAtomicOrU32(&u32, 0x70707070);
    CHECKVAL(u32, 0xffffffff, "%x");

    ASMAtomicAndU32(&u32, 1);
    CHECKVAL(u32, 1, "%x");

    ASMAtomicOrU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000001, "%x");

    ASMAtomicAndU32(&u32, 0x80000000);
    CHECKVAL(u32, 0x80000000, "%x");

    /* AND with zero: clears everything. */
    ASMAtomicAndU32(&u32, 0);
    CHECKVAL(u32, 0, "%x");

    /* OR into zero: takes the mask verbatim. */
    ASMAtomicOrU32(&u32, 0x42424242);
    CHECKVAL(u32, 0x42424242, "%x");
}
783
784
785void tstASMMemZeroPage(void)
786{
787 struct
788 {
789 uint64_t u64Magic1;
790 uint8_t abPage[PAGE_SIZE];
791 uint64_t u64Magic2;
792 } Buf1, Buf2, Buf3;
793
794 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
795 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
796 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
797 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
798 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
799 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
800 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
801 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
802 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
803 ASMMemZeroPage(Buf1.abPage);
804 ASMMemZeroPage(Buf2.abPage);
805 ASMMemZeroPage(Buf3.abPage);
806 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
807 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
808 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
809 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
810 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
811 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
812 {
813 RTPrintf("tstInlineAsm: ASMMemZeroPage violated one/both magic(s)!\n");
814 RTTestIErrorInc();
815 }
816 for (unsigned i = 0; i < sizeof(Buf1.abPage); i++)
817 if (Buf1.abPage[i])
818 {
819 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
820 RTTestIErrorInc();
821 }
822 for (unsigned i = 0; i < sizeof(Buf2.abPage); i++)
823 if (Buf2.abPage[i])
824 {
825 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
826 RTTestIErrorInc();
827 }
828 for (unsigned i = 0; i < sizeof(Buf3.abPage); i++)
829 if (Buf3.abPage[i])
830 {
831 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
832 RTTestIErrorInc();
833 }
834}
835
836
837void tstASMMemIsZeroPage(RTTEST hTest)
838{
839 RTTestSub(hTest, "ASMMemIsZeroPage");
840
841 void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
842 void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
843 RTTESTI_CHECK_RETV(pvPage1 && pvPage2);
844
845 memset(pvPage1, 0, PAGE_SIZE);
846 memset(pvPage2, 0, PAGE_SIZE);
847 RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
848 RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));
849
850 memset(pvPage1, 0xff, PAGE_SIZE);
851 memset(pvPage2, 0xff, PAGE_SIZE);
852 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
853 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
854
855 memset(pvPage1, 0, PAGE_SIZE);
856 memset(pvPage2, 0, PAGE_SIZE);
857 for (unsigned off = 0; off < PAGE_SIZE; off++)
858 {
859 ((uint8_t *)pvPage1)[off] = 1;
860 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
861 ((uint8_t *)pvPage1)[off] = 0;
862
863 ((uint8_t *)pvPage2)[off] = 0x80;
864 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
865 ((uint8_t *)pvPage2)[off] = 0;
866 }
867
868 RTTestSubDone(hTest);
869}
870
871
872void tstASMMemZero32(void)
873{
874 struct
875 {
876 uint64_t u64Magic1;
877 uint8_t abPage[PAGE_SIZE - 32];
878 uint64_t u64Magic2;
879 } Buf1, Buf2, Buf3;
880
881 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
882 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
883 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
884 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
885 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
886 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
887 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
888 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
889 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
890 ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
891 ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
892 ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
893 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
894 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
895 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
896 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
897 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
898 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
899 {
900 RTPrintf("tstInlineAsm: ASMMemZero32 violated one/both magic(s)!\n");
901 RTTestIErrorInc();
902 }
903 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
904 if (Buf1.abPage[i])
905 {
906 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
907 RTTestIErrorInc();
908 }
909 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
910 if (Buf2.abPage[i])
911 {
912 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
913 RTTestIErrorInc();
914 }
915 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
916 if (Buf3.abPage[i])
917 {
918 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
919 RTTestIErrorInc();
920 }
921}
922
923
924void tstASMMemFill32(void)
925{
926 struct
927 {
928 uint64_t u64Magic1;
929 uint32_t au32Page[PAGE_SIZE / 4];
930 uint64_t u64Magic2;
931 } Buf1;
932 struct
933 {
934 uint64_t u64Magic1;
935 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
936 uint64_t u64Magic2;
937 } Buf2;
938 struct
939 {
940 uint64_t u64Magic1;
941 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
942 uint64_t u64Magic2;
943 } Buf3;
944
945 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
946 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
947 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
948 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
949 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
950 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
951 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
952 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
953 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
954 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
955 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
956 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
957 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
958 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
959 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
960 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
961 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
962 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
963 {
964 RTPrintf("tstInlineAsm: ASMMemFill32 violated one/both magic(s)!\n");
965 RTTestIErrorInc();
966 }
967 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
968 if (Buf1.au32Page[i] != 0xdeadbeef)
969 {
970 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
971 RTTestIErrorInc();
972 }
973 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
974 if (Buf2.au32Page[i] != 0xcafeff01)
975 {
976 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
977 RTTestIErrorInc();
978 }
979 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
980 if (Buf3.au32Page[i] != 0xf00dd00f)
981 {
982 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
983 RTTestIErrorInc();
984 }
985}
986
987
988
/**
 * Tests the IPRT mixed 64/32-bit math helpers (multiply, divide and modulo)
 * against precomputed expected values.
 */
void tstASMMath(void)
{
    /* 32x32 -> 64-bit multiplication. */
    uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
    CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");

    /* 64/32 division whose quotient is known to fit in 32 bits. */
    uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
    CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");

    /* (u64 * u32) / u32, including identity cases and large operands. */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
    CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
    CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
    CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
    CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");

#if 0 /* bird: question is whether this should trap or not:
       *
       * frank: Of course it must trap:
       *
       *   0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
       *
       * During the following division, the quotient must fit into a 32-bit register.
       * Therefore the smallest valid divisor is
       *
       *   (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
       *
       * which is definitely greater than  0x3b9aca00.
       *
       * bird: No, the C version does *not* crash. So, the question is whether there's any
       * code depending on it not crashing.
       *
       * Of course the assembly versions of the code crash right now for the reasons you've
       * given, but the 32-bit MSC version does not crash.
       *
       * frank: The C version does not crash but delivers incorrect results for this case.
       * The reason is
       *
       *   u.s.Hi = (unsigned long)(u64Hi / u32C);
       *
       * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
       * to 32 bit. If using this (optimized and fast) function we should just be sure that
       * the operands are in a valid range.
       */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
    CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
#endif

    /* Unsigned 64 mod 32. */
    u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
    CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");

    /* Signed 64 mod 32: the result takes the sign of the dividend. */
    int32_t i32;
    i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
    CHECKVAL(i32, INT32_C(-1), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
    CHECKVAL(i32, INT32_C(-1), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
    CHECKVAL(i32, INT32_C(1), "%010RI32");

    /* Large operands near the INT32 limit. */
    i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
    CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
    CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
}
1060
1061
1062void tstASMByteSwap(void)
1063{
1064 RTPrintf("tstInlineASM: TESTING - ASMByteSwap*\n");
1065
1066 uint64_t u64In = UINT64_C(0x0011223344556677);
1067 uint64_t u64Out = ASMByteSwapU64(u64In);
1068 CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
1069 CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
1070 u64Out = ASMByteSwapU64(u64Out);
1071 CHECKVAL(u64Out, u64In, "%#018RX64");
1072 u64In = UINT64_C(0x0123456789abcdef);
1073 u64Out = ASMByteSwapU64(u64In);
1074 CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
1075 CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
1076 u64Out = ASMByteSwapU64(u64Out);
1077 CHECKVAL(u64Out, u64In, "%#018RX64");
1078 u64In = 0;
1079 u64Out = ASMByteSwapU64(u64In);
1080 CHECKVAL(u64Out, u64In, "%#018RX64");
1081 u64In = ~(uint64_t)0;
1082 u64Out = ASMByteSwapU64(u64In);
1083 CHECKVAL(u64Out, u64In, "%#018RX64");
1084
1085 uint32_t u32In = UINT32_C(0x00112233);
1086 uint32_t u32Out = ASMByteSwapU32(u32In);
1087 CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
1088 CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
1089 u32Out = ASMByteSwapU32(u32Out);
1090 CHECKVAL(u32Out, u32In, "%#010RX32");
1091 u32In = UINT32_C(0x12345678);
1092 u32Out = ASMByteSwapU32(u32In);
1093 CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
1094 CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
1095 u32Out = ASMByteSwapU32(u32Out);
1096 CHECKVAL(u32Out, u32In, "%#010RX32");
1097 u32In = 0;
1098 u32Out = ASMByteSwapU32(u32In);
1099 CHECKVAL(u32Out, u32In, "%#010RX32");
1100 u32In = ~(uint32_t)0;
1101 u32Out = ASMByteSwapU32(u32In);
1102 CHECKVAL(u32Out, u32In, "%#010RX32");
1103
1104 uint16_t u16In = UINT16_C(0x0011);
1105 uint16_t u16Out = ASMByteSwapU16(u16In);
1106 CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
1107 CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
1108 u16Out = ASMByteSwapU16(u16Out);
1109 CHECKVAL(u16Out, u16In, "%#06RX16");
1110 u16In = UINT16_C(0x1234);
1111 u16Out = ASMByteSwapU16(u16In);
1112 CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
1113 CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
1114 u16Out = ASMByteSwapU16(u16Out);
1115 CHECKVAL(u16Out, u16In, "%#06RX16");
1116 u16In = 0;
1117 u16Out = ASMByteSwapU16(u16In);
1118 CHECKVAL(u16Out, u16In, "%#06RX16");
1119 u16In = ~(uint16_t)0;
1120 u16Out = ASMByteSwapU16(u16In);
1121 CHECKVAL(u16Out, u16In, "%#06RX16");
1122}
1123
1124
1125void tstASMBench(void)
1126{
1127 /*
1128 * Make this static. We don't want to have this located on the stack.
1129 */
1130 static uint8_t volatile s_u8;
1131 static int8_t volatile s_i8;
1132 static uint16_t volatile s_u16;
1133 static int16_t volatile s_i16;
1134 static uint32_t volatile s_u32;
1135 static int32_t volatile s_i32;
1136 static uint64_t volatile s_u64;
1137 static int64_t volatile s_i64;
1138 register unsigned i;
1139 const unsigned cRounds = 1000000;
1140 register uint64_t u64Elapsed;
1141
1142 RTPrintf("tstInlineASM: Benchmarking:\n");
1143
1144#define BENCH(op, str) \
1145 RTThreadYield(); \
1146 u64Elapsed = ASMReadTSC(); \
1147 for (i = cRounds; i > 0; i--) \
1148 op; \
1149 u64Elapsed = ASMReadTSC() - u64Elapsed; \
1150 RTPrintf(" %-30s %3llu cycles\n", str, u64Elapsed / cRounds);
1151
1152 BENCH(s_u32 = 0, "s_u32 = 0:");
1153 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8:");
1154 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8:");
1155 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16:");
1156 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16:");
1157 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32:");
1158 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32:");
1159 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64:");
1160 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64:");
1161 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8:");
1162 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8:");
1163 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16:");
1164 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16:");
1165 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32:");
1166 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32:");
1167 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64:");
1168 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64:");
1169 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8:");
1170 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8:");
1171 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16:");
1172 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16:");
1173 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32:");
1174 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32:");
1175 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64:");
1176 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64:");
1177 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32:");
1178 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32:");
1179 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64:");
1180 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64:");
1181 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg:");
1182 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg:");
1183 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg:");
1184 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg:");
1185 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32:");
1186 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32:");
1187 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32:");
1188 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32:");
1189 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32:");
1190 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32:");
1191
1192 RTPrintf("Done.\n");
1193
1194#undef BENCH
1195}
1196
1197
/**
 * Entry point: initializes the RTTest framework, runs all the inline
 * assembly testcases and returns the summarized test status.
 *
 * @returns Test status code from RTTestSummaryAndDestroy (0 on success),
 *          or the RTTestInitAndCreate failure code.
 */
int main(int argc, char *argv[])
{
    RTTEST hTest;
    int rc = RTTestInitAndCreate("tstInlineAsm", &hTest);
    if (rc)
        return rc;
    RTTestBanner(hTest);

    /*
     * Execute the tests.
     */
#if !defined(PIC) || !defined(RT_ARCH_X86)
    /* Skipped for x86 PIC builds -- presumably because of register
       constraints in the CPUID inline assembly; confirm in asm.h. */
    tstASMCpuId();
#endif
    tstASMAtomicXchgU8();
    tstASMAtomicXchgU16();
    tstASMAtomicXchgU32();
    tstASMAtomicXchgU64();
    tstASMAtomicXchgPtr();
    tstASMAtomicCmpXchgU32();
    tstASMAtomicCmpXchgU64();
    tstASMAtomicCmpXchgExU32();
    tstASMAtomicCmpXchgExU64();
    tstASMAtomicReadU64();
    tstASMAtomicAddS32();
    tstASMAtomicDecIncS32();
    tstASMAtomicAndOrU32();
    tstASMMemZeroPage();
    tstASMMemIsZeroPage(hTest);
    tstASMMemZero32();
    tstASMMemFill32();
    tstASMMath();
    tstASMByteSwap();
    tstASMBench();

    /*
     * Show the result.
     */
    return RTTestSummaryAndDestroy(hTest);
}
1238
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette