VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 36549

Last change on this file since 36549 was 33207, checked in by vboxsync, 14 years ago

tstRTInlineAsm: another fix

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 58.3 KB
Line 
1/* $Id: tstRTInlineAsm.cpp 33207 2010-10-18 15:02:47Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2010 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
/*******************************************************************************
*   Header Files                                                               *
*******************************************************************************/
#include <iprt/asm.h>
#include <iprt/asm-math.h>

/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
 * fine. */
#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
#  define GCC44_32BIT_PIC
# endif
#endif

#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
# include <iprt/asm-amd64-x86.h>
#else
# include <iprt/time.h>
#endif
#include <iprt/stream.h>
#include <iprt/string.h>
#include <iprt/param.h>
#include <iprt/thread.h>
#include <iprt/test.h>
#include <iprt/time.h>
54
55
56/*******************************************************************************
57* Defined Constants And Macros *
58*******************************************************************************/
59#define CHECKVAL(val, expect, fmt) \
60 do \
61 { \
62 if ((val) != (expect)) \
63 { \
64 RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
65 } \
66 } while (0)
67
68#define CHECKOP(op, expect, fmt, type) \
69 do \
70 { \
71 type val = op; \
72 if (val != (type)(expect)) \
73 { \
74 RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
75 } \
76 } while (0)
77
78/**
79 * Calls a worker function with different worker variable storage types.
80 */
81#define DO_SIMPLE_TEST(name, type) \
82 do \
83 { \
84 RTTestISub(#name); \
85 type StackVar; \
86 tst ## name ## Worker(&StackVar); \
87 \
88 type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
89 RTTEST_CHECK_BREAK(g_hTest, pVar); \
90 tst ## name ## Worker(pVar); \
91 RTTestGuardedFree(g_hTest, pVar); \
92 \
93 pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
94 RTTEST_CHECK_BREAK(g_hTest, pVar); \
95 tst ## name ## Worker(pVar); \
96 RTTestGuardedFree(g_hTest, pVar); \
97 } while (0)
98
99
100/*******************************************************************************
101* Global Variables *
102*******************************************************************************/
103/** The test instance. */
104static RTTEST g_hTest;
105
106
107
108#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
109
110const char *getCacheAss(unsigned u)
111{
112 if (u == 0)
113 return "res0 ";
114 if (u == 1)
115 return "direct";
116 if (u >= 256)
117 return "???";
118
119 char *pszRet;
120 RTStrAPrintf(&pszRet, "%d way", u); /* intentional leak! */
121 return pszRet;
122}
123
124
125const char *getL2CacheAss(unsigned u)
126{
127 switch (u)
128 {
129 case 0: return "off ";
130 case 1: return "direct";
131 case 2: return "2 way ";
132 case 3: return "res3 ";
133 case 4: return "4 way ";
134 case 5: return "res5 ";
135 case 6: return "8 way ";
136 case 7: return "res7 ";
137 case 8: return "16 way";
138 case 9: return "res9 ";
139 case 10: return "res10 ";
140 case 11: return "res11 ";
141 case 12: return "res12 ";
142 case 13: return "res13 ";
143 case 14: return "res14 ";
144 case 15: return "fully ";
145 default:
146 return "????";
147 }
148}
149
150
151/**
152 * Test and dump all possible info from the CPUID instruction.
153 *
154 * @remark Bits shared with the libc cpuid.c program. This all written by me, so no worries.
155 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
156 */
157void tstASMCpuId(void)
158{
159 RTTestISub("ASMCpuId");
160
161 unsigned iBit;
162 struct
163 {
164 uint32_t uEBX, uEAX, uEDX, uECX;
165 } s;
166 if (!ASMHasCpuId())
167 {
168 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
169 return;
170 }
171
172 /*
173 * Try the 0 function and use that for checking the ASMCpuId_* variants.
174 */
175 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
176
177 uint32_t u32;
178
179 u32 = ASMCpuId_EAX(0);
180 CHECKVAL(u32, s.uEAX, "%x");
181 u32 = ASMCpuId_EBX(0);
182 CHECKVAL(u32, s.uEBX, "%x");
183 u32 = ASMCpuId_ECX(0);
184 CHECKVAL(u32, s.uECX, "%x");
185 u32 = ASMCpuId_EDX(0);
186 CHECKVAL(u32, s.uEDX, "%x");
187
188 uint32_t uECX2 = s.uECX - 1;
189 uint32_t uEDX2 = s.uEDX - 1;
190 ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
191 CHECKVAL(uECX2, s.uECX, "%x");
192 CHECKVAL(uEDX2, s.uEDX, "%x");
193
194 /*
195 * Done testing, dump the information.
196 */
197 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
198 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
199 const uint32_t cFunctions = s.uEAX;
200
201 /* raw dump */
202 RTTestIPrintf(RTTESTLVL_ALWAYS,
203 "\n"
204 " RAW Standard CPUIDs\n"
205 "Function eax ebx ecx edx\n");
206 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
207 {
208 ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
209 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
210 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");
211
212 if (iStd == 0x04 || iStd == 0x0b || iStd == 0x0d || iStd > cFunctions)
213 continue; /* Leaf 04 and leaf 0d output depend on the initial value of ECX
214 * The same seems to apply to invalid standard functions */
215
216 u32 = ASMCpuId_EAX(iStd);
217 CHECKVAL(u32, s.uEAX, "%x");
218 u32 = ASMCpuId_EBX(iStd);
219 CHECKVAL(u32, s.uEBX, "%x");
220 u32 = ASMCpuId_ECX(iStd);
221 CHECKVAL(u32, s.uECX, "%x");
222 u32 = ASMCpuId_EDX(iStd);
223 CHECKVAL(u32, s.uEDX, "%x");
224
225 uECX2 = s.uECX - 1;
226 uEDX2 = s.uEDX - 1;
227 ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
228 CHECKVAL(uECX2, s.uECX, "%x");
229 CHECKVAL(uEDX2, s.uEDX, "%x");
230 }
231
232 /*
233 * Understandable output
234 */
235 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
236 RTTestIPrintf(RTTESTLVL_ALWAYS,
237 "Name: %.04s%.04s%.04s\n"
238 "Support: 0-%u\n",
239 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
240 bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);
241
242 /*
243 * Get Features.
244 */
245 if (cFunctions >= 1)
246 {
247 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
248 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
249 RTTestIPrintf(RTTESTLVL_ALWAYS,
250 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
251 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
252 "Stepping: %d\n"
253 "Type: %d (%s)\n"
254 "APIC ID: %#04x\n"
255 "Logical CPUs: %d\n"
256 "CLFLUSH Size: %d\n"
257 "Brand ID: %#04x\n",
258 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
259 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
260 ASMGetCpuStepping(s.uEAX),
261 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
262 (s.uEBX >> 24) & 0xff,
263 (s.uEBX >> 16) & 0xff,
264 (s.uEBX >> 8) & 0xff,
265 (s.uEBX >> 0) & 0xff);
266
267 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
268 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
269 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
270 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
271 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
272 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
273 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
274 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
275 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
276 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
277 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
278 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
279 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
280 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
281 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
282 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
283 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
284 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
285 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
286 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
287 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
288 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
289 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
290 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
291 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
292 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
293 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
294 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
295 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
296 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
297 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
298 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
299 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
300 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
301
302 /** @todo check intel docs. */
303 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
304 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
305 for (iBit = 1; iBit < 13; iBit++)
306 if (s.uECX & RT_BIT(iBit))
307 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
308 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
309 for (iBit = 14; iBit < 32; iBit++)
310 if (s.uECX & RT_BIT(iBit))
311 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
312 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
313 }
314
315 /*
316 * Extended.
317 * Implemented after AMD specs.
318 */
319 /** @todo check out the intel specs. */
320 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
321 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
322 {
323 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
324 return;
325 }
326 const uint32_t cExtFunctions = s.uEAX | 0x80000000;
327
328 /* raw dump */
329 RTTestIPrintf(RTTESTLVL_ALWAYS,
330 "\n"
331 " RAW Extended CPUIDs\n"
332 "Function eax ebx ecx edx\n");
333 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
334 {
335 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
336 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
337 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");
338
339 if (iExt > cExtFunctions)
340 continue; /* Invalid extended functions seems change the value if ECX changes */
341
342 u32 = ASMCpuId_EAX(iExt);
343 CHECKVAL(u32, s.uEAX, "%x");
344 u32 = ASMCpuId_EBX(iExt);
345 CHECKVAL(u32, s.uEBX, "%x");
346 u32 = ASMCpuId_ECX(iExt);
347 CHECKVAL(u32, s.uECX, "%x");
348 u32 = ASMCpuId_EDX(iExt);
349 CHECKVAL(u32, s.uEDX, "%x");
350
351 uECX2 = s.uECX - 1;
352 uEDX2 = s.uEDX - 1;
353 ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
354 CHECKVAL(uECX2, s.uECX, "%x");
355 CHECKVAL(uEDX2, s.uEDX, "%x");
356 }
357
358 /*
359 * Understandable output
360 */
361 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
362 RTTestIPrintf(RTTESTLVL_ALWAYS,
363 "Ext Name: %.4s%.4s%.4s\n"
364 "Ext Supports: 0x80000000-%#010x\n",
365 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
366
367 if (cExtFunctions >= 0x80000001)
368 {
369 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
370 RTTestIPrintf(RTTESTLVL_ALWAYS,
371 "Family: %#x \tExtended: %#x \tEffective: %#x\n"
372 "Model: %#x \tExtended: %#x \tEffective: %#x\n"
373 "Stepping: %d\n"
374 "Brand ID: %#05x\n",
375 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
376 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
377 ASMGetCpuStepping(s.uEAX),
378 s.uEBX & 0xfff);
379
380 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
381 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
382 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
383 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
384 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
385 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
386 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
387 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
388 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
389 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
390 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
391 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
392 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
393 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
394 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
395 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
396 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
397 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
398 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
399 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
400 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
401 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
402 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
403 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
404 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
405 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
406 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
407 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
408 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
409 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
410 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
411 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
412 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
413 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
414
415 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
416 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
417 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
418 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
419 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
420 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
421 for (iBit = 5; iBit < 32; iBit++)
422 if (s.uECX & RT_BIT(iBit))
423 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
424 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
425 }
426
427 char szString[4*4*3+1] = {0};
428 if (cExtFunctions >= 0x80000002)
429 ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
430 if (cExtFunctions >= 0x80000003)
431 ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
432 if (cExtFunctions >= 0x80000004)
433 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
434 if (cExtFunctions >= 0x80000002)
435 RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);
436
437 if (cExtFunctions >= 0x80000005)
438 {
439 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
440 RTTestIPrintf(RTTESTLVL_ALWAYS,
441 "TLB 2/4M Instr/Uni: %s %3d entries\n"
442 "TLB 2/4M Data: %s %3d entries\n",
443 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
444 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
445 RTTestIPrintf(RTTESTLVL_ALWAYS,
446 "TLB 4K Instr/Uni: %s %3d entries\n"
447 "TLB 4K Data: %s %3d entries\n",
448 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
449 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
450 RTTestIPrintf(RTTESTLVL_ALWAYS,
451 "L1 Instr Cache Line Size: %d bytes\n"
452 "L1 Instr Cache Lines Per Tag: %d\n"
453 "L1 Instr Cache Associativity: %s\n"
454 "L1 Instr Cache Size: %d KB\n",
455 (s.uEDX >> 0) & 0xff,
456 (s.uEDX >> 8) & 0xff,
457 getCacheAss((s.uEDX >> 16) & 0xff),
458 (s.uEDX >> 24) & 0xff);
459 RTTestIPrintf(RTTESTLVL_ALWAYS,
460 "L1 Data Cache Line Size: %d bytes\n"
461 "L1 Data Cache Lines Per Tag: %d\n"
462 "L1 Data Cache Associativity: %s\n"
463 "L1 Data Cache Size: %d KB\n",
464 (s.uECX >> 0) & 0xff,
465 (s.uECX >> 8) & 0xff,
466 getCacheAss((s.uECX >> 16) & 0xff),
467 (s.uECX >> 24) & 0xff);
468 }
469
470 if (cExtFunctions >= 0x80000006)
471 {
472 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
473 RTTestIPrintf(RTTESTLVL_ALWAYS,
474 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
475 "L2 TLB 2/4M Data: %s %4d entries\n",
476 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
477 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
478 RTTestIPrintf(RTTESTLVL_ALWAYS,
479 "L2 TLB 4K Instr/Uni: %s %4d entries\n"
480 "L2 TLB 4K Data: %s %4d entries\n",
481 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
482 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
483 RTTestIPrintf(RTTESTLVL_ALWAYS,
484 "L2 Cache Line Size: %d bytes\n"
485 "L2 Cache Lines Per Tag: %d\n"
486 "L2 Cache Associativity: %s\n"
487 "L2 Cache Size: %d KB\n",
488 (s.uEDX >> 0) & 0xff,
489 (s.uEDX >> 8) & 0xf,
490 getL2CacheAss((s.uEDX >> 12) & 0xf),
491 (s.uEDX >> 16) & 0xffff);
492 }
493
494 if (cExtFunctions >= 0x80000007)
495 {
496 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
497 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
498 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
499 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
500 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
501 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
502 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
503 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
504 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
505 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
506 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
507 for (iBit = 9; iBit < 32; iBit++)
508 if (s.uEDX & RT_BIT(iBit))
509 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
510 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
511 }
512
513 if (cExtFunctions >= 0x80000008)
514 {
515 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
516 RTTestIPrintf(RTTESTLVL_ALWAYS,
517 "Physical Address Width: %d bits\n"
518 "Virtual Address Width: %d bits\n"
519 "Guest Physical Address Width: %d bits\n",
520 (s.uEAX >> 0) & 0xff,
521 (s.uEAX >> 8) & 0xff,
522 (s.uEAX >> 16) & 0xff);
523 RTTestIPrintf(RTTESTLVL_ALWAYS,
524 "Physical Core Count: %d\n",
525 ((s.uECX >> 0) & 0xff) + 1);
526 if ((s.uECX >> 12) & 0xf)
527 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
528 }
529
530 if (cExtFunctions >= 0x8000000a)
531 {
532 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
533 RTTestIPrintf(RTTESTLVL_ALWAYS,
534 "SVM Revision: %d (%#x)\n"
535 "Number of Address Space IDs: %d (%#x)\n",
536 s.uEAX & 0xff, s.uEAX & 0xff,
537 s.uEBX, s.uEBX);
538 }
539}
540
541#endif /* AMD64 || X86 */
542
543DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
544{
545 *pu8 = 0;
546 CHECKOP(ASMAtomicXchgU8(pu8, 1), 0, "%#x", uint8_t);
547 CHECKVAL(*pu8, 1, "%#x");
548
549 CHECKOP(ASMAtomicXchgU8(pu8, 0), 1, "%#x", uint8_t);
550 CHECKVAL(*pu8, 0, "%#x");
551
552 CHECKOP(ASMAtomicXchgU8(pu8, 0xff), 0, "%#x", uint8_t);
553 CHECKVAL(*pu8, 0xff, "%#x");
554
555 CHECKOP(ASMAtomicXchgU8(pu8, 0x87), 0xffff, "%#x", uint8_t);
556 CHECKVAL(*pu8, 0x87, "%#x");
557}
558
559
560static void tstASMAtomicXchgU8(void)
561{
562 DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
563}
564
565
566DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
567{
568 *pu16 = 0;
569
570 CHECKOP(ASMAtomicXchgU16(pu16, 1), 0, "%#x", uint16_t);
571 CHECKVAL(*pu16, 1, "%#x");
572
573 CHECKOP(ASMAtomicXchgU16(pu16, 0), 1, "%#x", uint16_t);
574 CHECKVAL(*pu16, 0, "%#x");
575
576 CHECKOP(ASMAtomicXchgU16(pu16, 0xffff), 0, "%#x", uint16_t);
577 CHECKVAL(*pu16, 0xffff, "%#x");
578
579 CHECKOP(ASMAtomicXchgU16(pu16, 0x8765), 0xffff, "%#x", uint16_t);
580 CHECKVAL(*pu16, 0x8765, "%#x");
581}
582
583
584static void tstASMAtomicXchgU16(void)
585{
586 DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
587}
588
589
590DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
591{
592 *pu32 = 0;
593
594 CHECKOP(ASMAtomicXchgU32(pu32, 1), 0, "%#x", uint32_t);
595 CHECKVAL(*pu32, 1, "%#x");
596
597 CHECKOP(ASMAtomicXchgU32(pu32, 0), 1, "%#x", uint32_t);
598 CHECKVAL(*pu32, 0, "%#x");
599
600 CHECKOP(ASMAtomicXchgU32(pu32, ~UINT32_C(0)), 0, "%#x", uint32_t);
601 CHECKVAL(*pu32, ~UINT32_C(0), "%#x");
602
603 CHECKOP(ASMAtomicXchgU32(pu32, 0x87654321), ~UINT32_C(0), "%#x", uint32_t);
604 CHECKVAL(*pu32, 0x87654321, "%#x");
605}
606
607
608static void tstASMAtomicXchgU32(void)
609{
610 DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
611}
612
613
614DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
615{
616 *pu64 = 0;
617
618 CHECKOP(ASMAtomicXchgU64(pu64, 1), UINT64_C(0), "%#llx", uint64_t);
619 CHECKVAL(*pu64, UINT64_C(1), "%#llx");
620
621 CHECKOP(ASMAtomicXchgU64(pu64, 0), UINT64_C(1), "%#llx", uint64_t);
622 CHECKVAL(*pu64, UINT64_C(0), "%#llx");
623
624 CHECKOP(ASMAtomicXchgU64(pu64, ~UINT64_C(0)), UINT64_C(0), "%#llx", uint64_t);
625 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");
626
627 CHECKOP(ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), ~UINT64_C(0), "%#llx", uint64_t);
628 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
629}
630
631
632static void tstASMAtomicXchgU64(void)
633{
634 DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
635}
636
637
638DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
639{
640 *ppv = NULL;
641
642 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, "%p", void *);
643 CHECKVAL(*ppv, (void *)(~(uintptr_t)0), "%p");
644
645 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *);
646 CHECKVAL(*ppv, (void *)0x87654321, "%p");
647
648 CHECKOP(ASMAtomicXchgPtr(ppv, NULL), (void *)0x87654321, "%p", void *);
649 CHECKVAL(*ppv, NULL, "%p");
650}
651
652
653static void tstASMAtomicXchgPtr(void)
654{
655 DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
656}
657
658
659DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
660{
661 *pu8 = 0xff;
662
663 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0), false, "%d", bool);
664 CHECKVAL(*pu8, 0xff, "%x");
665
666 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, "%d", bool);
667 CHECKVAL(*pu8, 0, "%x");
668
669 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x79, 0xff), false, "%d", bool);
670 CHECKVAL(*pu8, 0, "%x");
671
672 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, "%d", bool);
673 CHECKVAL(*pu8, 0x97, "%x");
674}
675
676
677static void tstASMAtomicCmpXchgU8(void)
678{
679 DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
680}
681
682
683DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
684{
685 *pu32 = UINT32_C(0xffffffff);
686
687 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, 0), false, "%d", bool);
688 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");
689
690 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, "%d", bool);
691 CHECKVAL(*pu32, 0, "%x");
692
693 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff)), false, "%d", bool);
694 CHECKVAL(*pu32, 0, "%x");
695
696 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), 0), true, "%d", bool);
697 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x");
698}
699
700
701static void tstASMAtomicCmpXchgU32(void)
702{
703 DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
704}
705
706
707
708DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
709{
710 *pu64 = UINT64_C(0xffffffffffffff);
711
712 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, 0), false, "%d", bool);
713 CHECKVAL(*pu64, UINT64_C(0xffffffffffffff), "%#llx");
714
715 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, "%d", bool);
716 CHECKVAL(*pu64, 0, "%x");
717
718 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff)), false, "%d", bool);
719 CHECKVAL(*pu64, 0, "%x");
720
721 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000)), false, "%d", bool);
722 CHECKVAL(*pu64, 0, "%x");
723
724 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, "%d", bool);
725 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%#llx");
726}
727
728
729static void tstASMAtomicCmpXchgU64(void)
730{
731 DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
732}
733
734
735DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
736{
737 *pu32 = UINT32_C(0xffffffff);
738 uint32_t u32Old = UINT32_C(0x80005111);
739
740 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, "%d", bool);
741 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");
742 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x");
743
744 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, "%d", bool);
745 CHECKVAL(*pu32, 0, "%x");
746 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x");
747
748 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff), &u32Old), false, "%d", bool);
749 CHECKVAL(*pu32, 0, "%x");
750 CHECKVAL(u32Old, 0, "%x");
751
752 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), 0, &u32Old), true, "%d", bool);
753 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x");
754 CHECKVAL(u32Old, 0, "%x");
755
756 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0x8008efd), &u32Old), true, "%d", bool);
757 CHECKVAL(*pu32, 0, "%x");
758 CHECKVAL(u32Old, UINT32_C(0x8008efd), "%x");
759}
760
761
762static void tstASMAtomicCmpXchgExU32(void)
763{
764 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
765}
766
767
768DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
769{
770 *pu64 = UINT64_C(0xffffffffffffffff);
771 uint64_t u64Old = UINT64_C(0x8000000051111111);
772
773 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, "%d", bool);
774 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%llx");
775 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx");
776
777 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, "%d", bool);
778 CHECKVAL(*pu64, UINT64_C(0), "%llx");
779 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx");
780
781 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0xffffffff, &u64Old), false, "%d", bool);
782 CHECKVAL(*pu64, UINT64_C(0), "%llx");
783 CHECKVAL(u64Old, UINT64_C(0), "%llx");
784
785 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000), &u64Old), false, "%d", bool);
786 CHECKVAL(*pu64, UINT64_C(0), "%llx");
787 CHECKVAL(u64Old, UINT64_C(0), "%llx");
788
789 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0, &u64Old), true, "%d", bool);
790 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%llx");
791 CHECKVAL(u64Old, UINT64_C(0), "%llx");
792
793 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0x80040008008efd), &u64Old), true, "%d", bool);
794 CHECKVAL(*pu64, UINT64_C(0), "%llx");
795 CHECKVAL(u64Old, UINT64_C(0x80040008008efd), "%llx");
796}
797
798
799static void tstASMAtomicCmpXchgExU64(void)
800{
801 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
802}
803
804
805DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
806{
807 *pu64 = 0;
808
809 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0), "%#llx", uint64_t);
810 CHECKVAL(*pu64, UINT64_C(0), "%#llx");
811
812 *pu64 = ~UINT64_C(0);
813 CHECKOP(ASMAtomicReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t);
814 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");
815
816 *pu64 = UINT64_C(0xfedcba0987654321);
817 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t);
818 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
819}
820
821
822static void tstASMAtomicReadU64(void)
823{
824 DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
825}
826
827
828DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
829{
830 *pu64 = 0;
831
832 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0), "%#llx", uint64_t);
833 CHECKVAL(*pu64, UINT64_C(0), "%#llx");
834
835 *pu64 = ~UINT64_C(0);
836 CHECKOP(ASMAtomicUoReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t);
837 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx");
838
839 *pu64 = UINT64_C(0xfedcba0987654321);
840 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t);
841 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx");
842}
843
844
845static void tstASMAtomicUoReadU64(void)
846{
847 DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
848}
849
850
851DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
852{
853 int32_t i32Rc;
854 *pi32 = 10;
855#define MYCHECK(op, rc, val) \
856 do { \
857 i32Rc = op; \
858 if (i32Rc != (rc)) \
859 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
860 if (*pi32 != (val)) \
861 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, val); \
862 } while (0)
863 MYCHECK(ASMAtomicAddS32(pi32, 1), 10, 11);
864 MYCHECK(ASMAtomicAddS32(pi32, -2), 11, 9);
865 MYCHECK(ASMAtomicAddS32(pi32, -9), 9, 0);
866 MYCHECK(ASMAtomicAddS32(pi32, -0x7fffffff), 0, -0x7fffffff);
867 MYCHECK(ASMAtomicAddS32(pi32, 0), -0x7fffffff, -0x7fffffff);
868 MYCHECK(ASMAtomicAddS32(pi32, 0x7fffffff), -0x7fffffff, 0);
869 MYCHECK(ASMAtomicAddS32(pi32, 0), 0, 0);
870#undef MYCHECK
871}
872
873static void tstASMAtomicAddS32(void)
874{
875 DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
876}
877
878
879DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
880{
881 int64_t i64Rc;
882 *pi64 = 10;
883#define MYCHECK(op, rc, val) \
884 do { \
885 i64Rc = op; \
886 if (i64Rc != (rc)) \
887 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %llx expected %llx\n", __FUNCTION__, __LINE__, #op, i64Rc, (int64_t)rc); \
888 if (*pi64 != (val)) \
889 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%llx expected %llx\n", __FUNCTION__, __LINE__, #op, *pi64, (int64_t)(val)); \
890 } while (0)
891 MYCHECK(ASMAtomicAddS64(pi64, 1), 10, 11);
892 MYCHECK(ASMAtomicAddS64(pi64, -2), 11, 9);
893 MYCHECK(ASMAtomicAddS64(pi64, -9), 9, 0);
894 MYCHECK(ASMAtomicAddS64(pi64, -INT64_MAX), 0, -INT64_MAX);
895 MYCHECK(ASMAtomicAddS64(pi64, 0), -INT64_MAX, -INT64_MAX);
896 MYCHECK(ASMAtomicAddS64(pi64, -1), -INT64_MAX, INT64_MIN);
897 MYCHECK(ASMAtomicAddS64(pi64, INT64_MAX), INT64_MIN, -1);
898 MYCHECK(ASMAtomicAddS64(pi64, 1), -1, 0);
899 MYCHECK(ASMAtomicAddS64(pi64, 0), 0, 0);
900#undef MYCHECK
901}
902
903
/** Test driver for ASMAtomicAddS64; runs tstASMAtomicAddS64Worker via
 * DO_SIMPLE_TEST. */
static void tstASMAtomicAddS64(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
}
908
909
910DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
911{
912 int32_t i32Rc;
913 *pi32 = 10;
914#define MYCHECK(op, rc) \
915 do { \
916 i32Rc = op; \
917 if (i32Rc != (rc)) \
918 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \
919 if (*pi32 != (rc)) \
920 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, rc); \
921 } while (0)
922 MYCHECK(ASMAtomicDecS32(pi32), 9);
923 MYCHECK(ASMAtomicDecS32(pi32), 8);
924 MYCHECK(ASMAtomicDecS32(pi32), 7);
925 MYCHECK(ASMAtomicDecS32(pi32), 6);
926 MYCHECK(ASMAtomicDecS32(pi32), 5);
927 MYCHECK(ASMAtomicDecS32(pi32), 4);
928 MYCHECK(ASMAtomicDecS32(pi32), 3);
929 MYCHECK(ASMAtomicDecS32(pi32), 2);
930 MYCHECK(ASMAtomicDecS32(pi32), 1);
931 MYCHECK(ASMAtomicDecS32(pi32), 0);
932 MYCHECK(ASMAtomicDecS32(pi32), -1);
933 MYCHECK(ASMAtomicDecS32(pi32), -2);
934 MYCHECK(ASMAtomicIncS32(pi32), -1);
935 MYCHECK(ASMAtomicIncS32(pi32), 0);
936 MYCHECK(ASMAtomicIncS32(pi32), 1);
937 MYCHECK(ASMAtomicIncS32(pi32), 2);
938 MYCHECK(ASMAtomicIncS32(pi32), 3);
939 MYCHECK(ASMAtomicDecS32(pi32), 2);
940 MYCHECK(ASMAtomicIncS32(pi32), 3);
941 MYCHECK(ASMAtomicDecS32(pi32), 2);
942 MYCHECK(ASMAtomicIncS32(pi32), 3);
943#undef MYCHECK
944}
945
946
/** Test driver for ASMAtomicDecS32/ASMAtomicIncS32; runs
 * tstASMAtomicDecIncS32Worker via DO_SIMPLE_TEST. */
static void tstASMAtomicDecIncS32(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
}
951
952
953DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
954{
955 int64_t i64Rc;
956 *pi64 = 10;
957#define MYCHECK(op, rc) \
958 do { \
959 i64Rc = op; \
960 if (i64Rc != (rc)) \
961 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, rc); \
962 if (*pi64 != (rc)) \
963 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, rc); \
964 } while (0)
965 MYCHECK(ASMAtomicDecS64(pi64), 9);
966 MYCHECK(ASMAtomicDecS64(pi64), 8);
967 MYCHECK(ASMAtomicDecS64(pi64), 7);
968 MYCHECK(ASMAtomicDecS64(pi64), 6);
969 MYCHECK(ASMAtomicDecS64(pi64), 5);
970 MYCHECK(ASMAtomicDecS64(pi64), 4);
971 MYCHECK(ASMAtomicDecS64(pi64), 3);
972 MYCHECK(ASMAtomicDecS64(pi64), 2);
973 MYCHECK(ASMAtomicDecS64(pi64), 1);
974 MYCHECK(ASMAtomicDecS64(pi64), 0);
975 MYCHECK(ASMAtomicDecS64(pi64), -1);
976 MYCHECK(ASMAtomicDecS64(pi64), -2);
977 MYCHECK(ASMAtomicIncS64(pi64), -1);
978 MYCHECK(ASMAtomicIncS64(pi64), 0);
979 MYCHECK(ASMAtomicIncS64(pi64), 1);
980 MYCHECK(ASMAtomicIncS64(pi64), 2);
981 MYCHECK(ASMAtomicIncS64(pi64), 3);
982 MYCHECK(ASMAtomicDecS64(pi64), 2);
983 MYCHECK(ASMAtomicIncS64(pi64), 3);
984 MYCHECK(ASMAtomicDecS64(pi64), 2);
985 MYCHECK(ASMAtomicIncS64(pi64), 3);
986#undef MYCHECK
987}
988
989
/** Test driver for ASMAtomicDecS64/ASMAtomicIncS64; runs
 * tstASMAtomicDecIncS64Worker via DO_SIMPLE_TEST. */
static void tstASMAtomicDecIncS64(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
}
994
995
/**
 * Worker for tstASMAtomicAndOrU32.
 *
 * Interleaves ASMAtomicAndU32/ASMAtomicOrU32 with known masks and checks the
 * stored bit pattern after every step.
 *
 * @param   pu32    The variable to exercise.
 */
DECLINLINE(void) tstASMAtomicAndOrU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);

    /* OR / AND with all bits set must be identity operations. */
    ASMAtomicOrU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0xffffffff));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    /* Mask down to a pattern, then OR the complementary bits back in. */
    ASMAtomicAndU32(pu32, UINT32_C(0x8f8f8f8f));
    CHECKVAL(*pu32, UINT32_C(0x8f8f8f8f), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x70707070));
    CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x");

    /* Isolate the lowest bit, set the sign bit, then isolate the sign bit. */
    ASMAtomicAndU32(pu32, UINT32_C(1));
    CHECKVAL(*pu32, UINT32_C(1), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000001), "%x");

    ASMAtomicAndU32(pu32, UINT32_C(0x80000000));
    CHECKVAL(*pu32, UINT32_C(0x80000000), "%x");

    /* Clear everything, then OR a fresh pattern into zero. */
    ASMAtomicAndU32(pu32, UINT32_C(0));
    CHECKVAL(*pu32, UINT32_C(0), "%x");

    ASMAtomicOrU32(pu32, UINT32_C(0x42424242));
    CHECKVAL(*pu32, UINT32_C(0x42424242), "%x");
}
1027
1028
/** Test driver for ASMAtomicAndU32/ASMAtomicOrU32; runs
 * tstASMAtomicAndOrU32Worker via DO_SIMPLE_TEST. */
static void tstASMAtomicAndOrU32(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrU32, uint32_t);
}
1033
1034
1035DECLINLINE(void) tstASMAtomicAndOrU64Worker(uint64_t volatile *pu64)
1036{
1037 *pu64 = UINT64_C(0xffffffff);
1038
1039 ASMAtomicOrU64(pu64, UINT64_C(0xffffffff));
1040 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1041
1042 ASMAtomicAndU64(pu64, UINT64_C(0xffffffff));
1043 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1044
1045 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f));
1046 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f), "%x");
1047
1048 ASMAtomicOrU64(pu64, UINT64_C(0x70707070));
1049 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x");
1050
1051 ASMAtomicAndU64(pu64, UINT64_C(1));
1052 CHECKVAL(*pu64, UINT64_C(1), "%x");
1053
1054 ASMAtomicOrU64(pu64, UINT64_C(0x80000000));
1055 CHECKVAL(*pu64, UINT64_C(0x80000001), "%x");
1056
1057 ASMAtomicAndU64(pu64, UINT64_C(0x80000000));
1058 CHECKVAL(*pu64, UINT64_C(0x80000000), "%x");
1059
1060 ASMAtomicAndU64(pu64, UINT64_C(0));
1061 CHECKVAL(*pu64, UINT64_C(0), "%x");
1062
1063 ASMAtomicOrU64(pu64, UINT64_C(0x42424242));
1064 CHECKVAL(*pu64, UINT64_C(0x42424242), "%x");
1065
1066 // Same as above, but now 64-bit wide.
1067 ASMAtomicAndU64(pu64, UINT64_C(0));
1068 CHECKVAL(*pu64, UINT64_C(0), "%x");
1069
1070 ASMAtomicOrU64(pu64, UINT64_C(0xffffffffffffffff));
1071 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1072
1073 ASMAtomicAndU64(pu64, UINT64_C(0xffffffffffffffff));
1074 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1075
1076 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f8f8f8f8f));
1077 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f8f8f8f8f), "%x");
1078
1079 ASMAtomicOrU64(pu64, UINT64_C(0x7070707070707070));
1080 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1081
1082 ASMAtomicAndU64(pu64, UINT64_C(1));
1083 CHECKVAL(*pu64, UINT64_C(1), "%x");
1084
1085 ASMAtomicOrU64(pu64, UINT64_C(0x8000000000000000));
1086 CHECKVAL(*pu64, UINT64_C(0x8000000000000001), "%x");
1087
1088 ASMAtomicAndU64(pu64, UINT64_C(0x8000000000000000));
1089 CHECKVAL(*pu64, UINT64_C(0x8000000000000000), "%x");
1090
1091 ASMAtomicAndU64(pu64, UINT64_C(0));
1092 CHECKVAL(*pu64, UINT64_C(0), "%x");
1093
1094 ASMAtomicOrU64(pu64, UINT64_C(0x4242424242424242));
1095 CHECKVAL(*pu64, UINT64_C(0x4242424242424242), "%x");
1096}
1097
1098
/** Test driver for ASMAtomicAndU64/ASMAtomicOrU64; runs
 * tstASMAtomicAndOrU64Worker via DO_SIMPLE_TEST. */
static void tstASMAtomicAndOrU64(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrU64, uint64_t);
}
1103
1104
/** A page-sized byte buffer, so DO_SIMPLE_TEST can hand a whole page to
 * tstASMMemZeroPageWorker as a single test datum.
 * NOTE(review): alignment of this struct is whatever the allocator provides;
 * nothing here guarantees page alignment -- confirm against ASMMemZeroPage's
 * requirements. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];
} TSTPAGE;
1109
1110
1111DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
1112{
1113 for (unsigned j = 0; j < 16; j++)
1114 {
1115 memset(pPage, 0x11 * j, sizeof(*pPage));
1116 ASMMemZeroPage(pPage);
1117 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
1118 if (pPage->ab[i])
1119 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
1120 }
1121}
1122
1123
/** Test driver for ASMMemZeroPage; runs tstASMMemZeroPageWorker via
 * DO_SIMPLE_TEST. */
static void tstASMMemZeroPage(void)
{
    DO_SIMPLE_TEST(ASMMemZeroPage, TSTPAGE);
}
1128
1129
/**
 * Tests ASMMemIsZeroPage.
 *
 * Uses guarded head/tail allocations so any out-of-bounds read by the scanner
 * faults, then checks an all-zero page, an all-0xff page, and finally a
 * single non-zero byte at every possible offset.
 *
 * @param   hTest   The test handle to report against.
 */
void tstASMMemIsZeroPage(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemIsZeroPage");

    /* Guarded at the head resp. tail to catch over/under-runs. */
    void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pvPage1 && pvPage2);

    /* All zeros -> true. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));

    /* All ones -> false. */
    memset(pvPage1, 0xff, PAGE_SIZE);
    memset(pvPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));

    /* A single non-zero byte at each offset must be detected. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    for (unsigned off = 0; off < PAGE_SIZE; off++)
    {
        ((uint8_t *)pvPage1)[off] = 1;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
        ((uint8_t *)pvPage1)[off] = 0;

        ((uint8_t *)pvPage2)[off] = 0x80;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
        ((uint8_t *)pvPage2)[off] = 0;
    }

    RTTestSubDone(hTest);
}
1163
1164
1165void tstASMMemZero32(void)
1166{
1167 RTTestSub(g_hTest, "ASMMemFill32");
1168
1169 struct
1170 {
1171 uint64_t u64Magic1;
1172 uint8_t abPage[PAGE_SIZE - 32];
1173 uint64_t u64Magic2;
1174 } Buf1, Buf2, Buf3;
1175
1176 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
1177 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
1178 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
1179 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
1180 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
1181 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
1182 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
1183 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
1184 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
1185 ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
1186 ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
1187 ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
1188 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
1189 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
1190 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
1191 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
1192 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
1193 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
1194 {
1195 RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
1196 }
1197 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
1198 if (Buf1.abPage[i])
1199 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1200 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
1201 if (Buf2.abPage[i])
1202 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1203 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
1204 if (Buf3.abPage[i])
1205 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1206}
1207
1208
1209void tstASMMemFill32(void)
1210{
1211 RTTestSub(g_hTest, "ASMMemFill32");
1212
1213 struct
1214 {
1215 uint64_t u64Magic1;
1216 uint32_t au32Page[PAGE_SIZE / 4];
1217 uint64_t u64Magic2;
1218 } Buf1;
1219 struct
1220 {
1221 uint64_t u64Magic1;
1222 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
1223 uint64_t u64Magic2;
1224 } Buf2;
1225 struct
1226 {
1227 uint64_t u64Magic1;
1228 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
1229 uint64_t u64Magic2;
1230 } Buf3;
1231
1232 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
1233 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
1234 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
1235 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
1236 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
1237 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
1238 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
1239 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
1240 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
1241 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
1242 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
1243 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
1244 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
1245 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
1246 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
1247 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
1248 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
1249 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
1250 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
1251 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
1252 if (Buf1.au32Page[i] != 0xdeadbeef)
1253 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
1254 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
1255 if (Buf2.au32Page[i] != 0xcafeff01)
1256 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
1257 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
1258 if (Buf3.au32Page[i] != 0xf00dd00f)
1259 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
1260}
1261
1262
1263
/**
 * Tests the asm-math.h helpers: ASMMult2xU32RetU64, ASMDivU64ByU32RetU32,
 * ASMMultU64ByU32DivByU32 (x86/amd64 only), ASMModU64ByU32RetU32 and
 * ASMModS64ByS32RetS32.
 */
void tstASMMath(void)
{
    RTTestSub(g_hTest, "Math");

    /* 0x80000000 * 0x10000000 = 0x0800000000000000 (full 64-bit product). */
    uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
    CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");

    /* ...and dividing it back recovers the first factor. */
    uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
    CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");

#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
    /* u64 * u32 / u32 with full intermediate precision; identity cases first,
       then a couple of non-trivial operand mixes. */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
    CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
    CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
    CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
    CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
    CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");

# if 0 /* bird: question is whether this should trap or not:
        *
        * frank: Of course it must trap:
        *
        *        0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
        *
        * During the following division, the quotient must fit into a 32-bit register.
        * Therefore the smallest valid divisor is
        *
        *        (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
        *
        * which is definitely greater than  0x3b9aca00.
        *
        * bird: No, the C version does *not* crash. So, the question is whether there's any
        * code depending on it not crashing.
        *
        * Of course the assembly versions of the code crash right now for the reasons you've
        * given, but the 32-bit MSC version does not crash.
        *
        * frank: The C version does not crash but delivers incorrect results for this case.
        * The reason is
        *
        *        u.s.Hi = (unsigned long)(u64Hi / u32C);
        *
        * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
        * to 32 bit. If using this (optimized and fast) function we should just be sure that
        * the operands are in a valid range.
        */
    u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
    CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
# endif
#endif /* AMD64 || X86 */

    u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
    CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");

    /* Signed modulo: the result takes the sign of the dividend. */
    int32_t i32;
    i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
    CHECKVAL(i32, INT32_C(-1), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
    CHECKVAL(i32, INT32_C(-1), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
    CHECKVAL(i32, INT32_C(1), "%010RI32");

    i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
    CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
    i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
    CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
}
1339
1340
/**
 * Tests ASMByteSwapU64/U32/U16.
 *
 * For each width: swap a known pattern, check input unchanged and output
 * reversed, swap back to verify the operation is its own inverse, and check
 * the all-zero and all-ones fixed points.
 */
void tstASMByteSwap(void)
{
    RTTestSub(g_hTest, "ASMByteSwap*");

    /* 64-bit. */
    uint64_t u64In = UINT64_C(0x0011223344556677);
    uint64_t u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = UINT64_C(0x0123456789abcdef);
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = 0;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = ~(uint64_t)0;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");

    /* 32-bit. */
    uint32_t u32In = UINT32_C(0x00112233);
    uint32_t u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = UINT32_C(0x12345678);
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = 0;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = ~(uint32_t)0;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");

    /* 16-bit. */
    uint16_t u16In = UINT16_C(0x0011);
    uint16_t u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = UINT16_C(0x1234);
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = 0;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = ~(uint16_t)0;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
}
1402
1403
/**
 * Micro-benchmarks the atomic read/write/xchg/cmpxchg/inc/dec/add operations,
 * reporting the average cost per call via RTTestValue.
 */
void tstASMBench(void)
{
    /*
     * Make this static. We don't want to have this located on the stack.
     */
    static uint8_t volatile  s_u8;
    static int8_t volatile   s_i8;
    static uint16_t volatile s_u16;
    static int16_t volatile  s_i16;
    static uint32_t volatile s_u32;
    static int32_t volatile  s_i32;
    static uint64_t volatile s_u64;
    static int64_t volatile  s_i64;
    register unsigned        i;
    const unsigned           cRounds = _2M;
    register uint64_t        u64Elapsed;

    RTTestSub(g_hTest, "Benchmarking");

/* TSC-based timing variant is disabled (leading 0 in the condition);
   the RTTimeNanoTS variant below is the one in use. */
#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
# define BENCH(op, str) \
    do { \
        RTThreadYield(); \
        u64Elapsed = ASMReadTSC(); \
        for (i = cRounds; i > 0; i--) \
            op; \
        u64Elapsed = ASMReadTSC() - u64Elapsed; \
        RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
    } while (0)
#else
/* Times cRounds iterations of op and reports the per-call average. */
# define BENCH(op, str) \
    do { \
        RTThreadYield(); \
        u64Elapsed = RTTimeNanoTS(); \
        for (i = cRounds; i > 0; i--) \
            op; \
        u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
        RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
    } while (0)
#endif

    /* Plain store as the baseline, then the whole atomic API surface. */
    BENCH(s_u32 = 0, "s_u32 = 0");
    BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
    BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
    BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
    BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
    BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
    BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
    BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
    BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
    BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
    BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
    BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
    BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
    BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
    BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
    BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
    BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
    BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
    BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
    BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
    BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
    BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
    BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
    BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
    BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
    BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
    BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
    BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
    BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
    BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
    BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
    BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
    BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
    BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
    BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
    BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
    BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
    BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
    BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
    BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
    BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
    BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
    BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
    BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
    BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
    /* The /neg variants compare against a value that never matches, so the
       exchange always fails. */
    BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
    BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
    BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
    BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
    BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
    BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
    BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
    BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
    BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
    BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
    /* The Darwin gcc does not like this ... */
#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
#endif

#undef BENCH
}
1507
1508
/**
 * Entry point: initializes the RTTest framework, runs all the tstASM* test
 * groups, and returns the summarized result.
 */
int main(int argc, char *argv[])
{
    int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
    if (rc)
        return rc;
    RTTestBanner(g_hTest);

    /*
     * Execute the tests.
     */
    /* CPUID is x86/amd64 only and skipped for the broken gcc 4.4 32-bit PIC
       setup (see GCC44_32BIT_PIC at the top of the file). */
#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    tstASMCpuId();
#endif
    tstASMAtomicXchgU8();
    tstASMAtomicXchgU16();
    tstASMAtomicXchgU32();
    tstASMAtomicXchgU64();
    tstASMAtomicXchgPtr();
    tstASMAtomicCmpXchgU8();
    tstASMAtomicCmpXchgU32();
    tstASMAtomicCmpXchgU64();
    tstASMAtomicCmpXchgExU32();
    tstASMAtomicCmpXchgExU64();
    tstASMAtomicReadU64();
    tstASMAtomicUoReadU64();

    tstASMAtomicAddS32();
    tstASMAtomicAddS64();
    tstASMAtomicDecIncS32();
    tstASMAtomicDecIncS64();
    tstASMAtomicAndOrU32();
    tstASMAtomicAndOrU64();

    tstASMMemZeroPage();
    tstASMMemIsZeroPage(g_hTest);
    tstASMMemZero32();
    tstASMMemFill32();

    tstASMMath();

    tstASMByteSwap();

    tstASMBench();

    /*
     * Show the result.
     */
    return RTTestSummaryAndDestroy(g_hTest);
}
1558
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette