- Timestamp: Oct 14, 2010 2:47:59 PM (14 years ago)
- svn:sync-xref-src-repo-rev: 66668
- File: 1 edited
trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp
r33130 r33136 47 47 #include <iprt/stream.h> 48 48 #include <iprt/string.h> 49 #include <iprt/initterm.h>50 49 #include <iprt/param.h> 51 50 #include <iprt/thread.h> 52 51 #include <iprt/test.h> 52 #include <iprt/time.h> 53 53 54 54 … … 62 62 if ((val) != (expect)) \ 63 63 { \ 64 RTTestIErrorInc(); \ 65 RTPrintf("%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \ 64 RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \ 66 65 } \ 67 66 } while (0) … … 73 72 if (val != (type)(expect)) \ 74 73 { \ 75 RTTestIErrorInc(); \ 76 RTPrintf("%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \ 74 RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \ 77 75 } \ 78 76 } while (0) 77 78 /** 79 * Calls a worker function with different worker variable storage types. 80 */ 81 #define DO_SIMPLE_TEST(name, type) \ 82 do \ 83 { \ 84 RTTestISub(#name); \ 85 type StackVar; \ 86 tst ## name ## Worker(&StackVar); \ 87 \ 88 type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \ 89 RTTEST_CHECK_BREAK(g_hTest, pVar); \ 90 tst ## name ## Worker(pVar); \ 91 RTTestGuardedFree(g_hTest, pVar); \ 92 \ 93 pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \ 94 RTTEST_CHECK_BREAK(g_hTest, pVar); \ 95 tst ## name ## Worker(pVar); \ 96 RTTestGuardedFree(g_hTest, pVar); \ 97 } while (0) 98 99 100 /******************************************************************************* 101 * Global Variables * 102 *******************************************************************************/ 103 /** The test instance. */ 104 static RTTEST g_hTest; 105 79 106 80 107 … … 130 157 void tstASMCpuId(void) 131 158 { 159 RTTestISub("ASMCpuId"); 160 132 161 unsigned iBit; 133 162 struct … … 137 166 if (!ASMHasCpuId()) 138 167 { 139 RT Printf("tstInlineAsm:warning! CPU doesn't support CPUID\n");168 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n"); 140 169 return; 141 170 } … … 166 195 * Done testing, dump the information. 167 196 */ 168 RT Printf("tstInlineAsm:CPUID Dump\n");197 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n"); 169 198 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 170 199 const uint32_t cFunctions = s.uEAX; 171 200 172 201 /* raw dump */ 173 RTPrintf("\n" 174 " RAW Standard CPUIDs\n" 175 "Function eax ebx ecx edx\n"); 202 RTTestIPrintf(RTTESTLVL_ALWAYS, 203 "\n" 204 " RAW Standard CPUIDs\n" 205 "Function eax ebx ecx edx\n"); 176 206 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++) 177 207 { … … 179 209 continue; /* Leaf 04 output depends on the initial value of ECX */ 180 210 ASMCpuId(iStd, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 181 RT Printf("%08x %08x %08x %08x %08x%s\n",182 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");211 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n", 212 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? 
"" : "*"); 183 213 184 214 u32 = ASMCpuId_EAX(iStd); … … 202 232 */ 203 233 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 204 RTPrintf("Name: %.04s%.04s%.04s\n" 205 "Support: 0-%u\n", 206 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 234 RTTestIPrintf(RTTESTLVL_ALWAYS, 235 "Name: %.04s%.04s%.04s\n" 236 "Support: 0-%u\n", 237 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 207 238 bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX); 208 239 … … 214 245 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" }; 215 246 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 216 RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n" 217 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 218 "Stepping: %d\n" 219 "Type: %d (%s)\n" 220 "APIC ID: %#04x\n" 221 "Logical CPUs: %d\n" 222 "CLFLUSH Size: %d\n" 223 "Brand ID: %#04x\n", 224 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 225 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 226 ASMGetCpuStepping(s.uEAX), 227 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3], 228 (s.uEBX >> 24) & 0xff, 229 (s.uEBX >> 16) & 0xff, 230 (s.uEBX >> 8) & 0xff, 231 (s.uEBX >> 0) & 0xff); 232 233 RTPrintf("Features EDX: "); 234 if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU"); 235 if (s.uEDX & RT_BIT(1)) RTPrintf(" VME"); 236 if (s.uEDX & RT_BIT(2)) RTPrintf(" DE"); 237 if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE"); 238 if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC"); 239 if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR"); 240 if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE"); 241 if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE"); 242 if (s.uEDX & RT_BIT(8)) RTPrintf(" CX8"); 243 if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC"); 244 if (s.uEDX & RT_BIT(10)) RTPrintf(" 10"); 245 if (s.uEDX & RT_BIT(11)) RTPrintf(" SEP"); 246 if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR"); 247 if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE"); 248 if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA"); 249 if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV"); 250 if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT"); 251 if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36"); 252 if (s.uEDX & RT_BIT(18)) RTPrintf(" PSN"); 253 if (s.uEDX & RT_BIT(19)) RTPrintf(" CLFSH"); 254 if (s.uEDX & RT_BIT(20)) RTPrintf(" 20"); 255 if (s.uEDX & RT_BIT(21)) RTPrintf(" DS"); 256 if (s.uEDX & RT_BIT(22)) RTPrintf(" ACPI"); 257 if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX"); 258 if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR"); 259 if (s.uEDX & RT_BIT(25)) RTPrintf(" SSE"); 260 if (s.uEDX & RT_BIT(26)) RTPrintf(" SSE2"); 261 if (s.uEDX & RT_BIT(27)) RTPrintf(" SS"); 262 if (s.uEDX & RT_BIT(28)) RTPrintf(" HTT"); 263 if (s.uEDX & RT_BIT(29)) RTPrintf(" 29"); 264 if (s.uEDX & RT_BIT(30)) RTPrintf(" 30"); 265 if (s.uEDX & RT_BIT(31)) RTPrintf(" 31"); 266 RTPrintf("\n"); 247 RTTestIPrintf(RTTESTLVL_ALWAYS, 248 "Family: %#x \tExtended: %#x \tEffective: %#x\n" 249 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 250 "Stepping: %d\n" 251 "Type: %d (%s)\n" 252 "APIC ID: %#04x\n" 253 "Logical CPUs: %d\n" 254 "CLFLUSH Size: %d\n" 255 "Brand ID: %#04x\n", 256 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 257 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 258 ASMGetCpuStepping(s.uEAX), 259 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3], 260 (s.uEBX >> 24) & 0xff, 261 (s.uEBX >> 16) & 0xff, 262 (s.uEBX >> 8) & 0xff, 263 (s.uEBX >> 0) & 0xff); 264 265 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: "); 266 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU"); 267 if (s.uEDX & RT_BIT(1)) 
RTTestIPrintf(RTTESTLVL_ALWAYS, " VME"); 268 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE"); 269 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE"); 270 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC"); 271 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR"); 272 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE"); 273 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE"); 274 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8"); 275 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC"); 276 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10"); 277 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP"); 278 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR"); 279 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE"); 280 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA"); 281 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV"); 282 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT"); 283 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36"); 284 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN"); 285 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH"); 286 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20"); 287 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS"); 288 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI"); 289 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX"); 290 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR"); 291 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE"); 292 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2"); 293 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS"); 294 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT"); 295 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29"); 296 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30"); 297 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31"); 298 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 267 299 268 300 /** @todo check intel docs. */ 269 RT Printf("Features ECX: ");270 if (s.uECX & RT_BIT(0)) RT Printf(" SSE3");301 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: "); 302 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3"); 271 303 for (iBit = 1; iBit < 13; iBit++) 272 304 if (s.uECX & RT_BIT(iBit)) 273 RT Printf(" %d", iBit);274 if (s.uECX & RT_BIT(13)) RT Printf(" CX16");305 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 306 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16"); 275 307 for (iBit = 14; iBit < 32; iBit++) 276 308 if (s.uECX & RT_BIT(iBit)) 277 RT Printf(" %d", iBit);278 RT Printf("\n");309 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 310 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 279 311 } 280 312 … … 287 319 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX) 288 320 { 289 RT Printf("No extended CPUID info? Check the manual on how to detect this...\n");321 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? 
Check the manual on how to detect this...\n"); 290 322 return; 291 323 } … … 293 325 294 326 /* raw dump */ 295 RTPrintf("\n" 296 " RAW Extended CPUIDs\n" 297 "Function eax ebx ecx edx\n"); 327 RTTestIPrintf(RTTESTLVL_ALWAYS, 328 "\n" 329 " RAW Extended CPUIDs\n" 330 "Function eax ebx ecx edx\n"); 298 331 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++) 299 332 { 300 333 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 301 RT Printf("%08x %08x %08x %08x %08x%s\n",302 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");334 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n", 335 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*"); 303 336 304 337 u32 = ASMCpuId_EAX(iExt); … … 322 355 */ 323 356 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 324 RTPrintf("Ext Name: %.4s%.4s%.4s\n" 325 "Ext Supports: 0x80000000-%#010x\n", 326 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 357 RTTestIPrintf(RTTESTLVL_ALWAYS, 358 "Ext Name: %.4s%.4s%.4s\n" 359 "Ext Supports: 0x80000000-%#010x\n", 360 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 327 361 328 362 if (cExtFunctions >= 0x80000001) 329 363 { 330 364 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 331 RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n" 332 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 333 "Stepping: %d\n" 334 "Brand ID: %#05x\n", 335 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 336 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 337 ASMGetCpuStepping(s.uEAX), 338 s.uEBX & 0xfff); 339 340 RTPrintf("Features EDX: "); 341 if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU"); 342 if (s.uEDX & RT_BIT(1)) RTPrintf(" VME"); 343 if (s.uEDX & RT_BIT(2)) RTPrintf(" DE"); 344 if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE"); 345 if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC"); 346 if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR"); 347 if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE"); 348 if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE"); 349 if (s.uEDX & RT_BIT(8)) RTPrintf(" CMPXCHG8B"); 350 if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC"); 351 if (s.uEDX & RT_BIT(10)) RTPrintf(" 10"); 352 if (s.uEDX & RT_BIT(11)) RTPrintf(" SysCallSysRet"); 353 if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR"); 354 if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE"); 355 if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA"); 356 if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV"); 357 if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT"); 358 if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36"); 359 if (s.uEDX & RT_BIT(18)) RTPrintf(" 18"); 360 if (s.uEDX & RT_BIT(19)) RTPrintf(" 19"); 361 if (s.uEDX & RT_BIT(20)) RTPrintf(" NX"); 362 if (s.uEDX & RT_BIT(21)) RTPrintf(" 21"); 363 if (s.uEDX & RT_BIT(22)) RTPrintf(" MmxExt"); 364 if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX"); 365 if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR"); 366 if (s.uEDX & RT_BIT(25)) RTPrintf(" FastFXSR"); 367 if (s.uEDX & RT_BIT(26)) RTPrintf(" 26"); 368 if (s.uEDX & RT_BIT(27)) RTPrintf(" RDTSCP"); 369 if (s.uEDX & RT_BIT(28)) RTPrintf(" 28"); 370 if (s.uEDX & RT_BIT(29)) RTPrintf(" LongMode"); 371 if (s.uEDX & RT_BIT(30)) RTPrintf(" 3DNowExt"); 372 if (s.uEDX & RT_BIT(31)) RTPrintf(" 3DNow"); 373 RTPrintf("\n"); 374 375 RTPrintf("Features ECX: "); 376 if (s.uECX & RT_BIT(0)) RTPrintf(" LahfSahf"); 377 if (s.uECX & RT_BIT(1)) RTPrintf(" CmpLegacy"); 378 if (s.uECX & RT_BIT(2)) RTPrintf(" SVM"); 379 if (s.uECX & RT_BIT(3)) RTPrintf(" 3"); 380 if (s.uECX & RT_BIT(4)) RTPrintf(" AltMovCr8"); 365 RTTestIPrintf(RTTESTLVL_ALWAYS, 366 "Family: %#x \tExtended: %#x \tEffective: %#x\n" 367 
"Model: %#x \tExtended: %#x \tEffective: %#x\n" 368 "Stepping: %d\n" 369 "Brand ID: %#05x\n", 370 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 371 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 372 ASMGetCpuStepping(s.uEAX), 373 s.uEBX & 0xfff); 374 375 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: "); 376 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU"); 377 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME"); 378 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE"); 379 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE"); 380 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC"); 381 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR"); 382 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE"); 383 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE"); 384 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B"); 385 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC"); 386 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10"); 387 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet"); 388 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR"); 389 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE"); 390 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA"); 391 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV"); 392 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT"); 393 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36"); 394 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18"); 395 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19"); 396 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX"); 397 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21"); 398 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt"); 399 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX"); 400 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR"); 401 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR"); 402 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26"); 403 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP"); 404 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28"); 405 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode"); 406 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt"); 407 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow"); 408 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 409 410 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: "); 411 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf"); 412 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy"); 413 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM"); 414 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3"); 415 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8"); 381 416 for (iBit = 5; iBit < 32; iBit++) 382 417 if (s.uECX & RT_BIT(iBit)) 383 RT Printf(" %d", iBit);384 RT Printf("\n");418 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 419 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 385 420 } 386 421 … … 393 428 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]); 394 429 if (cExtFunctions >= 0x80000002) 395 RT Printf("Full Name: %s\n", szString);430 RTTestIPrintf(RTTESTLVL_ALWAYS, 
"Full Name: %s\n", szString); 396 431 397 432 if (cExtFunctions >= 0x80000005) 398 433 { 399 434 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 400 RTPrintf("TLB 2/4M Instr/Uni: %s %3d entries\n" 401 "TLB 2/4M Data: %s %3d entries\n", 402 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff, 403 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff); 404 RTPrintf("TLB 4K Instr/Uni: %s %3d entries\n" 405 "TLB 4K Data: %s %3d entries\n", 406 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff, 407 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff); 408 RTPrintf("L1 Instr Cache Line Size: %d bytes\n" 409 "L1 Instr Cache Lines Per Tag: %d\n" 410 "L1 Instr Cache Associativity: %s\n" 411 "L1 Instr Cache Size: %d KB\n", 412 (s.uEDX >> 0) & 0xff, 413 (s.uEDX >> 8) & 0xff, 414 getCacheAss((s.uEDX >> 16) & 0xff), 415 (s.uEDX >> 24) & 0xff); 416 RTPrintf("L1 Data Cache Line Size: %d bytes\n" 417 "L1 Data Cache Lines Per Tag: %d\n" 418 "L1 Data Cache Associativity: %s\n" 419 "L1 Data Cache Size: %d KB\n", 420 (s.uECX >> 0) & 0xff, 421 (s.uECX >> 8) & 0xff, 422 getCacheAss((s.uECX >> 16) & 0xff), 423 (s.uECX >> 24) & 0xff); 435 RTTestIPrintf(RTTESTLVL_ALWAYS, 436 "TLB 2/4M Instr/Uni: %s %3d entries\n" 437 "TLB 2/4M Data: %s %3d entries\n", 438 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff, 439 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff); 440 RTTestIPrintf(RTTESTLVL_ALWAYS, 441 "TLB 4K Instr/Uni: %s %3d entries\n" 442 "TLB 4K Data: %s %3d entries\n", 443 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff, 444 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff); 445 RTTestIPrintf(RTTESTLVL_ALWAYS, 446 "L1 Instr Cache Line Size: %d bytes\n" 447 "L1 Instr Cache Lines Per Tag: %d\n" 448 "L1 Instr Cache Associativity: %s\n" 449 "L1 Instr Cache Size: %d KB\n", 450 (s.uEDX >> 0) & 0xff, 451 (s.uEDX >> 8) & 0xff, 452 getCacheAss((s.uEDX >> 16) & 0xff), 453 (s.uEDX >> 24) & 0xff); 454 RTTestIPrintf(RTTESTLVL_ALWAYS, 455 "L1 Data Cache Line Size: %d bytes\n" 456 "L1 Data Cache Lines Per Tag: %d\n" 457 "L1 Data Cache Associativity: %s\n" 458 "L1 Data Cache Size: %d KB\n", 459 (s.uECX >> 0) & 0xff, 460 (s.uECX >> 8) & 0xff, 461 getCacheAss((s.uECX >> 16) & 0xff), 462 (s.uECX >> 24) & 0xff); 424 463 } 425 464 … … 427 466 { 428 467 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 429 RTPrintf("L2 TLB 2/4M Instr/Uni: %s %4d entries\n" 430 "L2 TLB 2/4M Data: %s %4d entries\n", 431 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff, 432 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff); 433 RTPrintf("L2 TLB 4K Instr/Uni: %s %4d entries\n" 434 "L2 TLB 4K Data: %s %4d entries\n", 435 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff, 436 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff); 437 RTPrintf("L2 Cache Line Size: %d bytes\n" 438 "L2 Cache Lines Per Tag: %d\n" 439 "L2 Cache Associativity: %s\n" 440 "L2 Cache Size: %d KB\n", 441 (s.uEDX >> 0) & 0xff, 442 (s.uEDX >> 8) & 0xf, 443 getL2CacheAss((s.uEDX >> 12) & 0xf), 444 (s.uEDX >> 16) & 0xffff); 468 RTTestIPrintf(RTTESTLVL_ALWAYS, 469 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n" 470 "L2 TLB 2/4M Data: %s %4d entries\n", 471 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff, 472 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff); 473 RTTestIPrintf(RTTESTLVL_ALWAYS, 474 "L2 TLB 4K Instr/Uni: %s %4d entries\n" 475 "L2 TLB 4K Data: %s %4d entries\n", 476 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff, 477 getL2CacheAss((s.uEBX >> 
28) & 0xf), (s.uEBX >> 16) & 0xfff); 478 RTTestIPrintf(RTTESTLVL_ALWAYS, 479 "L2 Cache Line Size: %d bytes\n" 480 "L2 Cache Lines Per Tag: %d\n" 481 "L2 Cache Associativity: %s\n" 482 "L2 Cache Size: %d KB\n", 483 (s.uEDX >> 0) & 0xff, 484 (s.uEDX >> 8) & 0xf, 485 getL2CacheAss((s.uEDX >> 12) & 0xf), 486 (s.uEDX >> 16) & 0xffff); 445 487 } 446 488 … … 448 490 { 449 491 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 450 RT Printf("APM Features: ");451 if (s.uEDX & RT_BIT(0)) RT Printf(" TS");452 if (s.uEDX & RT_BIT(1)) RT Printf(" FID");453 if (s.uEDX & RT_BIT(2)) RT Printf(" VID");454 if (s.uEDX & RT_BIT(3)) RT Printf(" TTP");455 if (s.uEDX & RT_BIT(4)) RT Printf(" TM");456 if (s.uEDX & RT_BIT(5)) RT Printf(" STC");457 if (s.uEDX & RT_BIT(6)) RT Printf(" 6");458 if (s.uEDX & RT_BIT(7)) RT Printf(" 7");459 if (s.uEDX & RT_BIT(8)) RT Printf(" TscInvariant");492 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: "); 493 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS"); 494 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID"); 495 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID"); 496 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP"); 497 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM"); 498 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC"); 499 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6"); 500 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7"); 501 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant"); 460 502 for (iBit = 9; iBit < 32; iBit++) 461 503 if (s.uEDX & RT_BIT(iBit)) 462 RT Printf(" %d", iBit);463 RT Printf("\n");504 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 505 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 464 506 } 465 507 … … 467 509 { 468 510 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 469 RTPrintf("Physical Address Width: %d bits\n" 470 "Virtual Address Width: %d bits\n" 471 "Guest Physical Address Width: %d bits\n", 472 (s.uEAX >> 0) & 0xff, 473 (s.uEAX >> 8) & 0xff, 474 (s.uEAX >> 16) & 0xff); 475 RTPrintf("Physical Core Count: %d\n", 476 ((s.uECX >> 0) & 0xff) + 1); 511 RTTestIPrintf(RTTESTLVL_ALWAYS, 512 "Physical Address Width: %d bits\n" 513 "Virtual Address Width: %d bits\n" 514 "Guest Physical Address Width: %d bits\n", 515 (s.uEAX >> 0) & 0xff, 516 (s.uEAX >> 8) & 0xff, 517 (s.uEAX >> 16) & 0xff); 518 RTTestIPrintf(RTTESTLVL_ALWAYS, 519 "Physical Core Count: %d\n", 520 ((s.uECX >> 0) & 0xff) + 1); 477 521 if ((s.uECX >> 12) & 0xf) 478 RT Printf("ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);522 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf); 479 523 } 480 524 … … 482 526 { 483 527 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 484 RTPrintf("SVM Revision: %d (%#x)\n" 485 "Number of Address Space IDs: %d (%#x)\n", 486 s.uEAX & 0xff, s.uEAX & 0xff, 487 s.uEBX, s.uEBX); 528 RTTestIPrintf(RTTESTLVL_ALWAYS, 529 "SVM Revision: %d (%#x)\n" 530 "Number of Address Space IDs: %d (%#x)\n", 531 s.uEAX & 0xff, s.uEAX & 0xff, 532 s.uEBX, s.uEBX); 488 533 } 489 534 } … … 491 536 #endif /* AMD64 || X86 */ 492 537 538 DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8) 539 { 540 *pu8 = 0; 541 CHECKOP(ASMAtomicXchgU8(pu8, 1), 0, "%#x", uint8_t); 542 CHECKVAL(*pu8, 1, "%#x"); 543 544 CHECKOP(ASMAtomicXchgU8(pu8, 0), 1, "%#x", uint8_t); 545 CHECKVAL(*pu8, 0, "%#x"); 546 547 CHECKOP(ASMAtomicXchgU8(pu8, 0xff), 0, "%#x", uint8_t); 548 CHECKVAL(*pu8, 0xff, "%#x"); 549 550 
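    /* Note: CHECKOP casts the expected value to the result type before comparing,
       so the 0xffff passed below is compared as (uint8_t)0xff, i.e. the value
       stored by the preceding exchange. */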
CHECKOP(ASMAtomicXchgU8(pu8, 0x87), 0xffff, "%#x", uint8_t); 551 CHECKVAL(*pu8, 0x87, "%#x"); 552 } 553 554 493 555 static void tstASMAtomicXchgU8(void) 494 556 { 495 struct 496 { 497 uint8_t u8Dummy0; 498 uint8_t u8; 499 uint8_t u8Dummy1; 500 } s; 501 502 s.u8 = 0; 503 s.u8Dummy0 = s.u8Dummy1 = 0x42; 504 CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t); 505 CHECKVAL(s.u8, 1, "%#x"); 506 507 CHECKOP(ASMAtomicXchgU8(&s.u8, 0), 1, "%#x", uint8_t); 508 CHECKVAL(s.u8, 0, "%#x"); 509 510 CHECKOP(ASMAtomicXchgU8(&s.u8, 0xff), 0, "%#x", uint8_t); 511 CHECKVAL(s.u8, 0xff, "%#x"); 512 513 CHECKOP(ASMAtomicXchgU8(&s.u8, 0x87), 0xffff, "%#x", uint8_t); 514 CHECKVAL(s.u8, 0x87, "%#x"); 515 CHECKVAL(s.u8Dummy0, 0x42, "%#x"); 516 CHECKVAL(s.u8Dummy1, 0x42, "%#x"); 557 DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t); 558 } 559 560 561 DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16) 562 { 563 *pu16 = 0; 564 565 CHECKOP(ASMAtomicXchgU16(pu16, 1), 0, "%#x", uint16_t); 566 CHECKVAL(*pu16, 1, "%#x"); 567 568 CHECKOP(ASMAtomicXchgU16(pu16, 0), 1, "%#x", uint16_t); 569 CHECKVAL(*pu16, 0, "%#x"); 570 571 CHECKOP(ASMAtomicXchgU16(pu16, 0xffff), 0, "%#x", uint16_t); 572 CHECKVAL(*pu16, 0xffff, "%#x"); 573 574 CHECKOP(ASMAtomicXchgU16(pu16, 0x8765), 0xffff, "%#x", uint16_t); 575 CHECKVAL(*pu16, 0x8765, "%#x"); 517 576 } 518 577 … … 520 579 static void tstASMAtomicXchgU16(void) 521 580 { 522 struct 523 { 524 uint16_t u16Dummy0; 525 uint16_t u16; 526 uint16_t u16Dummy1; 527 } s; 528 529 s.u16 = 0; 530 s.u16Dummy0 = s.u16Dummy1 = 0x1234; 531 CHECKOP(ASMAtomicXchgU16(&s.u16, 1), 0, "%#x", uint16_t); 532 CHECKVAL(s.u16, 1, "%#x"); 533 534 CHECKOP(ASMAtomicXchgU16(&s.u16, 0), 1, "%#x", uint16_t); 535 CHECKVAL(s.u16, 0, "%#x"); 536 537 CHECKOP(ASMAtomicXchgU16(&s.u16, 0xffff), 0, "%#x", uint16_t); 538 CHECKVAL(s.u16, 0xffff, "%#x"); 539 540 CHECKOP(ASMAtomicXchgU16(&s.u16, 0x8765), 0xffff, "%#x", uint16_t); 541 CHECKVAL(s.u16, 0x8765, "%#x"); 542 CHECKVAL(s.u16Dummy0, 0x1234, "%#x"); 543 CHECKVAL(s.u16Dummy1, 0x1234, "%#x"); 581 DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t); 582 } 583 584 585 DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32) 586 { 587 *pu32 = 0; 588 589 CHECKOP(ASMAtomicXchgU32(pu32, 1), 0, "%#x", uint32_t); 590 CHECKVAL(*pu32, 1, "%#x"); 591 592 CHECKOP(ASMAtomicXchgU32(pu32, 0), 1, "%#x", uint32_t); 593 CHECKVAL(*pu32, 0, "%#x"); 594 595 CHECKOP(ASMAtomicXchgU32(pu32, ~UINT32_C(0)), 0, "%#x", uint32_t); 596 CHECKVAL(*pu32, ~UINT32_C(0), "%#x"); 597 598 CHECKOP(ASMAtomicXchgU32(pu32, 0x87654321), ~UINT32_C(0), "%#x", uint32_t); 599 CHECKVAL(*pu32, 0x87654321, "%#x"); 544 600 } 545 601 … … 547 603 static void tstASMAtomicXchgU32(void) 548 604 { 549 struct 550 { 551 uint32_t u32Dummy0; 552 uint32_t u32; 553 uint32_t u32Dummy1; 554 } s; 555 556 s.u32 = 0; 557 s.u32Dummy0 = s.u32Dummy1 = 0x11223344; 558 559 CHECKOP(ASMAtomicXchgU32(&s.u32, 1), 0, "%#x", uint32_t); 560 CHECKVAL(s.u32, 1, "%#x"); 561 562 CHECKOP(ASMAtomicXchgU32(&s.u32, 0), 1, "%#x", uint32_t); 563 CHECKVAL(s.u32, 0, "%#x"); 564 565 CHECKOP(ASMAtomicXchgU32(&s.u32, ~0U), 0, "%#x", uint32_t); 566 CHECKVAL(s.u32, ~0U, "%#x"); 567 568 CHECKOP(ASMAtomicXchgU32(&s.u32, 0x87654321), ~0U, "%#x", uint32_t); 569 CHECKVAL(s.u32, 0x87654321, "%#x"); 570 571 CHECKVAL(s.u32Dummy0, 0x11223344, "%#x"); 572 CHECKVAL(s.u32Dummy1, 0x11223344, "%#x"); 605 DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t); 606 } 607 608 609 DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64) 610 { 611 *pu64 = 0; 612 613 
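    /* The values below are chosen so that both halves of the 64-bit word get
       exercised: 0 <-> 1 only toggles the low dword, while the all-ones and
       0xfedcba0987654321 patterns also flip the high dword. */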
CHECKOP(ASMAtomicXchgU64(pu64, 1), UINT64_C(0), "%#llx", uint64_t); 614 CHECKVAL(*pu64, UINT64_C(1), "%#llx"); 615 616 CHECKOP(ASMAtomicXchgU64(pu64, 0), UINT64_C(1), "%#llx", uint64_t); 617 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 618 619 CHECKOP(ASMAtomicXchgU64(pu64, ~UINT64_C(0)), UINT64_C(0), "%#llx", uint64_t); 620 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 621 622 CHECKOP(ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), ~UINT64_C(0), "%#llx", uint64_t); 623 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 573 624 } 574 625 … … 576 627 static void tstASMAtomicXchgU64(void) 577 628 { 578 struct 579 { 580 uint64_t u64Dummy0; 581 uint64_t u64; 582 uint64_t u64Dummy1; 583 } s; 584 585 s.u64 = 0; 586 s.u64Dummy0 = s.u64Dummy1 = 0x1122334455667788ULL; 587 588 CHECKOP(ASMAtomicXchgU64(&s.u64, 1), 0ULL, "%#llx", uint64_t); 589 CHECKVAL(s.u64, 1ULL, "%#llx"); 590 591 CHECKOP(ASMAtomicXchgU64(&s.u64, 0), 1ULL, "%#llx", uint64_t); 592 CHECKVAL(s.u64, 0ULL, "%#llx"); 593 594 CHECKOP(ASMAtomicXchgU64(&s.u64, ~0ULL), 0ULL, "%#llx", uint64_t); 595 CHECKVAL(s.u64, ~0ULL, "%#llx"); 596 597 CHECKOP(ASMAtomicXchgU64(&s.u64, 0xfedcba0987654321ULL), ~0ULL, "%#llx", uint64_t); 598 CHECKVAL(s.u64, 0xfedcba0987654321ULL, "%#llx"); 599 600 CHECKVAL(s.u64Dummy0, 0x1122334455667788ULL, "%#llx"); 601 CHECKVAL(s.u64Dummy1, 0x1122334455667788ULL, "%#llx"); 629 DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t); 630 } 631 632 633 DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv) 634 { 635 *ppv = NULL; 636 637 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, "%p", void *); 638 CHECKVAL(*ppv, (void *)(~(uintptr_t)0), "%p"); 639 640 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *); 641 CHECKVAL(*ppv, (void *)0x87654321, "%p"); 642 643 CHECKOP(ASMAtomicXchgPtr(ppv, NULL), (void *)0x87654321, "%p", void *); 644 CHECKVAL(*ppv, NULL, "%p"); 602 645 } 603 646 … … 605 648 static void tstASMAtomicXchgPtr(void) 606 649 { 607 void *pv = NULL; 608 609 CHECKOP(ASMAtomicXchgPtr(&pv, (void *)(~(uintptr_t)0)), NULL, "%p", void *); 610 CHECKVAL(pv, (void *)(~(uintptr_t)0), "%p"); 611 612 CHECKOP(ASMAtomicXchgPtr(&pv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *); 613 CHECKVAL(pv, (void *)0x87654321, "%p"); 614 615 CHECKOP(ASMAtomicXchgPtr(&pv, NULL), (void *)0x87654321, "%p", void *); 616 CHECKVAL(pv, NULL, "%p"); 650 DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *); 651 } 652 653 654 DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8) 655 { 656 *pu8 = 0xff; 657 658 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0), false, "%d", bool); 659 CHECKVAL(*pu8, 0xff, "%x"); 660 661 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, "%d", bool); 662 CHECKVAL(*pu8, 0, "%x"); 663 664 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x79, 0xff), false, "%d", bool); 665 CHECKVAL(*pu8, 0, "%x"); 666 667 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, "%d", bool); 668 CHECKVAL(*pu8, 0x97, "%x"); 617 669 } 618 670 … … 620 672 static void tstASMAtomicCmpXchgU8(void) 621 673 { 622 struct 623 { 624 uint8_t u8Before; 625 uint8_t u8; 626 uint8_t u8After; 627 } u = { 0xcc, 0xff, 0xaa }; 628 629 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0, 0), false, "%d", bool); 630 CHECKVAL(u.u8, 0xff, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 631 632 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0, 0xff), true, "%d", bool); 633 CHECKVAL(u.u8, 0, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 634 635 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0x79, 0xff), false, "%d", bool); 636 
CHECKVAL(u.u8, 0, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 637 638 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0x97, 0), true, "%d", bool); 639 CHECKVAL(u.u8, 0x97, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 674 DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t); 675 } 676 677 678 DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32) 679 { 680 *pu32 = UINT32_C(0xffffffff); 681 682 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, 0), false, "%d", bool); 683 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 684 685 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, "%d", bool); 686 CHECKVAL(*pu32, 0, "%x"); 687 688 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff)), false, "%d", bool); 689 CHECKVAL(*pu32, 0, "%x"); 690 691 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), 0), true, "%d", bool); 692 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x"); 640 693 } 641 694 … … 643 696 static void tstASMAtomicCmpXchgU32(void) 644 697 { 645 uint32_t u32 = 0xffffffff; 646 647 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0), false, "%d", bool); 648 CHECKVAL(u32, 0xffffffff, "%x"); 649 650 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0xffffffff), true, "%d", bool); 651 CHECKVAL(u32, 0, "%x"); 652 653 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0xffffffff), false, "%d", bool); 654 CHECKVAL(u32, 0, "%x"); 655 656 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0), true, "%d", bool); 657 CHECKVAL(u32, 0x8008efd, "%x"); 698 DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t); 699 } 700 701 702 703 DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64) 704 { 705 *pu64 = UINT64_C(0xffffffffffffff); 706 707 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, 0), false, "%d", bool); 708 CHECKVAL(*pu64, UINT64_C(0xffffffffffffff), "%#llx"); 709 710 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, "%d", bool); 711 CHECKVAL(*pu64, 0, "%x"); 712 713 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff)), false, "%d", bool); 714 CHECKVAL(*pu64, 0, "%x"); 715 716 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000)), false, "%d", bool); 717 CHECKVAL(*pu64, 0, "%x"); 718 719 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, "%d", bool); 720 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%#llx"); 658 721 } 659 722 … … 661 724 static void tstASMAtomicCmpXchgU64(void) 662 725 { 663 uint64_t u64 = 0xffffffffffffffULL; 664 665 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0), false, "%d", bool); 666 CHECKVAL(u64, 0xffffffffffffffULL, "%#llx"); 667 668 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0xffffffffffffffULL), true, "%d", bool); 669 CHECKVAL(u64, 0, "%x"); 670 671 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff), false, "%d", bool); 672 CHECKVAL(u64, 0, "%x"); 673 674 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL), false, "%d", bool); 675 CHECKVAL(u64, 0, "%x"); 676 677 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool); 678 CHECKVAL(u64, 0x80040008008efdULL, "%#llx"); 726 DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t); 727 } 728 729 730 DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32) 731 { 732 *pu32 = UINT32_C(0xffffffff); 733 uint32_t u32Old = UINT32_C(0x80005111); 734 735 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, "%d", bool); 736 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 737 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x"); 738 739 
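    /* Unlike the plain CmpXchg variant, the Ex form also hands back the value it
       found, so every step checks both the resulting *pu32 and the old value
       copied into u32Old. */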
CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, "%d", bool); 740 CHECKVAL(*pu32, 0, "%x"); 741 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x"); 742 743 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff), &u32Old), false, "%d", bool); 744 CHECKVAL(*pu32, 0, "%x"); 745 CHECKVAL(u32Old, 0, "%x"); 746 747 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), 0, &u32Old), true, "%d", bool); 748 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x"); 749 CHECKVAL(u32Old, 0, "%x"); 750 751 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0x8008efd), &u32Old), true, "%d", bool); 752 CHECKVAL(*pu32, 0, "%x"); 753 CHECKVAL(u32Old, UINT32_C(0x8008efd), "%x"); 679 754 } 680 755 … … 682 757 static void tstASMAtomicCmpXchgExU32(void) 683 758 { 684 uint32_t u32 = 0xffffffff; 685 uint32_t u32Old = 0x80005111; 686 687 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0, &u32Old), false, "%d", bool); 688 CHECKVAL(u32, 0xffffffff, "%x"); 689 CHECKVAL(u32Old, 0xffffffff, "%x"); 690 691 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0xffffffff, &u32Old), true, "%d", bool); 692 CHECKVAL(u32, 0, "%x"); 693 CHECKVAL(u32Old, 0xffffffff, "%x"); 694 695 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0xffffffff, &u32Old), false, "%d", bool); 696 CHECKVAL(u32, 0, "%x"); 697 CHECKVAL(u32Old, 0, "%x"); 698 699 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0, &u32Old), true, "%d", bool); 700 CHECKVAL(u32, 0x8008efd, "%x"); 701 CHECKVAL(u32Old, 0, "%x"); 702 703 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0x8008efd, &u32Old), true, "%d", bool); 704 CHECKVAL(u32, 0, "%x"); 705 CHECKVAL(u32Old, 0x8008efd, "%x"); 759 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t); 760 } 761 762 763 DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64) 764 { 765 *pu64 = UINT64_C(0xffffffffffffffff); 766 uint64_t u64Old = UINT64_C(0x8000000051111111); 767 768 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, "%d", bool); 769 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%llx"); 770 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx"); 771 772 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, "%d", bool); 773 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 774 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx"); 775 776 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0xffffffff, &u64Old), false, "%d", bool); 777 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 778 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 779 780 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000), &u64Old), false, "%d", bool); 781 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 782 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 783 784 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0, &u64Old), true, "%d", bool); 785 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%llx"); 786 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 787 788 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0x80040008008efd), &u64Old), true, "%d", bool); 789 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 790 CHECKVAL(u64Old, UINT64_C(0x80040008008efd), "%llx"); 706 791 } 707 792 … … 709 794 static void tstASMAtomicCmpXchgExU64(void) 710 795 { 711 uint64_t u64 = 0xffffffffffffffffULL; 712 uint64_t u64Old = 0x8000000051111111ULL; 713 714 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0, &u64Old), false, "%d", bool); 715 CHECKVAL(u64, 0xffffffffffffffffULL, "%llx"); 716 CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx"); 717 718 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0xffffffffffffffffULL, &u64Old), true, "%d", bool); 
719 CHECKVAL(u64, 0ULL, "%llx"); 720 CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx"); 721 722 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff, &u64Old), false, "%d", bool); 723 CHECKVAL(u64, 0ULL, "%llx"); 724 CHECKVAL(u64Old, 0ULL, "%llx"); 725 726 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL, &u64Old), false, "%d", bool); 727 CHECKVAL(u64, 0ULL, "%llx"); 728 CHECKVAL(u64Old, 0ULL, "%llx"); 729 730 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0, &u64Old), true, "%d", bool); 731 CHECKVAL(u64, 0x80040008008efdULL, "%llx"); 732 CHECKVAL(u64Old, 0ULL, "%llx"); 733 734 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0x80040008008efdULL, &u64Old), true, "%d", bool); 735 CHECKVAL(u64, 0ULL, "%llx"); 736 CHECKVAL(u64Old, 0x80040008008efdULL, "%llx"); 796 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t); 797 } 798 799 800 DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64) 801 { 802 *pu64 = 0; 803 804 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0), "%#llx", uint64_t); 805 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 806 807 *pu64 = ~UINT64_C(0); 808 CHECKOP(ASMAtomicReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t); 809 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 810 811 *pu64 = UINT64_C(0xfedcba0987654321); 812 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t); 813 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 737 814 } 738 815 … … 740 817 static void tstASMAtomicReadU64(void) 741 818 { 742 uint64_t u64 = 0; 743 744 CHECKOP(ASMAtomicReadU64(&u64), 0ULL, "%#llx", uint64_t); 745 CHECKVAL(u64, 0ULL, "%#llx"); 746 747 u64 = ~0ULL; 748 CHECKOP(ASMAtomicReadU64(&u64), ~0ULL, "%#llx", uint64_t); 749 CHECKVAL(u64, ~0ULL, "%#llx"); 750 751 u64 = 0xfedcba0987654321ULL; 752 CHECKOP(ASMAtomicReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t); 753 CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx"); 819 DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t); 820 } 821 822 823 DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64) 824 { 825 *pu64 = 0; 826 827 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0), "%#llx", uint64_t); 828 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 829 830 *pu64 = ~UINT64_C(0); 831 CHECKOP(ASMAtomicUoReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t); 832 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 833 834 *pu64 = UINT64_C(0xfedcba0987654321); 835 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t); 836 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 754 837 } 755 838 … … 757 840 static void tstASMAtomicUoReadU64(void) 758 841 { 759 uint64_t u64 = 0; 760 761 CHECKOP(ASMAtomicUoReadU64(&u64), 0ULL, "%#llx", uint64_t); 762 CHECKVAL(u64, 0ULL, "%#llx"); 763 764 u64 = ~0ULL; 765 CHECKOP(ASMAtomicUoReadU64(&u64), ~0ULL, "%#llx", uint64_t); 766 CHECKVAL(u64, ~0ULL, "%#llx"); 767 768 u64 = 0xfedcba0987654321ULL; 769 CHECKOP(ASMAtomicUoReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t); 770 CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx"); 771 } 772 773 774 static void tstASMAtomicAddS32(void) 842 DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t); 843 } 844 845 846 DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32) 775 847 { 776 848 int32_t i32Rc; 777 int32_ti32 = 10;849 *pi32 = 10; 778 850 #define MYCHECK(op, rc, val) \ 779 851 do { \ 780 852 i32Rc = op; \ 781 853 if (i32Rc != (rc)) \ 782 { \ 783 RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 784 RTTestIErrorInc(); \ 785 } \ 786 if (i32 != (val)) \ 787 { \ 788 
RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, val); \ 789 RTTestIErrorInc(); \ 790 } \ 854 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 855 if (*pi32 != (val)) \ 856 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, val); \ 791 857 } while (0) 792 MYCHECK(ASMAtomicAddS32( &i32, 1), 10, 11);793 MYCHECK(ASMAtomicAddS32( &i32, -2), 11, 9);794 MYCHECK(ASMAtomicAddS32( &i32, -9), 9, 0);795 MYCHECK(ASMAtomicAddS32( &i32, -0x7fffffff), 0, -0x7fffffff);796 MYCHECK(ASMAtomicAddS32( &i32, 0), -0x7fffffff, -0x7fffffff);797 MYCHECK(ASMAtomicAddS32( &i32, 0x7fffffff), -0x7fffffff, 0);798 MYCHECK(ASMAtomicAddS32( &i32, 0), 0, 0);858 MYCHECK(ASMAtomicAddS32(pi32, 1), 10, 11); 859 MYCHECK(ASMAtomicAddS32(pi32, -2), 11, 9); 860 MYCHECK(ASMAtomicAddS32(pi32, -9), 9, 0); 861 MYCHECK(ASMAtomicAddS32(pi32, -0x7fffffff), 0, -0x7fffffff); 862 MYCHECK(ASMAtomicAddS32(pi32, 0), -0x7fffffff, -0x7fffffff); 863 MYCHECK(ASMAtomicAddS32(pi32, 0x7fffffff), -0x7fffffff, 0); 864 MYCHECK(ASMAtomicAddS32(pi32, 0), 0, 0); 799 865 #undef MYCHECK 800 866 } 801 867 802 803 static void tstASMAtomicDecIncS32(void) 868 static void tstASMAtomicAddS32(void) 869 { 870 DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t); 871 } 872 873 874 DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64) 875 { 876 int64_t i64Rc; 877 *pi64 = 10; 878 #define MYCHECK(op, rc, val) \ 879 do { \ 880 i64Rc = op; \ 881 if (i64Rc != (rc)) \ 882 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, (int64_t)rc); \ 883 if (*pi64 != (val)) \ 884 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, (int64_t)(val)); \ 885 } while (0) 886 MYCHECK(ASMAtomicAddS64(pi64, 1), 10, 11); 887 MYCHECK(ASMAtomicAddS64(pi64, -2), 11, 9); 888 MYCHECK(ASMAtomicAddS64(pi64, -9), 9, 0); 889 MYCHECK(ASMAtomicAddS64(pi64, -INT64_MAX), 0, -INT64_MAX); 890 MYCHECK(ASMAtomicAddS64(pi64, 0), -INT64_MAX, -INT64_MAX); 891 MYCHECK(ASMAtomicAddS64(pi64, -1), -INT64_MAX, INT64_MIN); 892 MYCHECK(ASMAtomicAddS64(pi64, INT64_MAX), INT64_MIN, -1); 893 MYCHECK(ASMAtomicAddS64(pi64, 1), -1, 0); 894 MYCHECK(ASMAtomicAddS64(pi64, 0), 0, 0); 895 #undef MYCHECK 896 } 897 898 899 static void tstASMAtomicAddS64(void) 900 { 901 DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t); 902 } 903 904 905 DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32) 804 906 { 805 907 int32_t i32Rc; 806 int32_ti32 = 10;908 *pi32 = 10; 807 909 #define MYCHECK(op, rc) \ 808 910 do { \ 809 911 i32Rc = op; \ 810 912 if (i32Rc != (rc)) \ 811 { \ 812 RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 813 RTTestIErrorInc(); \ 814 } \ 815 if (i32 != (rc)) \ 816 { \ 817 RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, rc); \ 818 RTTestIErrorInc(); \ 819 } \ 913 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 914 if (*pi32 != (rc)) \ 915 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, rc); \ 820 916 } while (0) 821 MYCHECK(ASMAtomicDecS32( &i32), 9);822 MYCHECK(ASMAtomicDecS32( &i32), 8);823 MYCHECK(ASMAtomicDecS32( &i32), 7);824 MYCHECK(ASMAtomicDecS32( &i32), 6);825 MYCHECK(ASMAtomicDecS32( &i32), 5);826 MYCHECK(ASMAtomicDecS32( &i32), 4);827 
MYCHECK(ASMAtomicDecS32( &i32), 3);828 MYCHECK(ASMAtomicDecS32( &i32), 2);829 MYCHECK(ASMAtomicDecS32( &i32), 1);830 MYCHECK(ASMAtomicDecS32( &i32), 0);831 MYCHECK(ASMAtomicDecS32( &i32), -1);832 MYCHECK(ASMAtomicDecS32( &i32), -2);833 MYCHECK(ASMAtomicIncS32( &i32), -1);834 MYCHECK(ASMAtomicIncS32( &i32), 0);835 MYCHECK(ASMAtomicIncS32( &i32), 1);836 MYCHECK(ASMAtomicIncS32( &i32), 2);837 MYCHECK(ASMAtomicIncS32( &i32), 3);838 MYCHECK(ASMAtomicDecS32( &i32), 2);839 MYCHECK(ASMAtomicIncS32( &i32), 3);840 MYCHECK(ASMAtomicDecS32( &i32), 2);841 MYCHECK(ASMAtomicIncS32( &i32), 3);917 MYCHECK(ASMAtomicDecS32(pi32), 9); 918 MYCHECK(ASMAtomicDecS32(pi32), 8); 919 MYCHECK(ASMAtomicDecS32(pi32), 7); 920 MYCHECK(ASMAtomicDecS32(pi32), 6); 921 MYCHECK(ASMAtomicDecS32(pi32), 5); 922 MYCHECK(ASMAtomicDecS32(pi32), 4); 923 MYCHECK(ASMAtomicDecS32(pi32), 3); 924 MYCHECK(ASMAtomicDecS32(pi32), 2); 925 MYCHECK(ASMAtomicDecS32(pi32), 1); 926 MYCHECK(ASMAtomicDecS32(pi32), 0); 927 MYCHECK(ASMAtomicDecS32(pi32), -1); 928 MYCHECK(ASMAtomicDecS32(pi32), -2); 929 MYCHECK(ASMAtomicIncS32(pi32), -1); 930 MYCHECK(ASMAtomicIncS32(pi32), 0); 931 MYCHECK(ASMAtomicIncS32(pi32), 1); 932 MYCHECK(ASMAtomicIncS32(pi32), 2); 933 MYCHECK(ASMAtomicIncS32(pi32), 3); 934 MYCHECK(ASMAtomicDecS32(pi32), 2); 935 MYCHECK(ASMAtomicIncS32(pi32), 3); 936 MYCHECK(ASMAtomicDecS32(pi32), 2); 937 MYCHECK(ASMAtomicIncS32(pi32), 3); 842 938 #undef MYCHECK 843 939 } 844 940 845 941 942 static void tstASMAtomicDecIncS32(void) 943 { 944 DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t); 945 } 946 947 948 DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64) 949 { 950 int64_t i64Rc; 951 *pi64 = 10; 952 #define MYCHECK(op, rc) \ 953 do { \ 954 i64Rc = op; \ 955 if (i64Rc != (rc)) \ 956 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, rc); \ 957 if (*pi64 != (rc)) \ 958 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, rc); \ 959 } while (0) 960 MYCHECK(ASMAtomicDecS64(pi64), 9); 961 MYCHECK(ASMAtomicDecS64(pi64), 8); 962 MYCHECK(ASMAtomicDecS64(pi64), 7); 963 MYCHECK(ASMAtomicDecS64(pi64), 6); 964 MYCHECK(ASMAtomicDecS64(pi64), 5); 965 MYCHECK(ASMAtomicDecS64(pi64), 4); 966 MYCHECK(ASMAtomicDecS64(pi64), 3); 967 MYCHECK(ASMAtomicDecS64(pi64), 2); 968 MYCHECK(ASMAtomicDecS64(pi64), 1); 969 MYCHECK(ASMAtomicDecS64(pi64), 0); 970 MYCHECK(ASMAtomicDecS64(pi64), -1); 971 MYCHECK(ASMAtomicDecS64(pi64), -2); 972 MYCHECK(ASMAtomicIncS64(pi64), -1); 973 MYCHECK(ASMAtomicIncS64(pi64), 0); 974 MYCHECK(ASMAtomicIncS64(pi64), 1); 975 MYCHECK(ASMAtomicIncS64(pi64), 2); 976 MYCHECK(ASMAtomicIncS64(pi64), 3); 977 MYCHECK(ASMAtomicDecS64(pi64), 2); 978 MYCHECK(ASMAtomicIncS64(pi64), 3); 979 MYCHECK(ASMAtomicDecS64(pi64), 2); 980 MYCHECK(ASMAtomicIncS64(pi64), 3); 981 #undef MYCHECK 982 } 983 984 985 static void tstASMAtomicDecIncS64(void) 986 { 987 DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t); 988 } 989 990 991 DECLINLINE(void) tstASMAtomicAndOrU32Worker(uint32_t volatile *pu32) 992 { 993 *pu32 = UINT32_C(0xffffffff); 994 995 ASMAtomicOrU32(pu32, UINT32_C(0xffffffff)); 996 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 997 998 ASMAtomicAndU32(pu32, UINT32_C(0xffffffff)); 999 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 1000 1001 ASMAtomicAndU32(pu32, UINT32_C(0x8f8f8f8f)); 1002 CHECKVAL(*pu32, UINT32_C(0x8f8f8f8f), "%x"); 1003 1004 ASMAtomicOrU32(pu32, UINT32_C(0x70707070)); 1005 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 1006 1007 
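    /* 0x8f8f8f8f and 0x70707070 are complementary masks, so the OR above restores
       the all-ones pattern; the single-bit values below probe the lowest and
       highest bit positions. */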
ASMAtomicAndU32(pu32, UINT32_C(1)); 1008 CHECKVAL(*pu32, UINT32_C(1), "%x"); 1009 1010 ASMAtomicOrU32(pu32, UINT32_C(0x80000000)); 1011 CHECKVAL(*pu32, UINT32_C(0x80000001), "%x"); 1012 1013 ASMAtomicAndU32(pu32, UINT32_C(0x80000000)); 1014 CHECKVAL(*pu32, UINT32_C(0x80000000), "%x"); 1015 1016 ASMAtomicAndU32(pu32, UINT32_C(0)); 1017 CHECKVAL(*pu32, UINT32_C(0), "%x"); 1018 1019 ASMAtomicOrU32(pu32, UINT32_C(0x42424242)); 1020 CHECKVAL(*pu32, UINT32_C(0x42424242), "%x"); 1021 } 1022 1023 846 1024 static void tstASMAtomicAndOrU32(void) 847 1025 { 848 uint32_t u32 = 0xffffffff; 849 850 ASMAtomicOrU32(&u32, 0xffffffff); 851 CHECKVAL(u32, 0xffffffff, "%x"); 852 853 ASMAtomicAndU32(&u32, 0xffffffff); 854 CHECKVAL(u32, 0xffffffff, "%x"); 855 856 ASMAtomicAndU32(&u32, 0x8f8f8f8f); 857 CHECKVAL(u32, 0x8f8f8f8f, "%x"); 858 859 ASMAtomicOrU32(&u32, 0x70707070); 860 CHECKVAL(u32, 0xffffffff, "%x"); 861 862 ASMAtomicAndU32(&u32, 1); 863 CHECKVAL(u32, 1, "%x"); 864 865 ASMAtomicOrU32(&u32, 0x80000000); 866 CHECKVAL(u32, 0x80000001, "%x"); 867 868 ASMAtomicAndU32(&u32, 0x80000000); 869 CHECKVAL(u32, 0x80000000, "%x"); 870 871 ASMAtomicAndU32(&u32, 0); 872 CHECKVAL(u32, 0, "%x"); 873 874 ASMAtomicOrU32(&u32, 0x42424242); 875 CHECKVAL(u32, 0x42424242, "%x"); 876 } 877 878 879 void tstASMMemZeroPage(void) 880 { 881 struct 1026 DO_SIMPLE_TEST(ASMAtomicAndOrU32, uint32_t); 1027 } 1028 1029 1030 DECLINLINE(void) tstASMAtomicAndOrU64Worker(uint64_t volatile *pu64) 1031 { 1032 *pu64 = UINT64_C(0xffffffff); 1033 1034 ASMAtomicOrU64(pu64, UINT64_C(0xffffffff)); 1035 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1036 1037 ASMAtomicAndU64(pu64, UINT64_C(0xffffffff)); 1038 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1039 1040 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f)); 1041 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f), "%x"); 1042 1043 ASMAtomicOrU64(pu64, UINT64_C(0x70707070)); 1044 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1045 1046 ASMAtomicAndU64(pu64, UINT64_C(1)); 1047 CHECKVAL(*pu64, UINT64_C(1), "%x"); 1048 1049 ASMAtomicOrU64(pu64, UINT64_C(0x80000000)); 1050 CHECKVAL(*pu64, UINT64_C(0x80000001), "%x"); 1051 1052 ASMAtomicAndU64(pu64, UINT64_C(0x80000000)); 1053 CHECKVAL(*pu64, UINT64_C(0x80000000), "%x"); 1054 1055 ASMAtomicAndU64(pu64, UINT64_C(0)); 1056 CHECKVAL(*pu64, UINT64_C(0), "%x"); 1057 1058 ASMAtomicOrU64(pu64, UINT64_C(0x42424242)); 1059 CHECKVAL(*pu64, UINT64_C(0x42424242), "%x"); 1060 1061 // Same as above, but now 64-bit wide. 
1062          ASMAtomicAndU64(pu64, UINT64_C(0));
1063          CHECKVAL(*pu64, UINT64_C(0), "%x");
1064      
1065          ASMAtomicOrU64(pu64, UINT64_C(0xffffffffffffffff));
1066          CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1067      
1068          ASMAtomicAndU64(pu64, UINT64_C(0xffffffffffffffff));
1069          CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1070      
1071          ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f8f8f8f8f));
1072          CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f8f8f8f8f), "%x");
1073      
1074          ASMAtomicOrU64(pu64, UINT64_C(0x7070707070707070));
1075          CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x");
1076      
1077          ASMAtomicAndU64(pu64, UINT64_C(1));
1078          CHECKVAL(*pu64, UINT64_C(1), "%x");
1079      
1080          ASMAtomicOrU64(pu64, UINT64_C(0x8000000000000000));
1081          CHECKVAL(*pu64, UINT64_C(0x8000000000000001), "%x");
1082      
1083          ASMAtomicAndU64(pu64, UINT64_C(0x8000000000000000));
1084          CHECKVAL(*pu64, UINT64_C(0x8000000000000000), "%x");
1085      
1086          ASMAtomicAndU64(pu64, UINT64_C(0));
1087          CHECKVAL(*pu64, UINT64_C(0), "%x");
1088      
1089          ASMAtomicOrU64(pu64, UINT64_C(0x4242424242424242));
1090          CHECKVAL(*pu64, UINT64_C(0x4242424242424242), "%x");
1091      }
1092      
1093      
1094      static void tstASMAtomicAndOrU64(void)
1095      {
1096          DO_SIMPLE_TEST(ASMAtomicAndOrU64, uint64_t);
1097      }
1098      
1099      
1100      typedef struct
1101      {
1102          uint8_t ab[PAGE_SIZE];
1103      } TSTPAGE;
1104      
1105      
1106      DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
1107      {
1108          for (unsigned j = 0; j < 16; j++)
882 1109      {
883               uint64_t u64Magic1;
884               uint8_t  abPage[PAGE_SIZE];
885               uint64_t u64Magic2;
886           } Buf1, Buf2, Buf3;
887       
888           Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
889           memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
890           Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
891           Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
892           memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
893           Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
894           Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
895           memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
896           Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
897           ASMMemZeroPage(Buf1.abPage);
898           ASMMemZeroPage(Buf2.abPage);
899           ASMMemZeroPage(Buf3.abPage);
900           if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
901               || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
902               || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
903               || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
904               || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
905               || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
906           {
907               RTPrintf("tstInlineAsm: ASMMemZeroPage violated one/both magic(s)!\n");
908               RTTestIErrorInc();
1110              memset(pPage, 0x11 * j, sizeof(*pPage));
1111              ASMMemZeroPage(pPage);
1112              for (unsigned i = 0; i < sizeof(pPage->ab); i++)
1113                  if (pPage->ab[i])
1114                      RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
909 1115      }
910           for (unsigned i = 0; i < sizeof(Buf1.abPage); i++)
911               if (Buf1.abPage[i])
912               {
913                   RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
914                   RTTestIErrorInc();
915               }
916           for (unsigned i = 0; i < sizeof(Buf2.abPage); i++)
917               if (Buf2.abPage[i])
918               {
919                   RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
920                   RTTestIErrorInc();
921               }
922           for (unsigned i = 0; i < sizeof(Buf3.abPage); i++)
923               if (Buf3.abPage[i])
924               {
925                   RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
926                   RTTestIErrorInc();
927               }
1116      }
1117      
1118      
1119      static void tstASMMemZeroPage(void)
1120      {
1121          DO_SIMPLE_TEST(ASMMemZeroPage, TSTPAGE);
928 1122  }
929 1123  
… …
966 1160  void tstASMMemZero32(void)
967 1161  {
1162          RTTestSub(g_hTest, "ASMMemFill32");
1163      
968 1164      struct
969 1165      {
… …
992 1188          || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
993 1189      {
994               RTPrintf("tstInlineAsm: ASMMemZero32 violated one/both magic(s)!\n");
995               RTTestIErrorInc();
1190              RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
996 1191      }
997 1192      for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
998 1193          if (Buf1.abPage[i])
999               {
1000                  RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1001                  RTTestIErrorInc();
1002              }
1194              RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1003 1195     for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
1004 1196         if (Buf2.abPage[i])
1005              {
1006                  RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1007                  RTTestIErrorInc();
1008              }
1197              RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1009 1198     for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
1010 1199         if (Buf3.abPage[i])
1011              {
1012                  RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1013                  RTTestIErrorInc();
1014              }
1200              RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
1015 1201 }
1016 1202 
… …
1018 1204 void tstASMMemFill32(void)
1019 1205 {
1206          RTTestSub(g_hTest, "ASMMemFill32");
1207      
1020 1208     struct
1021 1209     {
… …
1055 1243         || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
1056 1244         || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
1057          {
1058              RTPrintf("tstInlineAsm: ASMMemFill32 violated one/both magic(s)!\n");
1059              RTTestIErrorInc();
1060          }
1245              RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
1061 1246     for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
1062 1247         if (Buf1.au32Page[i] != 0xdeadbeef)
1063              {
1064                  RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
1065                  RTTestIErrorInc();
1066              }
1248              RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
1067 1249     for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
1068 1250         if (Buf2.au32Page[i] != 0xcafeff01)
1069              {
1070                  RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
1071                  RTTestIErrorInc();
1072              }
1251              RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
1073 1252     for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
1074 1253         if (Buf3.au32Page[i] != 0xf00dd00f)
1075              {
1076                  RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
1077                  RTTestIErrorInc();
1078              }
1254              RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
1079 1255 }
1080 1256 
… …
1083 1259 void tstASMMath(void)
1084 1260 {
1261          RTTestSub(g_hTest, "Math");
1262      
1085 1263     uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
1086 1264     CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
… …
1158 1336 void tstASMByteSwap(void)
1159 1337 {
1160          RTPrintf("tstInlineASM: TESTING - ASMByteSwap*\n");
1338          RTTestSub(g_hTest, "ASMByteSwap*");
1161 1339 
1162 1340     uint64_t u64In = UINT64_C(0x0011223344556677);
… …
1233 1411     static int64_t volatile s_i64;
1234 1412     register unsigned i;
1235          const unsigned cRounds = 2000000;
1413          const unsigned cRounds = _2M;
1236 1414     register uint64_t u64Elapsed;
1237 1415 
1238          RTPrintf("tstInlineASM: Benchmarking:\n");
1239      
1240      #if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1416          RTTestSub(g_hTest, "Benchmarking");
1417      
1418      #if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1241 1419 # define BENCH(op, str) \
1242 1420     do { \
… …
1246 1424         op; \
1247 1425         u64Elapsed = ASMReadTSC() - u64Elapsed; \
1248              RTPrintf(" %-30s %3llu cycles\n", str, u64Elapsed / cRounds); \
1426              RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
1249 1427     } while (0)
1250 1428 #else
… …
1256 1434         op; \
1257 1435         u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
1258              RTPrintf(" %-30s %3llu ns\n", str, u64Elapsed / cRounds); \
1436              RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
1259 1437     } while (0)
1260 1438 #endif
1261 1439 
1262          BENCH(s_u32 = 0, "s_u32 = 0 :");
1263          BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8 :");
1264          BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8 :");
1265          BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16 :");
1266          BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16 :");
1267          BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32 :");
1268          BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32 :");
1269          BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64 :");
1270          BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64 :");
1271          BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8 :");
1272          BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8 :");
1273          BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16 :");
1274          BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16 :");
1275          BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32 :");
1276          BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32 :");
1277          BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64 :");
1278          BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64 :");
1279          BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8 :");
1280          BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8 :");
1281          BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16 :");
1282          BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16 :");
1283          BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32 :");
1284          BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32 :");
1285          BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64 :");
1286          BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64 :");
1287          BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8 :");
1288          BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8 :");
1289          BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16 :");
1290          BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16 :");
1291          BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32 :");
1292          BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32 :");
1293          BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64 :");
1294          BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64 :");
1295          BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8 :");
1296          BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8 :");
1297          BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16 :");
1298          BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16 :");
1299          BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32 :");
1300          BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32 :");
1301          BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64 :");
1302          BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64 :");
1303          BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32 :");
1304          BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32 :");
1305          BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64 :");
1306          BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64 :");
1307          BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg :");
1308          BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg :");
1309          BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg :");
1310          BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg :");
1311          BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32 :");
1312          BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32 :");
1313          BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32 :");
1314          BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32 :");
1315          BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32 :");
1316          BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32 :");
1440          BENCH(s_u32 = 0, "s_u32 = 0");
1441          BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
1442          BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
1443          BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
1444          BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
1445          BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
1446          BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
1447          BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
1448          BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
1449          BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
1450          BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
1451          BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
1452          BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
1453          BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
1454          BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
1455          BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
1456          BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
1457          BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
1458          BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
1459          BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
1460          BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
1461          BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
1462          BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
1463          BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
1464          BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
1465          BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
1466          BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
1467          BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
1468          BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
1469          BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
1470          BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
1471          BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
1472          BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
1473          BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
1474          BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
1475          BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
1476          BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
1477          BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
1478          BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
1479          BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
1480          BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
1481          BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
1482          BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
1483          BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
1484          BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
1485          BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
1486          BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
1487          BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
1488          BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
1489          BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
1490          BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
1491          BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
1492          BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
1493          BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
1494          BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
1317 1495     /* The Darwin gcc does not like this ... */
1318 1496 #if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
1319          BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId :");
1497          BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
1320 1498 #endif
1321 1499 
1322          RTPrintf("Done.\n");
1323      
1324 1500 #undef BENCH
1325 1501 }
… …
1328 1504 int main(int argc, char *argv[])
1329 1505 {
1330          RTTEST hTest;
1331          int rc = RTTestInitAndCreate("tstRTInlineAsm", &hTest);
1506          int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
1332 1507     if (rc)
1333 1508         return rc;
1334          RTTestBanner(hTest);
1509          RTTestBanner(g_hTest);
1335 1510 
1336 1511     /*
… …
1352 1527     tstASMAtomicReadU64();
1353 1528     tstASMAtomicUoReadU64();
1529      
1354 1530     tstASMAtomicAddS32();
1531          tstASMAtomicAddS64();
1355 1532     tstASMAtomicDecIncS32();
1533          tstASMAtomicDecIncS64();
1356 1534     tstASMAtomicAndOrU32();
1535          tstASMAtomicAndOrU64();
1536      
1357 1537     tstASMMemZeroPage();
1358          tstASMMemIsZeroPage(hTest);
1538          tstASMMemIsZeroPage(g_hTest);
1359 1539     tstASMMemZero32();
1360 1540     tstASMMemFill32();
1541      
1361 1542     tstASMMath();
1543      
1362 1544     tstASMByteSwap();
1545      
1363 1546     tstASMBench();
1364 1547 
… …
1366 1549      * Show the result.
1367 1550      */
1368          return RTTestSummaryAndDestroy(hTest);
1369      }
1370      
1551          return RTTestSummaryAndDestroy(g_hTest);
1552      }
1553      
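The timing fallback used by the reworked BENCH macro above (the RTTimeNanoTS() branch, which reports the average through RTTestValue() as RTTESTUNIT_NS_PER_CALL) can be illustrated with the small standalone sketch below. It is not part of the changeset; the helper name, the round count and the operation being measured are illustrative choices only.

    /* Standalone sketch of the nanosecond-per-call measurement idea; builds against IPRT. */
    #include <iprt/asm.h>
    #include <iprt/test.h>
    #include <iprt/time.h>

    /* Hypothetical helper, not taken from the changeset. */
    static void benchSketch(RTTEST hTest)
    {
        static uint32_t volatile s_u32Dummy = 0;        /* illustrative measurement target */
        uint32_t const           cRounds    = 1000000;  /* illustrative round count */

        uint64_t u64Elapsed = RTTimeNanoTS();
        for (uint32_t i = 0; i < cRounds; i++)
            ASMAtomicIncU32(&s_u32Dummy);               /* the operation being timed */
        u64Elapsed = RTTimeNanoTS() - u64Elapsed;

        /* Report the average cost per call as a test value, like the reworked BENCH macro. */
        RTTestValue(hTest, "ASMAtomicIncU32 (sketch)", u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL);
    }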
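Along the same lines, the RTTest reporting flow that the converted checks rely on (RTTestInitAndCreate, RTTestSub, RTTestFailed and RTTestSummaryAndDestroy, all visible in the diff) is sketched below as a minimal standalone testcase. The testcase name and the checked operation are made up for illustration and are not part of the changeset.

    /* Minimal sketch of the RTTest reporting pattern; builds against IPRT. */
    #include <iprt/asm.h>
    #include <iprt/test.h>

    int main(int argc, char *argv[])
    {
        RTTEST hTest;
        int rc = RTTestInitAndCreate("tstSketch", &hTest);     /* hypothetical testcase name */
        if (rc)
            return rc;
        RTTestBanner(hTest);

        RTTestSub(hTest, "ASMAtomicXchgU32");
        uint32_t u32    = UINT32_C(0x11223344);
        uint32_t u32Old = ASMAtomicXchgU32(&u32, UINT32_C(0x55667788));
        if (u32Old != UINT32_C(0x11223344) || u32 != UINT32_C(0x55667788))
            RTTestFailed(hTest, "ASMAtomicXchgU32: got %#x/%#x\n", u32Old, u32);

        /* Prints the sub-test summary and returns the process exit code. */
        return RTTestSummaryAndDestroy(hTest);
    }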