Changeset 33136 in vbox
- Timestamp: Oct 14, 2010 2:47:59 PM
- Location: trunk
- Files: 2 edited
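The asm.h half of this changeset adds 64-bit counterparts to the existing 32-bit atomic helpers (ASMAtomicAddU64/SubU64, ASMAtomicIncU64/DecU64, ASMAtomicOrU64/AndU64, plus the matching MSC intrinsics). A minimal usage sketch follows; the statistics counter, flag word and function name are made up for illustration and are not part of the changeset itself.

#include <iprt/asm.h>   /* the header modified by this changeset */

/* Hypothetical 64-bit statistics counter and flag word, for illustration only. */
static uint64_t volatile g_cBytesRead = 0;
static uint64_t volatile g_fFlags     = 0;

static void recordRead(size_t cb)
{
    /* New in this changeset: 64-bit exchange-and-add, returning the old value. */
    uint64_t cOld = ASMAtomicAddU64(&g_cBytesRead, (uint64_t)cb);
    (void)cOld;

    /* 64-bit increment/decrement return the new value, like their 32-bit siblings. */
    ASMAtomicIncU64(&g_cBytesRead);
    ASMAtomicDecU64(&g_cBytesRead);

    /* Ordered 64-bit bit operations. */
    ASMAtomicOrU64(&g_fFlags,  UINT64_C(0x0000000100000000));
    ASMAtomicAndU64(&g_fFlags, ~UINT64_C(1));
}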
trunk/include/iprt/asm.h
r30112 r33136 70 70 # pragma intrinsic(_byteswap_uint64) 71 71 # pragma intrinsic(_InterlockedExchange64) 72 # pragma intrinsic(_InterlockedExchangeAdd64) 73 # pragma intrinsic(_InterlockedAnd64) 74 # pragma intrinsic(_InterlockedOr64) 75 # pragma intrinsic(_InterlockedIncrement64) 76 # pragma intrinsic(_InterlockedDecrement64) 72 77 # endif 73 78 #endif … … 181 186 #endif 182 187 188 189 /** @def ASMBreakpoint 190 * Debugger Breakpoint. 191 * @remark In the gnu world we add a nop instruction after the int3 to 192 * force gdb to remain at the int3 source line. 193 * @remark The L4 kernel will try make sense of the breakpoint, thus the jmp. 194 * @internal 195 */ 196 #if RT_INLINE_ASM_GNU_STYLE 197 # if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) 198 # ifndef __L4ENV__ 199 # define ASMBreakpoint() do { __asm__ __volatile__("int3\n\tnop"); } while (0) 200 # else 201 # define ASMBreakpoint() do { __asm__ __volatile__("int3; jmp 1f; 1:"); } while (0) 202 # endif 203 # elif defined(RT_ARCH_SPARC64) 204 # define ASMBreakpoint() do { __asm__ __volatile__("illtrap 0\n\t") } while (0) /** @todo Sparc64: this is just a wild guess. */ 205 # elif defined(RT_ARCH_SPARC) 206 # define ASMBreakpoint() do { __asm__ __volatile__("unimp 0\n\t"); } while (0) /** @todo Sparc: this is just a wild guess (same as Sparc64, just different name). */ 207 # else 208 # error "PORTME" 209 # endif 210 #else 211 # define ASMBreakpoint() __debugbreak() 212 #endif 213 214 215 /** 216 * Spinloop hint for platforms that have these, empty function on the other 217 * platforms. 218 * 219 * x86 & AMD64: The PAUSE variant of NOP for helping hyperthreaded CPUs detecing 220 * spin locks. 221 */ 222 #if RT_INLINE_ASM_EXTERNAL && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) 223 DECLASM(void) ASMNopPause(void); 224 #else 225 DECLINLINE(void) ASMNopPause(void) 226 { 227 # if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) 228 # if RT_INLINE_ASM_GNU_STYLE 229 __asm__ __volatile__(".byte 0xf3,0x90\n\t"); 230 # else 231 __asm { 232 _emit 0f3h 233 _emit 090h 234 } 235 # endif 236 # else 237 /* dummy */ 238 # endif 239 } 240 #endif 183 241 184 242 … … 1274 1332 1275 1333 /** 1276 * Atomically exchanges and adds to a 32-bit value, ordered.1277 *1278 * @returns The old value.1279 * @param pu32 Pointer to the value.1280 * @param u32 Number to add.1281 */1282 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1283 DECLASM(uint32_t) ASMAtomicAddU32(uint32_t volatile *pu32, uint32_t u32);1284 #else1285 DECLINLINE(uint32_t) ASMAtomicAddU32(uint32_t volatile *pu32, uint32_t u32)1286 {1287 # if RT_INLINE_ASM_USES_INTRIN1288 u32 = _InterlockedExchangeAdd((long *)pu32, u32);1289 return u32;1290 1291 # elif RT_INLINE_ASM_GNU_STYLE1292 __asm__ __volatile__("lock; xaddl %0, %1\n\t"1293 : "=r" (u32),1294 "=m" (*pu32)1295 : "0" (u32),1296 "m" (*pu32)1297 : "memory");1298 return u32;1299 # else1300 __asm1301 {1302 mov eax, [u32]1303 # ifdef RT_ARCH_AMD641304 mov rdx, [pu32]1305 lock xadd [rdx], eax1306 # else1307 mov edx, [pu32]1308 lock xadd [edx], eax1309 # endif1310 mov [u32], eax1311 }1312 return u32;1313 # endif1314 }1315 #endif1316 1317 1318 /**1319 * Atomically exchanges and adds to a signed 32-bit value, ordered.1320 *1321 * @returns The old value.1322 * @param pi32 Pointer to the value.1323 * @param i32 Number to add.1324 */1325 DECLINLINE(int32_t) ASMAtomicAddS32(int32_t volatile *pi32, int32_t i32)1326 {1327 return (int32_t)ASMAtomicAddU32((uint32_t volatile *)pi32, (uint32_t)i32);1328 }1329 1330 1331 /**1332 * Atomically exchanges and 
subtracts to an unsigned 32-bit value, ordered.1333 *1334 * @returns The old value.1335 * @param pu32 Pointer to the value.1336 * @param u32 Number to subtract.1337 */1338 DECLINLINE(uint32_t) ASMAtomicSubU32(uint32_t volatile *pu32, uint32_t u32)1339 {1340 return ASMAtomicAddU32(pu32, (uint32_t)-(int32_t)u32);1341 }1342 1343 1344 /**1345 * Atomically exchanges and subtracts to a signed 32-bit value, ordered.1346 *1347 * @returns The old value.1348 * @param pi32 Pointer to the value.1349 * @param i32 Number to subtract.1350 */1351 DECLINLINE(int32_t) ASMAtomicSubS32(int32_t volatile *pi32, int32_t i32)1352 {1353 return (int32_t)ASMAtomicAddU32((uint32_t volatile *)pi32, (uint32_t)-i32);1354 }1355 1356 1357 /**1358 * Atomically increment a 32-bit value, ordered.1359 *1360 * @returns The new value.1361 * @param pu32 Pointer to the value to increment.1362 */1363 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1364 DECLASM(uint32_t) ASMAtomicIncU32(uint32_t volatile *pu32);1365 #else1366 DECLINLINE(uint32_t) ASMAtomicIncU32(uint32_t volatile *pu32)1367 {1368 uint32_t u32;1369 # if RT_INLINE_ASM_USES_INTRIN1370 u32 = _InterlockedIncrement((long *)pu32);1371 return u32;1372 1373 # elif RT_INLINE_ASM_GNU_STYLE1374 __asm__ __volatile__("lock; xaddl %0, %1\n\t"1375 : "=r" (u32),1376 "=m" (*pu32)1377 : "0" (1),1378 "m" (*pu32)1379 : "memory");1380 return u32+1;1381 # else1382 __asm1383 {1384 mov eax, 11385 # ifdef RT_ARCH_AMD641386 mov rdx, [pu32]1387 lock xadd [rdx], eax1388 # else1389 mov edx, [pu32]1390 lock xadd [edx], eax1391 # endif1392 mov u32, eax1393 }1394 return u32+1;1395 # endif1396 }1397 #endif1398 1399 1400 /**1401 * Atomically increment a signed 32-bit value, ordered.1402 *1403 * @returns The new value.1404 * @param pi32 Pointer to the value to increment.1405 */1406 DECLINLINE(int32_t) ASMAtomicIncS32(int32_t volatile *pi32)1407 {1408 return (int32_t)ASMAtomicIncU32((uint32_t volatile *)pi32);1409 }1410 1411 1412 /**1413 * Atomically decrement an unsigned 32-bit value, ordered.1414 *1415 * @returns The new value.1416 * @param pu32 Pointer to the value to decrement.1417 */1418 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1419 DECLASM(uint32_t) ASMAtomicDecU32(uint32_t volatile *pu32);1420 #else1421 DECLINLINE(uint32_t) ASMAtomicDecU32(uint32_t volatile *pu32)1422 {1423 uint32_t u32;1424 # if RT_INLINE_ASM_USES_INTRIN1425 u32 = _InterlockedDecrement((long *)pu32);1426 return u32;1427 1428 # elif RT_INLINE_ASM_GNU_STYLE1429 __asm__ __volatile__("lock; xaddl %0, %1\n\t"1430 : "=r" (u32),1431 "=m" (*pu32)1432 : "0" (-1),1433 "m" (*pu32)1434 : "memory");1435 return u32-1;1436 # else1437 __asm1438 {1439 mov eax, -11440 # ifdef RT_ARCH_AMD641441 mov rdx, [pu32]1442 lock xadd [rdx], eax1443 # else1444 mov edx, [pu32]1445 lock xadd [edx], eax1446 # endif1447 mov u32, eax1448 }1449 return u32-1;1450 # endif1451 }1452 #endif1453 1454 1455 /**1456 * Atomically decrement a signed 32-bit value, ordered.1457 *1458 * @returns The new value.1459 * @param pi32 Pointer to the value to decrement.1460 */1461 DECLINLINE(int32_t) ASMAtomicDecS32(int32_t volatile *pi32)1462 {1463 return (int32_t)ASMAtomicDecU32((uint32_t volatile *)pi32);1464 }1465 1466 1467 /**1468 * Atomically Or an unsigned 32-bit value, ordered.1469 *1470 * @param pu32 Pointer to the pointer variable to OR u32 with.1471 * @param u32 The value to OR *pu32 with.1472 */1473 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1474 DECLASM(void) ASMAtomicOrU32(uint32_t volatile *pu32, uint32_t u32);1475 #else1476 
DECLINLINE(void) ASMAtomicOrU32(uint32_t volatile *pu32, uint32_t u32)1477 {1478 # if RT_INLINE_ASM_USES_INTRIN1479 _InterlockedOr((long volatile *)pu32, (long)u32);1480 1481 # elif RT_INLINE_ASM_GNU_STYLE1482 __asm__ __volatile__("lock; orl %1, %0\n\t"1483 : "=m" (*pu32)1484 : "ir" (u32),1485 "m" (*pu32));1486 # else1487 __asm1488 {1489 mov eax, [u32]1490 # ifdef RT_ARCH_AMD641491 mov rdx, [pu32]1492 lock or [rdx], eax1493 # else1494 mov edx, [pu32]1495 lock or [edx], eax1496 # endif1497 }1498 # endif1499 }1500 #endif1501 1502 1503 /**1504 * Atomically Or a signed 32-bit value, ordered.1505 *1506 * @param pi32 Pointer to the pointer variable to OR u32 with.1507 * @param i32 The value to OR *pu32 with.1508 */1509 DECLINLINE(void) ASMAtomicOrS32(int32_t volatile *pi32, int32_t i32)1510 {1511 ASMAtomicOrU32((uint32_t volatile *)pi32, i32);1512 }1513 1514 1515 /**1516 * Atomically And an unsigned 32-bit value, ordered.1517 *1518 * @param pu32 Pointer to the pointer variable to AND u32 with.1519 * @param u32 The value to AND *pu32 with.1520 */1521 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN1522 DECLASM(void) ASMAtomicAndU32(uint32_t volatile *pu32, uint32_t u32);1523 #else1524 DECLINLINE(void) ASMAtomicAndU32(uint32_t volatile *pu32, uint32_t u32)1525 {1526 # if RT_INLINE_ASM_USES_INTRIN1527 _InterlockedAnd((long volatile *)pu32, u32);1528 1529 # elif RT_INLINE_ASM_GNU_STYLE1530 __asm__ __volatile__("lock; andl %1, %0\n\t"1531 : "=m" (*pu32)1532 : "ir" (u32),1533 "m" (*pu32));1534 # else1535 __asm1536 {1537 mov eax, [u32]1538 # ifdef RT_ARCH_AMD641539 mov rdx, [pu32]1540 lock and [rdx], eax1541 # else1542 mov edx, [pu32]1543 lock and [edx], eax1544 # endif1545 }1546 # endif1547 }1548 #endif1549 1550 1551 /**1552 * Atomically And a signed 32-bit value, ordered.1553 *1554 * @param pi32 Pointer to the pointer variable to AND i32 with.1555 * @param i32 The value to AND *pi32 with.1556 */1557 DECLINLINE(void) ASMAtomicAndS32(int32_t volatile *pi32, int32_t i32)1558 {1559 ASMAtomicAndU32((uint32_t volatile *)pi32, (uint32_t)i32);1560 }1561 1562 1563 /**1564 1334 * Serialize Instruction. 1565 1335 */ … … 2645 2415 2646 2416 2417 /** 2418 * Atomically exchanges and adds to a 32-bit value, ordered. 2419 * 2420 * @returns The old value. 2421 * @param pu32 Pointer to the value. 2422 * @param u32 Number to add. 2423 */ 2424 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2425 DECLASM(uint32_t) ASMAtomicAddU32(uint32_t volatile *pu32, uint32_t u32); 2426 #else 2427 DECLINLINE(uint32_t) ASMAtomicAddU32(uint32_t volatile *pu32, uint32_t u32) 2428 { 2429 # if RT_INLINE_ASM_USES_INTRIN 2430 u32 = _InterlockedExchangeAdd((long *)pu32, u32); 2431 return u32; 2432 2433 # elif RT_INLINE_ASM_GNU_STYLE 2434 __asm__ __volatile__("lock; xaddl %0, %1\n\t" 2435 : "=r" (u32), 2436 "=m" (*pu32) 2437 : "0" (u32), 2438 "m" (*pu32) 2439 : "memory"); 2440 return u32; 2441 # else 2442 __asm 2443 { 2444 mov eax, [u32] 2445 # ifdef RT_ARCH_AMD64 2446 mov rdx, [pu32] 2447 lock xadd [rdx], eax 2448 # else 2449 mov edx, [pu32] 2450 lock xadd [edx], eax 2451 # endif 2452 mov [u32], eax 2453 } 2454 return u32; 2455 # endif 2456 } 2457 #endif 2458 2459 2460 /** 2461 * Atomically exchanges and adds to a signed 32-bit value, ordered. 2462 * 2463 * @returns The old value. 2464 * @param pi32 Pointer to the value. 2465 * @param i32 Number to add. 
2466 */ 2467 DECLINLINE(int32_t) ASMAtomicAddS32(int32_t volatile *pi32, int32_t i32) 2468 { 2469 return (int32_t)ASMAtomicAddU32((uint32_t volatile *)pi32, (uint32_t)i32); 2470 } 2471 2472 2473 /** 2474 * Atomically exchanges and adds to a 64-bit value, ordered. 2475 * 2476 * @returns The old value. 2477 * @param pu64 Pointer to the value. 2478 * @param u64 Number to add. 2479 */ 2480 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2481 DECLASM(uint64_t) ASMAtomicAddU64(uint64_t volatile *pu64, uint64_t u64); 2482 #else 2483 DECLINLINE(uint64_t) ASMAtomicAddU64(uint64_t volatile *pu64, uint64_t u64) 2484 { 2485 # if RT_INLINE_ASM_USES_INTRIN && defined(RT_ARCH_AMD64) 2486 u64 = _InterlockedExchangeAdd64((__int64 *)pu64, u64); 2487 return u64; 2488 2489 # elif RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 2490 __asm__ __volatile__("lock; xaddq %0, %1\n\t" 2491 : "=r" (u64), 2492 "=m" (*pu64) 2493 : "0" (u64), 2494 "m" (*pu64) 2495 : "memory"); 2496 return u64; 2497 # else 2498 uint64_t u64New; 2499 for (;;) 2500 { 2501 uint64_t u64Old = ASMAtomicUoReadU64(pu64); 2502 u64New = u64Old + u64; 2503 if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old)) 2504 break; 2505 ASMNopPause(); 2506 } 2507 return u64New; 2508 # endif 2509 } 2510 #endif 2511 2512 2513 /** 2514 * Atomically exchanges and adds to a signed 64-bit value, ordered. 2515 * 2516 * @returns The old value. 2517 * @param pi64 Pointer to the value. 2518 * @param i64 Number to add. 2519 */ 2520 DECLINLINE(int64_t) ASMAtomicAddS64(int64_t volatile *pi64, int64_t i64) 2521 { 2522 return (int64_t)ASMAtomicAddU64((uint64_t volatile *)pi64, (uint64_t)i64); 2523 } 2524 2525 2526 /** 2527 * Atomically exchanges and subtracts to an unsigned 32-bit value, ordered. 2528 * 2529 * @returns The old value. 2530 * @param pu32 Pointer to the value. 2531 * @param u32 Number to subtract. 2532 */ 2533 DECLINLINE(uint32_t) ASMAtomicSubU32(uint32_t volatile *pu32, uint32_t u32) 2534 { 2535 return ASMAtomicAddU32(pu32, (uint32_t)-(int32_t)u32); 2536 } 2537 2538 2539 /** 2540 * Atomically exchanges and subtracts to a signed 32-bit value, ordered. 2541 * 2542 * @returns The old value. 2543 * @param pi32 Pointer to the value. 2544 * @param i32 Number to subtract. 2545 */ 2546 DECLINLINE(int32_t) ASMAtomicSubS32(int32_t volatile *pi32, int32_t i32) 2547 { 2548 return (int32_t)ASMAtomicAddU32((uint32_t volatile *)pi32, (uint32_t)-i32); 2549 } 2550 2551 2552 /** 2553 * Atomically exchanges and subtracts to an unsigned 64-bit value, ordered. 2554 * 2555 * @returns The old value. 2556 * @param pu64 Pointer to the value. 2557 * @param u64 Number to subtract. 2558 */ 2559 DECLINLINE(uint64_t) ASMAtomicSubU64(uint64_t volatile *pu64, uint64_t u64) 2560 { 2561 return ASMAtomicAddU64(pu64, (uint64_t)-(int64_t)u64); 2562 } 2563 2564 2565 /** 2566 * Atomically exchanges and subtracts to a signed 64-bit value, ordered. 2567 * 2568 * @returns The old value. 2569 * @param pi64 Pointer to the value. 2570 * @param i64 Number to subtract. 2571 */ 2572 DECLINLINE(int64_t) ASMAtomicSubS64(int64_t volatile *pi64, int64_t i64) 2573 { 2574 return (int64_t)ASMAtomicAddU64((uint64_t volatile *)pi64, (uint64_t)-i64); 2575 } 2576 2577 2578 /** 2579 * Atomically increment a 32-bit value, ordered. 2580 * 2581 * @returns The new value. 2582 * @param pu32 Pointer to the value to increment. 
2583 */ 2584 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2585 DECLASM(uint32_t) ASMAtomicIncU32(uint32_t volatile *pu32); 2586 #else 2587 DECLINLINE(uint32_t) ASMAtomicIncU32(uint32_t volatile *pu32) 2588 { 2589 uint32_t u32; 2590 # if RT_INLINE_ASM_USES_INTRIN 2591 u32 = _InterlockedIncrement((long *)pu32); 2592 return u32; 2593 2594 # elif RT_INLINE_ASM_GNU_STYLE 2595 __asm__ __volatile__("lock; xaddl %0, %1\n\t" 2596 : "=r" (u32), 2597 "=m" (*pu32) 2598 : "0" (1), 2599 "m" (*pu32) 2600 : "memory"); 2601 return u32+1; 2602 # else 2603 __asm 2604 { 2605 mov eax, 1 2606 # ifdef RT_ARCH_AMD64 2607 mov rdx, [pu32] 2608 lock xadd [rdx], eax 2609 # else 2610 mov edx, [pu32] 2611 lock xadd [edx], eax 2612 # endif 2613 mov u32, eax 2614 } 2615 return u32+1; 2616 # endif 2617 } 2618 #endif 2619 2620 2621 /** 2622 * Atomically increment a signed 32-bit value, ordered. 2623 * 2624 * @returns The new value. 2625 * @param pi32 Pointer to the value to increment. 2626 */ 2627 DECLINLINE(int32_t) ASMAtomicIncS32(int32_t volatile *pi32) 2628 { 2629 return (int32_t)ASMAtomicIncU32((uint32_t volatile *)pi32); 2630 } 2631 2632 2633 /** 2634 * Atomically increment a 64-bit value, ordered. 2635 * 2636 * @returns The new value. 2637 * @param pu64 Pointer to the value to increment. 2638 */ 2639 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2640 DECLASM(uint64_t) ASMAtomicIncU64(uint64_t volatile *pu64); 2641 #else 2642 DECLINLINE(uint64_t) ASMAtomicIncU64(uint64_t volatile *pu64) 2643 { 2644 uint64_t u64; 2645 # if RT_INLINE_ASM_USES_INTRIN && defined(RT_ARCH_AMD64) 2646 u64 = _InterlockedIncrement64((__int64 *)pu64); 2647 return u64; 2648 2649 # elif RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 2650 __asm__ __volatile__("lock; xaddq %0, %1\n\t" 2651 : "=r" (u64), 2652 "=m" (*pu64) 2653 : "0" (1), 2654 "m" (*pu64) 2655 : "memory"); 2656 return u64 + 1; 2657 # else 2658 return ASMAtomicAddU64(pu64, 1) + 1; 2659 # endif 2660 } 2661 #endif 2662 2663 2664 /** 2665 * Atomically increment a signed 64-bit value, ordered. 2666 * 2667 * @returns The new value. 2668 * @param pi64 Pointer to the value to increment. 2669 */ 2670 DECLINLINE(int64_t) ASMAtomicIncS64(int64_t volatile *pi64) 2671 { 2672 return (int64_t)ASMAtomicIncU64((uint64_t volatile *)pi64); 2673 } 2674 2675 2676 /** 2677 * Atomically decrement an unsigned 32-bit value, ordered. 2678 * 2679 * @returns The new value. 2680 * @param pu32 Pointer to the value to decrement. 2681 */ 2682 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2683 DECLASM(uint32_t) ASMAtomicDecU32(uint32_t volatile *pu32); 2684 #else 2685 DECLINLINE(uint32_t) ASMAtomicDecU32(uint32_t volatile *pu32) 2686 { 2687 uint32_t u32; 2688 # if RT_INLINE_ASM_USES_INTRIN 2689 u32 = _InterlockedDecrement((long *)pu32); 2690 return u32; 2691 2692 # elif RT_INLINE_ASM_GNU_STYLE 2693 __asm__ __volatile__("lock; xaddl %0, %1\n\t" 2694 : "=r" (u32), 2695 "=m" (*pu32) 2696 : "0" (-1), 2697 "m" (*pu32) 2698 : "memory"); 2699 return u32-1; 2700 # else 2701 __asm 2702 { 2703 mov eax, -1 2704 # ifdef RT_ARCH_AMD64 2705 mov rdx, [pu32] 2706 lock xadd [rdx], eax 2707 # else 2708 mov edx, [pu32] 2709 lock xadd [edx], eax 2710 # endif 2711 mov u32, eax 2712 } 2713 return u32-1; 2714 # endif 2715 } 2716 #endif 2717 2718 2719 /** 2720 * Atomically decrement a signed 32-bit value, ordered. 2721 * 2722 * @returns The new value. 2723 * @param pi32 Pointer to the value to decrement. 
2724 */ 2725 DECLINLINE(int32_t) ASMAtomicDecS32(int32_t volatile *pi32) 2726 { 2727 return (int32_t)ASMAtomicDecU32((uint32_t volatile *)pi32); 2728 } 2729 2730 2731 /** 2732 * Atomically decrement an unsigned 64-bit value, ordered. 2733 * 2734 * @returns The new value. 2735 * @param pu64 Pointer to the value to decrement. 2736 */ 2737 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2738 DECLASM(uint64_t) ASMAtomicDecU64(uint64_t volatile *pu64); 2739 #else 2740 DECLINLINE(uint64_t) ASMAtomicDecU64(uint64_t volatile *pu64) 2741 { 2742 # if RT_INLINE_ASM_USES_INTRIN && defined(RT_ARCH_AMD64) 2743 uint64_t u64 = _InterlockedDecrement64((__int64 volatile *)pu64); 2744 return u64; 2745 2746 # elif RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 2747 uint64_t u64; 2748 __asm__ __volatile__("lock; xaddq %q0, %1\n\t" 2749 : "=r" (u64), 2750 "=m" (*pu64) 2751 : "0" (~(uint64_t)0), 2752 "m" (*pu64) 2753 : "memory"); 2754 return u64-1; 2755 # else 2756 return ASMAtomicAddU64(pu64, UINT64_MAX) - 1; 2757 # endif 2758 } 2759 #endif 2760 2761 2762 /** 2763 * Atomically decrement a signed 64-bit value, ordered. 2764 * 2765 * @returns The new value. 2766 * @param pi64 Pointer to the value to decrement. 2767 */ 2768 DECLINLINE(int64_t) ASMAtomicDecS64(int64_t volatile *pi64) 2769 { 2770 return (int64_t)ASMAtomicDecU64((uint64_t volatile *)pi64); 2771 } 2772 2773 2774 /** 2775 * Atomically Or an unsigned 32-bit value, ordered. 2776 * 2777 * @param pu32 Pointer to the pointer variable to OR u32 with. 2778 * @param u32 The value to OR *pu32 with. 2779 */ 2780 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2781 DECLASM(void) ASMAtomicOrU32(uint32_t volatile *pu32, uint32_t u32); 2782 #else 2783 DECLINLINE(void) ASMAtomicOrU32(uint32_t volatile *pu32, uint32_t u32) 2784 { 2785 # if RT_INLINE_ASM_USES_INTRIN 2786 _InterlockedOr((long volatile *)pu32, (long)u32); 2787 2788 # elif RT_INLINE_ASM_GNU_STYLE 2789 __asm__ __volatile__("lock; orl %1, %0\n\t" 2790 : "=m" (*pu32) 2791 : "ir" (u32), 2792 "m" (*pu32)); 2793 # else 2794 __asm 2795 { 2796 mov eax, [u32] 2797 # ifdef RT_ARCH_AMD64 2798 mov rdx, [pu32] 2799 lock or [rdx], eax 2800 # else 2801 mov edx, [pu32] 2802 lock or [edx], eax 2803 # endif 2804 } 2805 # endif 2806 } 2807 #endif 2808 2809 2810 /** 2811 * Atomically Or a signed 32-bit value, ordered. 2812 * 2813 * @param pi32 Pointer to the pointer variable to OR u32 with. 2814 * @param i32 The value to OR *pu32 with. 2815 */ 2816 DECLINLINE(void) ASMAtomicOrS32(int32_t volatile *pi32, int32_t i32) 2817 { 2818 ASMAtomicOrU32((uint32_t volatile *)pi32, i32); 2819 } 2820 2821 2822 /** 2823 * Atomically Or an unsigned 64-bit value, ordered. 2824 * 2825 * @param pu64 Pointer to the pointer variable to OR u64 with. 2826 * @param u64 The value to OR *pu64 with. 
2827 */ 2828 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2829 DECLASM(void) ASMAtomicOrU64(uint64_t volatile *pu64, uint64_t u64); 2830 #else 2831 DECLINLINE(void) ASMAtomicOrU64(uint64_t volatile *pu64, uint64_t u64) 2832 { 2833 # if RT_INLINE_ASM_USES_INTRIN && defined(RT_ARCH_AMD64) 2834 _InterlockedOr64((__int64 volatile *)pu64, (__int64)u64); 2835 2836 # elif RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 2837 __asm__ __volatile__("lock; orq %1, %q0\n\t" 2838 : "=m" (*pu64) 2839 : "r" (u64), 2840 "m" (*pu64)); 2841 # else 2842 for (;;) 2843 { 2844 uint64_t u64Old = ASMAtomicUoReadU64(pu64); 2845 uint64_t u64New = u64Old | u64; 2846 if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old)) 2847 break; 2848 ASMNopPause(); 2849 } 2850 # endif 2851 } 2852 #endif 2853 2854 2855 /** 2856 * Atomically Or a signed 64-bit value, ordered. 2857 * 2858 * @param pi64 Pointer to the pointer variable to OR u64 with. 2859 * @param i64 The value to OR *pu64 with. 2860 */ 2861 DECLINLINE(void) ASMAtomicOrS64(int64_t volatile *pi64, int64_t i64) 2862 { 2863 ASMAtomicOrU64((uint64_t volatile *)pi64, i64); 2864 } 2865 /** 2866 * Atomically And an unsigned 32-bit value, ordered. 2867 * 2868 * @param pu32 Pointer to the pointer variable to AND u32 with. 2869 * @param u32 The value to AND *pu32 with. 2870 */ 2871 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2872 DECLASM(void) ASMAtomicAndU32(uint32_t volatile *pu32, uint32_t u32); 2873 #else 2874 DECLINLINE(void) ASMAtomicAndU32(uint32_t volatile *pu32, uint32_t u32) 2875 { 2876 # if RT_INLINE_ASM_USES_INTRIN 2877 _InterlockedAnd((long volatile *)pu32, u32); 2878 2879 # elif RT_INLINE_ASM_GNU_STYLE 2880 __asm__ __volatile__("lock; andl %1, %0\n\t" 2881 : "=m" (*pu32) 2882 : "ir" (u32), 2883 "m" (*pu32)); 2884 # else 2885 __asm 2886 { 2887 mov eax, [u32] 2888 # ifdef RT_ARCH_AMD64 2889 mov rdx, [pu32] 2890 lock and [rdx], eax 2891 # else 2892 mov edx, [pu32] 2893 lock and [edx], eax 2894 # endif 2895 } 2896 # endif 2897 } 2898 #endif 2899 2900 2901 /** 2902 * Atomically And a signed 32-bit value, ordered. 2903 * 2904 * @param pi32 Pointer to the pointer variable to AND i32 with. 2905 * @param i32 The value to AND *pi32 with. 2906 */ 2907 DECLINLINE(void) ASMAtomicAndS32(int32_t volatile *pi32, int32_t i32) 2908 { 2909 ASMAtomicAndU32((uint32_t volatile *)pi32, (uint32_t)i32); 2910 } 2911 2912 2913 /** 2914 * Atomically And an unsigned 64-bit value, ordered. 2915 * 2916 * @param pu64 Pointer to the pointer variable to AND u64 with. 2917 * @param u64 The value to AND *pu64 with. 2918 */ 2919 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN 2920 DECLASM(void) ASMAtomicAndU64(uint64_t volatile *pu64, uint64_t u64); 2921 #else 2922 DECLINLINE(void) ASMAtomicAndU64(uint64_t volatile *pu64, uint64_t u64) 2923 { 2924 # if RT_INLINE_ASM_USES_INTRIN && defined(RT_ARCH_AMD64) 2925 _InterlockedAnd64((__int64 volatile *)pu64, u64); 2926 2927 # elif RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 2928 __asm__ __volatile__("lock; andq %1, %0\n\t" 2929 : "=m" (*pu64) 2930 : "r" (u64), 2931 "m" (*pu64)); 2932 # else 2933 for (;;) 2934 { 2935 uint64_t u64Old = ASMAtomicUoReadU64(pu64); 2936 uint64_t u64New = u64Old & u64; 2937 if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old)) 2938 break; 2939 ASMNopPause(); 2940 } 2941 # endif 2942 } 2943 #endif 2944 2945 2946 /** 2947 * Atomically And a signed 64-bit value, ordered. 2948 * 2949 * @param pi64 Pointer to the pointer variable to AND i64 with. 2950 * @param i64 The value to AND *pi64 with. 
2951 */ 2952 DECLINLINE(void) ASMAtomicAndS64(int64_t volatile *pi64, int64_t i64) 2953 { 2954 ASMAtomicAndU64((uint64_t volatile *)pi64, (uint64_t)i64); 2955 } 2956 2957 2647 2958 2648 2959 /** @def RT_ASM_PAGE_SIZE … … 2998 3309 ASMProbeReadByte(pu8 + cbBuf - 1); 2999 3310 } 3000 3001 3002 /** @def ASMBreakpoint3003 * Debugger Breakpoint.3004 * @remark In the gnu world we add a nop instruction after the int3 to3005 * force gdb to remain at the int3 source line.3006 * @remark The L4 kernel will try make sense of the breakpoint, thus the jmp.3007 * @internal3008 */3009 #if RT_INLINE_ASM_GNU_STYLE3010 # if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)3011 # ifndef __L4ENV__3012 # define ASMBreakpoint() do { __asm__ __volatile__("int3\n\tnop"); } while (0)3013 # else3014 # define ASMBreakpoint() do { __asm__ __volatile__("int3; jmp 1f; 1:"); } while (0)3015 # endif3016 # elif defined(RT_ARCH_SPARC64)3017 # define ASMBreakpoint() do { __asm__ __volatile__("illtrap 0\n\t") } while (0) /** @todo Sparc64: this is just a wild guess. */3018 # elif defined(RT_ARCH_SPARC)3019 # define ASMBreakpoint() do { __asm__ __volatile__("unimp 0\n\t"); } while (0) /** @todo Sparc: this is just a wild guess (same as Sparc64, just different name). */3020 # else3021 # error "PORTME"3022 # endif3023 #else3024 # define ASMBreakpoint() __debugbreak()3025 #endif3026 3027 3028 /**3029 * Spinloop hint for platforms that have these, empty function on the other3030 * platforms.3031 *3032 * x86 & AMD64: The PAUSE variant of NOP for helping hyperthreaded CPUs detecing3033 * spin locks.3034 */3035 #if RT_INLINE_ASM_EXTERNAL && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))3036 DECLASM(void) ASMNopPause(void);3037 #else3038 DECLINLINE(void) ASMNopPause(void)3039 {3040 # if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)3041 # if RT_INLINE_ASM_GNU_STYLE3042 __asm__ __volatile__(".byte 0xf3,0x90\n\t");3043 # else3044 __asm {3045 _emit 0f3h3046 _emit 090h3047 }3048 # endif3049 # else3050 /* dummy */3051 # endif3052 }3053 #endif3054 3311 3055 3312 -
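On targets without a native 64-bit lock-prefixed instruction or MSC intrinsic (e.g. 32-bit x86 builds), the new 64-bit helpers above fall back to a compare-and-swap loop built from ASMAtomicUoReadU64, ASMAtomicCmpXchgU64 and ASMNopPause. The sketch below is a simplified restatement of that pattern with a hypothetical wrapper name, not a verbatim copy of the header.

static void atomicOrU64ViaCmpXchg(uint64_t volatile *pu64, uint64_t u64)
{
    for (;;)
    {
        uint64_t u64Old = ASMAtomicUoReadU64(pu64);     /* unordered read of the current value */
        uint64_t u64New = u64Old | u64;                 /* compute the desired result */
        if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old))  /* succeeds only if *pu64 still equals u64Old */
            break;
        ASMNopPause();                                  /* spin-loop hint for hyperthreaded CPUs, then retry */
    }
}

The same loop shape is reused for the 64-bit add and AND fallbacks; only the line computing u64New differs.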
trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp
r33130 r33136 47 47 #include <iprt/stream.h> 48 48 #include <iprt/string.h> 49 #include <iprt/initterm.h>50 49 #include <iprt/param.h> 51 50 #include <iprt/thread.h> 52 51 #include <iprt/test.h> 52 #include <iprt/time.h> 53 53 54 54 … … 62 62 if ((val) != (expect)) \ 63 63 { \ 64 RTTestIErrorInc(); \ 65 RTPrintf("%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \ 64 RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \ 66 65 } \ 67 66 } while (0) … … 73 72 if (val != (type)(expect)) \ 74 73 { \ 75 RTTestIErrorInc(); \ 76 RTPrintf("%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \ 74 RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \ 77 75 } \ 78 76 } while (0) 77 78 /** 79 * Calls a worker function with different worker variable storage types. 80 */ 81 #define DO_SIMPLE_TEST(name, type) \ 82 do \ 83 { \ 84 RTTestISub(#name); \ 85 type StackVar; \ 86 tst ## name ## Worker(&StackVar); \ 87 \ 88 type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \ 89 RTTEST_CHECK_BREAK(g_hTest, pVar); \ 90 tst ## name ## Worker(pVar); \ 91 RTTestGuardedFree(g_hTest, pVar); \ 92 \ 93 pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \ 94 RTTEST_CHECK_BREAK(g_hTest, pVar); \ 95 tst ## name ## Worker(pVar); \ 96 RTTestGuardedFree(g_hTest, pVar); \ 97 } while (0) 98 99 100 /******************************************************************************* 101 * Global Variables * 102 *******************************************************************************/ 103 /** The test instance. */ 104 static RTTEST g_hTest; 105 79 106 80 107 … … 130 157 void tstASMCpuId(void) 131 158 { 159 RTTestISub("ASMCpuId"); 160 132 161 unsigned iBit; 133 162 struct … … 137 166 if (!ASMHasCpuId()) 138 167 { 139 RT Printf("tstInlineAsm:warning! CPU doesn't support CPUID\n");168 RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n"); 140 169 return; 141 170 } … … 166 195 * Done testing, dump the information. 167 196 */ 168 RT Printf("tstInlineAsm:CPUID Dump\n");197 RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n"); 169 198 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 170 199 const uint32_t cFunctions = s.uEAX; 171 200 172 201 /* raw dump */ 173 RTPrintf("\n" 174 " RAW Standard CPUIDs\n" 175 "Function eax ebx ecx edx\n"); 202 RTTestIPrintf(RTTESTLVL_ALWAYS, 203 "\n" 204 " RAW Standard CPUIDs\n" 205 "Function eax ebx ecx edx\n"); 176 206 for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++) 177 207 { … … 179 209 continue; /* Leaf 04 output depends on the initial value of ECX */ 180 210 ASMCpuId(iStd, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 181 RT Printf("%08x %08x %08x %08x %08x%s\n",182 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");211 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n", 212 iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? 
"" : "*"); 183 213 184 214 u32 = ASMCpuId_EAX(iStd); … … 202 232 */ 203 233 ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 204 RTPrintf("Name: %.04s%.04s%.04s\n" 205 "Support: 0-%u\n", 206 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 234 RTTestIPrintf(RTTESTLVL_ALWAYS, 235 "Name: %.04s%.04s%.04s\n" 236 "Support: 0-%u\n", 237 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 207 238 bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX); 208 239 … … 214 245 static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" }; 215 246 ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 216 RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n" 217 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 218 "Stepping: %d\n" 219 "Type: %d (%s)\n" 220 "APIC ID: %#04x\n" 221 "Logical CPUs: %d\n" 222 "CLFLUSH Size: %d\n" 223 "Brand ID: %#04x\n", 224 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 225 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 226 ASMGetCpuStepping(s.uEAX), 227 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3], 228 (s.uEBX >> 24) & 0xff, 229 (s.uEBX >> 16) & 0xff, 230 (s.uEBX >> 8) & 0xff, 231 (s.uEBX >> 0) & 0xff); 232 233 RTPrintf("Features EDX: "); 234 if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU"); 235 if (s.uEDX & RT_BIT(1)) RTPrintf(" VME"); 236 if (s.uEDX & RT_BIT(2)) RTPrintf(" DE"); 237 if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE"); 238 if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC"); 239 if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR"); 240 if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE"); 241 if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE"); 242 if (s.uEDX & RT_BIT(8)) RTPrintf(" CX8"); 243 if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC"); 244 if (s.uEDX & RT_BIT(10)) RTPrintf(" 10"); 245 if (s.uEDX & RT_BIT(11)) RTPrintf(" SEP"); 246 if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR"); 247 if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE"); 248 if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA"); 249 if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV"); 250 if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT"); 251 if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36"); 252 if (s.uEDX & RT_BIT(18)) RTPrintf(" PSN"); 253 if (s.uEDX & RT_BIT(19)) RTPrintf(" CLFSH"); 254 if (s.uEDX & RT_BIT(20)) RTPrintf(" 20"); 255 if (s.uEDX & RT_BIT(21)) RTPrintf(" DS"); 256 if (s.uEDX & RT_BIT(22)) RTPrintf(" ACPI"); 257 if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX"); 258 if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR"); 259 if (s.uEDX & RT_BIT(25)) RTPrintf(" SSE"); 260 if (s.uEDX & RT_BIT(26)) RTPrintf(" SSE2"); 261 if (s.uEDX & RT_BIT(27)) RTPrintf(" SS"); 262 if (s.uEDX & RT_BIT(28)) RTPrintf(" HTT"); 263 if (s.uEDX & RT_BIT(29)) RTPrintf(" 29"); 264 if (s.uEDX & RT_BIT(30)) RTPrintf(" 30"); 265 if (s.uEDX & RT_BIT(31)) RTPrintf(" 31"); 266 RTPrintf("\n"); 247 RTTestIPrintf(RTTESTLVL_ALWAYS, 248 "Family: %#x \tExtended: %#x \tEffective: %#x\n" 249 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 250 "Stepping: %d\n" 251 "Type: %d (%s)\n" 252 "APIC ID: %#04x\n" 253 "Logical CPUs: %d\n" 254 "CLFLUSH Size: %d\n" 255 "Brand ID: %#04x\n", 256 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 257 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 258 ASMGetCpuStepping(s.uEAX), 259 (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3], 260 (s.uEBX >> 24) & 0xff, 261 (s.uEBX >> 16) & 0xff, 262 (s.uEBX >> 8) & 0xff, 263 (s.uEBX >> 0) & 0xff); 264 265 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: "); 266 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU"); 267 if (s.uEDX & RT_BIT(1)) 
RTTestIPrintf(RTTESTLVL_ALWAYS, " VME"); 268 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE"); 269 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE"); 270 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC"); 271 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR"); 272 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE"); 273 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE"); 274 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8"); 275 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC"); 276 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10"); 277 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP"); 278 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR"); 279 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE"); 280 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA"); 281 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV"); 282 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT"); 283 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36"); 284 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN"); 285 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH"); 286 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20"); 287 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS"); 288 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI"); 289 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX"); 290 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR"); 291 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE"); 292 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2"); 293 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS"); 294 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT"); 295 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29"); 296 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30"); 297 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31"); 298 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 267 299 268 300 /** @todo check intel docs. */ 269 RT Printf("Features ECX: ");270 if (s.uECX & RT_BIT(0)) RT Printf(" SSE3");301 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: "); 302 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3"); 271 303 for (iBit = 1; iBit < 13; iBit++) 272 304 if (s.uECX & RT_BIT(iBit)) 273 RT Printf(" %d", iBit);274 if (s.uECX & RT_BIT(13)) RT Printf(" CX16");305 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 306 if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16"); 275 307 for (iBit = 14; iBit < 32; iBit++) 276 308 if (s.uECX & RT_BIT(iBit)) 277 RT Printf(" %d", iBit);278 RT Printf("\n");309 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 310 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 279 311 } 280 312 … … 287 319 if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX) 288 320 { 289 RT Printf("No extended CPUID info? Check the manual on how to detect this...\n");321 RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? 
Check the manual on how to detect this...\n"); 290 322 return; 291 323 } … … 293 325 294 326 /* raw dump */ 295 RTPrintf("\n" 296 " RAW Extended CPUIDs\n" 297 "Function eax ebx ecx edx\n"); 327 RTTestIPrintf(RTTESTLVL_ALWAYS, 328 "\n" 329 " RAW Extended CPUIDs\n" 330 "Function eax ebx ecx edx\n"); 298 331 for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++) 299 332 { 300 333 ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 301 RT Printf("%08x %08x %08x %08x %08x%s\n",302 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");334 RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n", 335 iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*"); 303 336 304 337 u32 = ASMCpuId_EAX(iExt); … … 322 355 */ 323 356 ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 324 RTPrintf("Ext Name: %.4s%.4s%.4s\n" 325 "Ext Supports: 0x80000000-%#010x\n", 326 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 357 RTTestIPrintf(RTTESTLVL_ALWAYS, 358 "Ext Name: %.4s%.4s%.4s\n" 359 "Ext Supports: 0x80000000-%#010x\n", 360 &s.uEBX, &s.uEDX, &s.uECX, s.uEAX); 327 361 328 362 if (cExtFunctions >= 0x80000001) 329 363 { 330 364 ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 331 RTPrintf("Family: %#x \tExtended: %#x \tEffective: %#x\n" 332 "Model: %#x \tExtended: %#x \tEffective: %#x\n" 333 "Stepping: %d\n" 334 "Brand ID: %#05x\n", 335 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 336 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 337 ASMGetCpuStepping(s.uEAX), 338 s.uEBX & 0xfff); 339 340 RTPrintf("Features EDX: "); 341 if (s.uEDX & RT_BIT(0)) RTPrintf(" FPU"); 342 if (s.uEDX & RT_BIT(1)) RTPrintf(" VME"); 343 if (s.uEDX & RT_BIT(2)) RTPrintf(" DE"); 344 if (s.uEDX & RT_BIT(3)) RTPrintf(" PSE"); 345 if (s.uEDX & RT_BIT(4)) RTPrintf(" TSC"); 346 if (s.uEDX & RT_BIT(5)) RTPrintf(" MSR"); 347 if (s.uEDX & RT_BIT(6)) RTPrintf(" PAE"); 348 if (s.uEDX & RT_BIT(7)) RTPrintf(" MCE"); 349 if (s.uEDX & RT_BIT(8)) RTPrintf(" CMPXCHG8B"); 350 if (s.uEDX & RT_BIT(9)) RTPrintf(" APIC"); 351 if (s.uEDX & RT_BIT(10)) RTPrintf(" 10"); 352 if (s.uEDX & RT_BIT(11)) RTPrintf(" SysCallSysRet"); 353 if (s.uEDX & RT_BIT(12)) RTPrintf(" MTRR"); 354 if (s.uEDX & RT_BIT(13)) RTPrintf(" PGE"); 355 if (s.uEDX & RT_BIT(14)) RTPrintf(" MCA"); 356 if (s.uEDX & RT_BIT(15)) RTPrintf(" CMOV"); 357 if (s.uEDX & RT_BIT(16)) RTPrintf(" PAT"); 358 if (s.uEDX & RT_BIT(17)) RTPrintf(" PSE36"); 359 if (s.uEDX & RT_BIT(18)) RTPrintf(" 18"); 360 if (s.uEDX & RT_BIT(19)) RTPrintf(" 19"); 361 if (s.uEDX & RT_BIT(20)) RTPrintf(" NX"); 362 if (s.uEDX & RT_BIT(21)) RTPrintf(" 21"); 363 if (s.uEDX & RT_BIT(22)) RTPrintf(" MmxExt"); 364 if (s.uEDX & RT_BIT(23)) RTPrintf(" MMX"); 365 if (s.uEDX & RT_BIT(24)) RTPrintf(" FXSR"); 366 if (s.uEDX & RT_BIT(25)) RTPrintf(" FastFXSR"); 367 if (s.uEDX & RT_BIT(26)) RTPrintf(" 26"); 368 if (s.uEDX & RT_BIT(27)) RTPrintf(" RDTSCP"); 369 if (s.uEDX & RT_BIT(28)) RTPrintf(" 28"); 370 if (s.uEDX & RT_BIT(29)) RTPrintf(" LongMode"); 371 if (s.uEDX & RT_BIT(30)) RTPrintf(" 3DNowExt"); 372 if (s.uEDX & RT_BIT(31)) RTPrintf(" 3DNow"); 373 RTPrintf("\n"); 374 375 RTPrintf("Features ECX: "); 376 if (s.uECX & RT_BIT(0)) RTPrintf(" LahfSahf"); 377 if (s.uECX & RT_BIT(1)) RTPrintf(" CmpLegacy"); 378 if (s.uECX & RT_BIT(2)) RTPrintf(" SVM"); 379 if (s.uECX & RT_BIT(3)) RTPrintf(" 3"); 380 if (s.uECX & RT_BIT(4)) RTPrintf(" AltMovCr8"); 365 RTTestIPrintf(RTTESTLVL_ALWAYS, 366 "Family: %#x \tExtended: %#x \tEffective: %#x\n" 367 
"Model: %#x \tExtended: %#x \tEffective: %#x\n" 368 "Stepping: %d\n" 369 "Brand ID: %#05x\n", 370 (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX), 371 (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel), 372 ASMGetCpuStepping(s.uEAX), 373 s.uEBX & 0xfff); 374 375 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: "); 376 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU"); 377 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME"); 378 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE"); 379 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE"); 380 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC"); 381 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR"); 382 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE"); 383 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE"); 384 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B"); 385 if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC"); 386 if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10"); 387 if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet"); 388 if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR"); 389 if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE"); 390 if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA"); 391 if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV"); 392 if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT"); 393 if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36"); 394 if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18"); 395 if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19"); 396 if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX"); 397 if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21"); 398 if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt"); 399 if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX"); 400 if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR"); 401 if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR"); 402 if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26"); 403 if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP"); 404 if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28"); 405 if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode"); 406 if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt"); 407 if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow"); 408 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 409 410 RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: "); 411 if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf"); 412 if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy"); 413 if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM"); 414 if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3"); 415 if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8"); 381 416 for (iBit = 5; iBit < 32; iBit++) 382 417 if (s.uECX & RT_BIT(iBit)) 383 RT Printf(" %d", iBit);384 RT Printf("\n");418 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 419 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 385 420 } 386 421 … … 393 428 ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]); 394 429 if (cExtFunctions >= 0x80000002) 395 RT Printf("Full Name: %s\n", szString);430 RTTestIPrintf(RTTESTLVL_ALWAYS, 
"Full Name: %s\n", szString); 396 431 397 432 if (cExtFunctions >= 0x80000005) 398 433 { 399 434 ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 400 RTPrintf("TLB 2/4M Instr/Uni: %s %3d entries\n" 401 "TLB 2/4M Data: %s %3d entries\n", 402 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff, 403 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff); 404 RTPrintf("TLB 4K Instr/Uni: %s %3d entries\n" 405 "TLB 4K Data: %s %3d entries\n", 406 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff, 407 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff); 408 RTPrintf("L1 Instr Cache Line Size: %d bytes\n" 409 "L1 Instr Cache Lines Per Tag: %d\n" 410 "L1 Instr Cache Associativity: %s\n" 411 "L1 Instr Cache Size: %d KB\n", 412 (s.uEDX >> 0) & 0xff, 413 (s.uEDX >> 8) & 0xff, 414 getCacheAss((s.uEDX >> 16) & 0xff), 415 (s.uEDX >> 24) & 0xff); 416 RTPrintf("L1 Data Cache Line Size: %d bytes\n" 417 "L1 Data Cache Lines Per Tag: %d\n" 418 "L1 Data Cache Associativity: %s\n" 419 "L1 Data Cache Size: %d KB\n", 420 (s.uECX >> 0) & 0xff, 421 (s.uECX >> 8) & 0xff, 422 getCacheAss((s.uECX >> 16) & 0xff), 423 (s.uECX >> 24) & 0xff); 435 RTTestIPrintf(RTTESTLVL_ALWAYS, 436 "TLB 2/4M Instr/Uni: %s %3d entries\n" 437 "TLB 2/4M Data: %s %3d entries\n", 438 getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff, 439 getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff); 440 RTTestIPrintf(RTTESTLVL_ALWAYS, 441 "TLB 4K Instr/Uni: %s %3d entries\n" 442 "TLB 4K Data: %s %3d entries\n", 443 getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff, 444 getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff); 445 RTTestIPrintf(RTTESTLVL_ALWAYS, 446 "L1 Instr Cache Line Size: %d bytes\n" 447 "L1 Instr Cache Lines Per Tag: %d\n" 448 "L1 Instr Cache Associativity: %s\n" 449 "L1 Instr Cache Size: %d KB\n", 450 (s.uEDX >> 0) & 0xff, 451 (s.uEDX >> 8) & 0xff, 452 getCacheAss((s.uEDX >> 16) & 0xff), 453 (s.uEDX >> 24) & 0xff); 454 RTTestIPrintf(RTTESTLVL_ALWAYS, 455 "L1 Data Cache Line Size: %d bytes\n" 456 "L1 Data Cache Lines Per Tag: %d\n" 457 "L1 Data Cache Associativity: %s\n" 458 "L1 Data Cache Size: %d KB\n", 459 (s.uECX >> 0) & 0xff, 460 (s.uECX >> 8) & 0xff, 461 getCacheAss((s.uECX >> 16) & 0xff), 462 (s.uECX >> 24) & 0xff); 424 463 } 425 464 … … 427 466 { 428 467 ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 429 RTPrintf("L2 TLB 2/4M Instr/Uni: %s %4d entries\n" 430 "L2 TLB 2/4M Data: %s %4d entries\n", 431 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff, 432 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff); 433 RTPrintf("L2 TLB 4K Instr/Uni: %s %4d entries\n" 434 "L2 TLB 4K Data: %s %4d entries\n", 435 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff, 436 getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff); 437 RTPrintf("L2 Cache Line Size: %d bytes\n" 438 "L2 Cache Lines Per Tag: %d\n" 439 "L2 Cache Associativity: %s\n" 440 "L2 Cache Size: %d KB\n", 441 (s.uEDX >> 0) & 0xff, 442 (s.uEDX >> 8) & 0xf, 443 getL2CacheAss((s.uEDX >> 12) & 0xf), 444 (s.uEDX >> 16) & 0xffff); 468 RTTestIPrintf(RTTESTLVL_ALWAYS, 469 "L2 TLB 2/4M Instr/Uni: %s %4d entries\n" 470 "L2 TLB 2/4M Data: %s %4d entries\n", 471 getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff, 472 getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff); 473 RTTestIPrintf(RTTESTLVL_ALWAYS, 474 "L2 TLB 4K Instr/Uni: %s %4d entries\n" 475 "L2 TLB 4K Data: %s %4d entries\n", 476 getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff, 477 getL2CacheAss((s.uEBX >> 
28) & 0xf), (s.uEBX >> 16) & 0xfff); 478 RTTestIPrintf(RTTESTLVL_ALWAYS, 479 "L2 Cache Line Size: %d bytes\n" 480 "L2 Cache Lines Per Tag: %d\n" 481 "L2 Cache Associativity: %s\n" 482 "L2 Cache Size: %d KB\n", 483 (s.uEDX >> 0) & 0xff, 484 (s.uEDX >> 8) & 0xf, 485 getL2CacheAss((s.uEDX >> 12) & 0xf), 486 (s.uEDX >> 16) & 0xffff); 445 487 } 446 488 … … 448 490 { 449 491 ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 450 RT Printf("APM Features: ");451 if (s.uEDX & RT_BIT(0)) RT Printf(" TS");452 if (s.uEDX & RT_BIT(1)) RT Printf(" FID");453 if (s.uEDX & RT_BIT(2)) RT Printf(" VID");454 if (s.uEDX & RT_BIT(3)) RT Printf(" TTP");455 if (s.uEDX & RT_BIT(4)) RT Printf(" TM");456 if (s.uEDX & RT_BIT(5)) RT Printf(" STC");457 if (s.uEDX & RT_BIT(6)) RT Printf(" 6");458 if (s.uEDX & RT_BIT(7)) RT Printf(" 7");459 if (s.uEDX & RT_BIT(8)) RT Printf(" TscInvariant");492 RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: "); 493 if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS"); 494 if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID"); 495 if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID"); 496 if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP"); 497 if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM"); 498 if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC"); 499 if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6"); 500 if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7"); 501 if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant"); 460 502 for (iBit = 9; iBit < 32; iBit++) 461 503 if (s.uEDX & RT_BIT(iBit)) 462 RT Printf(" %d", iBit);463 RT Printf("\n");504 RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit); 505 RTTestIPrintf(RTTESTLVL_ALWAYS, "\n"); 464 506 } 465 507 … … 467 509 { 468 510 ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 469 RTPrintf("Physical Address Width: %d bits\n" 470 "Virtual Address Width: %d bits\n" 471 "Guest Physical Address Width: %d bits\n", 472 (s.uEAX >> 0) & 0xff, 473 (s.uEAX >> 8) & 0xff, 474 (s.uEAX >> 16) & 0xff); 475 RTPrintf("Physical Core Count: %d\n", 476 ((s.uECX >> 0) & 0xff) + 1); 511 RTTestIPrintf(RTTESTLVL_ALWAYS, 512 "Physical Address Width: %d bits\n" 513 "Virtual Address Width: %d bits\n" 514 "Guest Physical Address Width: %d bits\n", 515 (s.uEAX >> 0) & 0xff, 516 (s.uEAX >> 8) & 0xff, 517 (s.uEAX >> 16) & 0xff); 518 RTTestIPrintf(RTTESTLVL_ALWAYS, 519 "Physical Core Count: %d\n", 520 ((s.uECX >> 0) & 0xff) + 1); 477 521 if ((s.uECX >> 12) & 0xf) 478 RT Printf("ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);522 RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf); 479 523 } 480 524 … … 482 526 { 483 527 ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX); 484 RTPrintf("SVM Revision: %d (%#x)\n" 485 "Number of Address Space IDs: %d (%#x)\n", 486 s.uEAX & 0xff, s.uEAX & 0xff, 487 s.uEBX, s.uEBX); 528 RTTestIPrintf(RTTESTLVL_ALWAYS, 529 "SVM Revision: %d (%#x)\n" 530 "Number of Address Space IDs: %d (%#x)\n", 531 s.uEAX & 0xff, s.uEAX & 0xff, 532 s.uEBX, s.uEBX); 488 533 } 489 534 } … … 491 536 #endif /* AMD64 || X86 */ 492 537 538 DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8) 539 { 540 *pu8 = 0; 541 CHECKOP(ASMAtomicXchgU8(pu8, 1), 0, "%#x", uint8_t); 542 CHECKVAL(*pu8, 1, "%#x"); 543 544 CHECKOP(ASMAtomicXchgU8(pu8, 0), 1, "%#x", uint8_t); 545 CHECKVAL(*pu8, 0, "%#x"); 546 547 CHECKOP(ASMAtomicXchgU8(pu8, 0xff), 0, "%#x", uint8_t); 548 CHECKVAL(*pu8, 0xff, "%#x"); 549 550 
CHECKOP(ASMAtomicXchgU8(pu8, 0x87), 0xffff, "%#x", uint8_t); 551 CHECKVAL(*pu8, 0x87, "%#x"); 552 } 553 554 493 555 static void tstASMAtomicXchgU8(void) 494 556 { 495 struct 496 { 497 uint8_t u8Dummy0; 498 uint8_t u8; 499 uint8_t u8Dummy1; 500 } s; 501 502 s.u8 = 0; 503 s.u8Dummy0 = s.u8Dummy1 = 0x42; 504 CHECKOP(ASMAtomicXchgU8(&s.u8, 1), 0, "%#x", uint8_t); 505 CHECKVAL(s.u8, 1, "%#x"); 506 507 CHECKOP(ASMAtomicXchgU8(&s.u8, 0), 1, "%#x", uint8_t); 508 CHECKVAL(s.u8, 0, "%#x"); 509 510 CHECKOP(ASMAtomicXchgU8(&s.u8, 0xff), 0, "%#x", uint8_t); 511 CHECKVAL(s.u8, 0xff, "%#x"); 512 513 CHECKOP(ASMAtomicXchgU8(&s.u8, 0x87), 0xffff, "%#x", uint8_t); 514 CHECKVAL(s.u8, 0x87, "%#x"); 515 CHECKVAL(s.u8Dummy0, 0x42, "%#x"); 516 CHECKVAL(s.u8Dummy1, 0x42, "%#x"); 557 DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t); 558 } 559 560 561 DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16) 562 { 563 *pu16 = 0; 564 565 CHECKOP(ASMAtomicXchgU16(pu16, 1), 0, "%#x", uint16_t); 566 CHECKVAL(*pu16, 1, "%#x"); 567 568 CHECKOP(ASMAtomicXchgU16(pu16, 0), 1, "%#x", uint16_t); 569 CHECKVAL(*pu16, 0, "%#x"); 570 571 CHECKOP(ASMAtomicXchgU16(pu16, 0xffff), 0, "%#x", uint16_t); 572 CHECKVAL(*pu16, 0xffff, "%#x"); 573 574 CHECKOP(ASMAtomicXchgU16(pu16, 0x8765), 0xffff, "%#x", uint16_t); 575 CHECKVAL(*pu16, 0x8765, "%#x"); 517 576 } 518 577 … … 520 579 static void tstASMAtomicXchgU16(void) 521 580 { 522 struct 523 { 524 uint16_t u16Dummy0; 525 uint16_t u16; 526 uint16_t u16Dummy1; 527 } s; 528 529 s.u16 = 0; 530 s.u16Dummy0 = s.u16Dummy1 = 0x1234; 531 CHECKOP(ASMAtomicXchgU16(&s.u16, 1), 0, "%#x", uint16_t); 532 CHECKVAL(s.u16, 1, "%#x"); 533 534 CHECKOP(ASMAtomicXchgU16(&s.u16, 0), 1, "%#x", uint16_t); 535 CHECKVAL(s.u16, 0, "%#x"); 536 537 CHECKOP(ASMAtomicXchgU16(&s.u16, 0xffff), 0, "%#x", uint16_t); 538 CHECKVAL(s.u16, 0xffff, "%#x"); 539 540 CHECKOP(ASMAtomicXchgU16(&s.u16, 0x8765), 0xffff, "%#x", uint16_t); 541 CHECKVAL(s.u16, 0x8765, "%#x"); 542 CHECKVAL(s.u16Dummy0, 0x1234, "%#x"); 543 CHECKVAL(s.u16Dummy1, 0x1234, "%#x"); 581 DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t); 582 } 583 584 585 DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32) 586 { 587 *pu32 = 0; 588 589 CHECKOP(ASMAtomicXchgU32(pu32, 1), 0, "%#x", uint32_t); 590 CHECKVAL(*pu32, 1, "%#x"); 591 592 CHECKOP(ASMAtomicXchgU32(pu32, 0), 1, "%#x", uint32_t); 593 CHECKVAL(*pu32, 0, "%#x"); 594 595 CHECKOP(ASMAtomicXchgU32(pu32, ~UINT32_C(0)), 0, "%#x", uint32_t); 596 CHECKVAL(*pu32, ~UINT32_C(0), "%#x"); 597 598 CHECKOP(ASMAtomicXchgU32(pu32, 0x87654321), ~UINT32_C(0), "%#x", uint32_t); 599 CHECKVAL(*pu32, 0x87654321, "%#x"); 544 600 } 545 601 … … 547 603 static void tstASMAtomicXchgU32(void) 548 604 { 549 struct 550 { 551 uint32_t u32Dummy0; 552 uint32_t u32; 553 uint32_t u32Dummy1; 554 } s; 555 556 s.u32 = 0; 557 s.u32Dummy0 = s.u32Dummy1 = 0x11223344; 558 559 CHECKOP(ASMAtomicXchgU32(&s.u32, 1), 0, "%#x", uint32_t); 560 CHECKVAL(s.u32, 1, "%#x"); 561 562 CHECKOP(ASMAtomicXchgU32(&s.u32, 0), 1, "%#x", uint32_t); 563 CHECKVAL(s.u32, 0, "%#x"); 564 565 CHECKOP(ASMAtomicXchgU32(&s.u32, ~0U), 0, "%#x", uint32_t); 566 CHECKVAL(s.u32, ~0U, "%#x"); 567 568 CHECKOP(ASMAtomicXchgU32(&s.u32, 0x87654321), ~0U, "%#x", uint32_t); 569 CHECKVAL(s.u32, 0x87654321, "%#x"); 570 571 CHECKVAL(s.u32Dummy0, 0x11223344, "%#x"); 572 CHECKVAL(s.u32Dummy1, 0x11223344, "%#x"); 605 DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t); 606 } 607 608 609 DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64) 610 { 611 *pu64 = 0; 612 613 
CHECKOP(ASMAtomicXchgU64(pu64, 1), UINT64_C(0), "%#llx", uint64_t); 614 CHECKVAL(*pu64, UINT64_C(1), "%#llx"); 615 616 CHECKOP(ASMAtomicXchgU64(pu64, 0), UINT64_C(1), "%#llx", uint64_t); 617 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 618 619 CHECKOP(ASMAtomicXchgU64(pu64, ~UINT64_C(0)), UINT64_C(0), "%#llx", uint64_t); 620 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 621 622 CHECKOP(ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), ~UINT64_C(0), "%#llx", uint64_t); 623 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 573 624 } 574 625 … … 576 627 static void tstASMAtomicXchgU64(void) 577 628 { 578 struct 579 { 580 uint64_t u64Dummy0; 581 uint64_t u64; 582 uint64_t u64Dummy1; 583 } s; 584 585 s.u64 = 0; 586 s.u64Dummy0 = s.u64Dummy1 = 0x1122334455667788ULL; 587 588 CHECKOP(ASMAtomicXchgU64(&s.u64, 1), 0ULL, "%#llx", uint64_t); 589 CHECKVAL(s.u64, 1ULL, "%#llx"); 590 591 CHECKOP(ASMAtomicXchgU64(&s.u64, 0), 1ULL, "%#llx", uint64_t); 592 CHECKVAL(s.u64, 0ULL, "%#llx"); 593 594 CHECKOP(ASMAtomicXchgU64(&s.u64, ~0ULL), 0ULL, "%#llx", uint64_t); 595 CHECKVAL(s.u64, ~0ULL, "%#llx"); 596 597 CHECKOP(ASMAtomicXchgU64(&s.u64, 0xfedcba0987654321ULL), ~0ULL, "%#llx", uint64_t); 598 CHECKVAL(s.u64, 0xfedcba0987654321ULL, "%#llx"); 599 600 CHECKVAL(s.u64Dummy0, 0x1122334455667788ULL, "%#llx"); 601 CHECKVAL(s.u64Dummy1, 0x1122334455667788ULL, "%#llx"); 629 DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t); 630 } 631 632 633 DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv) 634 { 635 *ppv = NULL; 636 637 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, "%p", void *); 638 CHECKVAL(*ppv, (void *)(~(uintptr_t)0), "%p"); 639 640 CHECKOP(ASMAtomicXchgPtr(ppv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *); 641 CHECKVAL(*ppv, (void *)0x87654321, "%p"); 642 643 CHECKOP(ASMAtomicXchgPtr(ppv, NULL), (void *)0x87654321, "%p", void *); 644 CHECKVAL(*ppv, NULL, "%p"); 602 645 } 603 646 … … 605 648 static void tstASMAtomicXchgPtr(void) 606 649 { 607 void *pv = NULL; 608 609 CHECKOP(ASMAtomicXchgPtr(&pv, (void *)(~(uintptr_t)0)), NULL, "%p", void *); 610 CHECKVAL(pv, (void *)(~(uintptr_t)0), "%p"); 611 612 CHECKOP(ASMAtomicXchgPtr(&pv, (void *)0x87654321), (void *)(~(uintptr_t)0), "%p", void *); 613 CHECKVAL(pv, (void *)0x87654321, "%p"); 614 615 CHECKOP(ASMAtomicXchgPtr(&pv, NULL), (void *)0x87654321, "%p", void *); 616 CHECKVAL(pv, NULL, "%p"); 650 DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *); 651 } 652 653 654 DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8) 655 { 656 *pu8 = 0xff; 657 658 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0), false, "%d", bool); 659 CHECKVAL(*pu8, 0xff, "%x"); 660 661 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, "%d", bool); 662 CHECKVAL(*pu8, 0, "%x"); 663 664 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x79, 0xff), false, "%d", bool); 665 CHECKVAL(*pu8, 0, "%x"); 666 667 CHECKOP(ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, "%d", bool); 668 CHECKVAL(*pu8, 0x97, "%x"); 617 669 } 618 670 … … 620 672 static void tstASMAtomicCmpXchgU8(void) 621 673 { 622 struct 623 { 624 uint8_t u8Before; 625 uint8_t u8; 626 uint8_t u8After; 627 } u = { 0xcc, 0xff, 0xaa }; 628 629 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0, 0), false, "%d", bool); 630 CHECKVAL(u.u8, 0xff, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 631 632 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0, 0xff), true, "%d", bool); 633 CHECKVAL(u.u8, 0, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 634 635 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0x79, 0xff), false, "%d", bool); 636 
CHECKVAL(u.u8, 0, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 637 638 CHECKOP(ASMAtomicCmpXchgU8(&u.u8, 0x97, 0), true, "%d", bool); 639 CHECKVAL(u.u8, 0x97, "%x"); CHECKVAL(u.u8Before, 0xcc, "%x"); CHECKVAL(u.u8After, 0xaa, "%x"); 674 DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t); 675 } 676 677 678 DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32) 679 { 680 *pu32 = UINT32_C(0xffffffff); 681 682 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, 0), false, "%d", bool); 683 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 684 685 CHECKOP(ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, "%d", bool); 686 CHECKVAL(*pu32, 0, "%x"); 687 688 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff)), false, "%d", bool); 689 CHECKVAL(*pu32, 0, "%x"); 690 691 CHECKOP(ASMAtomicCmpXchgU32(pu32, UINT32_C(0x8008efd), 0), true, "%d", bool); 692 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x"); 640 693 } 641 694 … … 643 696 static void tstASMAtomicCmpXchgU32(void) 644 697 { 645 uint32_t u32 = 0xffffffff; 646 647 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0), false, "%d", bool); 648 CHECKVAL(u32, 0xffffffff, "%x"); 649 650 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0, 0xffffffff), true, "%d", bool); 651 CHECKVAL(u32, 0, "%x"); 652 653 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0xffffffff), false, "%d", bool); 654 CHECKVAL(u32, 0, "%x"); 655 656 CHECKOP(ASMAtomicCmpXchgU32(&u32, 0x8008efd, 0), true, "%d", bool); 657 CHECKVAL(u32, 0x8008efd, "%x"); 698 DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t); 699 } 700 701 702 703 DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64) 704 { 705 *pu64 = UINT64_C(0xffffffffffffff); 706 707 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, 0), false, "%d", bool); 708 CHECKVAL(*pu64, UINT64_C(0xffffffffffffff), "%#llx"); 709 710 CHECKOP(ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, "%d", bool); 711 CHECKVAL(*pu64, 0, "%x"); 712 713 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff)), false, "%d", bool); 714 CHECKVAL(*pu64, 0, "%x"); 715 716 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000)), false, "%d", bool); 717 CHECKVAL(*pu64, 0, "%x"); 718 719 CHECKOP(ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, "%d", bool); 720 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%#llx"); 658 721 } 659 722 … … 661 724 static void tstASMAtomicCmpXchgU64(void) 662 725 { 663 uint64_t u64 = 0xffffffffffffffULL; 664 665 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0), false, "%d", bool); 666 CHECKVAL(u64, 0xffffffffffffffULL, "%#llx"); 667 668 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0, 0xffffffffffffffULL), true, "%d", bool); 669 CHECKVAL(u64, 0, "%x"); 670 671 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff), false, "%d", bool); 672 CHECKVAL(u64, 0, "%x"); 673 674 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL), false, "%d", bool); 675 CHECKVAL(u64, 0, "%x"); 676 677 CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool); 678 CHECKVAL(u64, 0x80040008008efdULL, "%#llx"); 726 DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t); 727 } 728 729 730 DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32) 731 { 732 *pu32 = UINT32_C(0xffffffff); 733 uint32_t u32Old = UINT32_C(0x80005111); 734 735 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, "%d", bool); 736 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 737 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x"); 738 739 
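/* ASMAtomicCmpXchgExU32, whose worker starts above, differs from the plain
 * variant by also handing back the value that was actually found, which is
 * what the u32Old checks verify. Roughly the same contract expressed with C11
 * atomics (illustration only, not IPRT code; the helper name is made up): */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

static bool MyCmpXchgExU32(_Atomic uint32_t *pu32, uint32_t u32New, uint32_t u32Old, uint32_t *pu32Old)
{
    uint32_t u32Seen = u32Old;
    bool fXchg = atomic_compare_exchange_strong(pu32, &u32Seen, u32New);
    *pu32Old = u32Seen; /* the value found: equals u32Old on success, the current value on failure */
    return fXchg;
}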
CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, "%d", bool); 740 CHECKVAL(*pu32, 0, "%x"); 741 CHECKVAL(u32Old, UINT32_C(0xffffffff), "%x"); 742 743 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), UINT32_C(0xffffffff), &u32Old), false, "%d", bool); 744 CHECKVAL(*pu32, 0, "%x"); 745 CHECKVAL(u32Old, 0, "%x"); 746 747 CHECKOP(ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x8008efd), 0, &u32Old), true, "%d", bool); 748 CHECKVAL(*pu32, UINT32_C(0x8008efd), "%x"); 749 CHECKVAL(u32Old, 0, "%x"); 750 751 CHECKOP(ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0x8008efd), &u32Old), true, "%d", bool); 752 CHECKVAL(*pu32, 0, "%x"); 753 CHECKVAL(u32Old, UINT32_C(0x8008efd), "%x"); 679 754 } 680 755 … … 682 757 static void tstASMAtomicCmpXchgExU32(void) 683 758 { 684 uint32_t u32 = 0xffffffff; 685 uint32_t u32Old = 0x80005111; 686 687 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0, &u32Old), false, "%d", bool); 688 CHECKVAL(u32, 0xffffffff, "%x"); 689 CHECKVAL(u32Old, 0xffffffff, "%x"); 690 691 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0xffffffff, &u32Old), true, "%d", bool); 692 CHECKVAL(u32, 0, "%x"); 693 CHECKVAL(u32Old, 0xffffffff, "%x"); 694 695 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0xffffffff, &u32Old), false, "%d", bool); 696 CHECKVAL(u32, 0, "%x"); 697 CHECKVAL(u32Old, 0, "%x"); 698 699 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0, &u32Old), true, "%d", bool); 700 CHECKVAL(u32, 0x8008efd, "%x"); 701 CHECKVAL(u32Old, 0, "%x"); 702 703 CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0x8008efd, &u32Old), true, "%d", bool); 704 CHECKVAL(u32, 0, "%x"); 705 CHECKVAL(u32Old, 0x8008efd, "%x"); 759 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t); 760 } 761 762 763 DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64) 764 { 765 *pu64 = UINT64_C(0xffffffffffffffff); 766 uint64_t u64Old = UINT64_C(0x8000000051111111); 767 768 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, "%d", bool); 769 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%llx"); 770 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx"); 771 772 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, "%d", bool); 773 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 774 CHECKVAL(u64Old, UINT64_C(0xffffffffffffffff), "%llx"); 775 776 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0xffffffff, &u64Old), false, "%d", bool); 777 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 778 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 779 780 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), UINT64_C(0xffffffff00000000), &u64Old), false, "%d", bool); 781 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 782 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 783 784 CHECKOP(ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x80040008008efd), 0, &u64Old), true, "%d", bool); 785 CHECKVAL(*pu64, UINT64_C(0x80040008008efd), "%llx"); 786 CHECKVAL(u64Old, UINT64_C(0), "%llx"); 787 788 CHECKOP(ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0x80040008008efd), &u64Old), true, "%d", bool); 789 CHECKVAL(*pu64, UINT64_C(0), "%llx"); 790 CHECKVAL(u64Old, UINT64_C(0x80040008008efd), "%llx"); 706 791 } 707 792 … … 709 794 static void tstASMAtomicCmpXchgExU64(void) 710 795 { 711 uint64_t u64 = 0xffffffffffffffffULL; 712 uint64_t u64Old = 0x8000000051111111ULL; 713 714 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0, &u64Old), false, "%d", bool); 715 CHECKVAL(u64, 0xffffffffffffffffULL, "%llx"); 716 CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx"); 717 718 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0xffffffffffffffffULL, &u64Old), true, "%d", bool); 
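/* The ReadU64 tests that follow matter most on 32-bit hosts, where a plain
 * 64-bit load is not atomic; a common way to get an atomic 64-bit read there
 * is to piggy-back on compare-exchange. A portable sketch of that idea in C11
 * (not the IPRT implementation): */
#include <stdatomic.h>
#include <stdint.h>

static uint64_t MyAtomicReadU64(_Atomic uint64_t *pu64)
{
    uint64_t u64Seen = 0;
    /* CAS against 0: on mismatch u64Seen receives the current value; on the
       (harmless) match the value was 0 anyway. Either way we obtain an atomic
       snapshot of all 64 bits. */
    atomic_compare_exchange_strong(pu64, &u64Seen, 0);
    return u64Seen;
}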
719 CHECKVAL(u64, 0ULL, "%llx"); 720 CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx"); 721 722 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff, &u64Old), false, "%d", bool); 723 CHECKVAL(u64, 0ULL, "%llx"); 724 CHECKVAL(u64Old, 0ULL, "%llx"); 725 726 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL, &u64Old), false, "%d", bool); 727 CHECKVAL(u64, 0ULL, "%llx"); 728 CHECKVAL(u64Old, 0ULL, "%llx"); 729 730 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0, &u64Old), true, "%d", bool); 731 CHECKVAL(u64, 0x80040008008efdULL, "%llx"); 732 CHECKVAL(u64Old, 0ULL, "%llx"); 733 734 CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0x80040008008efdULL, &u64Old), true, "%d", bool); 735 CHECKVAL(u64, 0ULL, "%llx"); 736 CHECKVAL(u64Old, 0x80040008008efdULL, "%llx"); 796 DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t); 797 } 798 799 800 DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64) 801 { 802 *pu64 = 0; 803 804 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0), "%#llx", uint64_t); 805 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 806 807 *pu64 = ~UINT64_C(0); 808 CHECKOP(ASMAtomicReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t); 809 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 810 811 *pu64 = UINT64_C(0xfedcba0987654321); 812 CHECKOP(ASMAtomicReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t); 813 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 737 814 } 738 815 … … 740 817 static void tstASMAtomicReadU64(void) 741 818 { 742 uint64_t u64 = 0; 743 744 CHECKOP(ASMAtomicReadU64(&u64), 0ULL, "%#llx", uint64_t); 745 CHECKVAL(u64, 0ULL, "%#llx"); 746 747 u64 = ~0ULL; 748 CHECKOP(ASMAtomicReadU64(&u64), ~0ULL, "%#llx", uint64_t); 749 CHECKVAL(u64, ~0ULL, "%#llx"); 750 751 u64 = 0xfedcba0987654321ULL; 752 CHECKOP(ASMAtomicReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t); 753 CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx"); 819 DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t); 820 } 821 822 823 DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64) 824 { 825 *pu64 = 0; 826 827 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0), "%#llx", uint64_t); 828 CHECKVAL(*pu64, UINT64_C(0), "%#llx"); 829 830 *pu64 = ~UINT64_C(0); 831 CHECKOP(ASMAtomicUoReadU64(pu64), ~UINT64_C(0), "%#llx", uint64_t); 832 CHECKVAL(*pu64, ~UINT64_C(0), "%#llx"); 833 834 *pu64 = UINT64_C(0xfedcba0987654321); 835 CHECKOP(ASMAtomicUoReadU64(pu64), UINT64_C(0xfedcba0987654321), "%#llx", uint64_t); 836 CHECKVAL(*pu64, UINT64_C(0xfedcba0987654321), "%#llx"); 754 837 } 755 838 … … 757 840 static void tstASMAtomicUoReadU64(void) 758 841 { 759 uint64_t u64 = 0; 760 761 CHECKOP(ASMAtomicUoReadU64(&u64), 0ULL, "%#llx", uint64_t); 762 CHECKVAL(u64, 0ULL, "%#llx"); 763 764 u64 = ~0ULL; 765 CHECKOP(ASMAtomicUoReadU64(&u64), ~0ULL, "%#llx", uint64_t); 766 CHECKVAL(u64, ~0ULL, "%#llx"); 767 768 u64 = 0xfedcba0987654321ULL; 769 CHECKOP(ASMAtomicUoReadU64(&u64), 0xfedcba0987654321ULL, "%#llx", uint64_t); 770 CHECKVAL(u64, 0xfedcba0987654321ULL, "%#llx"); 771 } 772 773 774 static void tstASMAtomicAddS32(void) 842 DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t); 843 } 844 845 846 DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32) 775 847 { 776 848 int32_t i32Rc; 777 int32_ti32 = 10;849 *pi32 = 10; 778 850 #define MYCHECK(op, rc, val) \ 779 851 do { \ 780 852 i32Rc = op; \ 781 853 if (i32Rc != (rc)) \ 782 { \ 783 RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 784 RTTestIErrorInc(); \ 785 } \ 786 if (i32 != (val)) \ 787 { \ 788 
RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, val); \ 789 RTTestIErrorInc(); \ 790 } \ 854 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 855 if (*pi32 != (val)) \ 856 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, val); \ 791 857 } while (0) 792 MYCHECK(ASMAtomicAddS32( &i32, 1), 10, 11);793 MYCHECK(ASMAtomicAddS32( &i32, -2), 11, 9);794 MYCHECK(ASMAtomicAddS32( &i32, -9), 9, 0);795 MYCHECK(ASMAtomicAddS32( &i32, -0x7fffffff), 0, -0x7fffffff);796 MYCHECK(ASMAtomicAddS32( &i32, 0), -0x7fffffff, -0x7fffffff);797 MYCHECK(ASMAtomicAddS32( &i32, 0x7fffffff), -0x7fffffff, 0);798 MYCHECK(ASMAtomicAddS32( &i32, 0), 0, 0);858 MYCHECK(ASMAtomicAddS32(pi32, 1), 10, 11); 859 MYCHECK(ASMAtomicAddS32(pi32, -2), 11, 9); 860 MYCHECK(ASMAtomicAddS32(pi32, -9), 9, 0); 861 MYCHECK(ASMAtomicAddS32(pi32, -0x7fffffff), 0, -0x7fffffff); 862 MYCHECK(ASMAtomicAddS32(pi32, 0), -0x7fffffff, -0x7fffffff); 863 MYCHECK(ASMAtomicAddS32(pi32, 0x7fffffff), -0x7fffffff, 0); 864 MYCHECK(ASMAtomicAddS32(pi32, 0), 0, 0); 799 865 #undef MYCHECK 800 866 } 801 867 802 803 static void tstASMAtomicDecIncS32(void) 868 static void tstASMAtomicAddS32(void) 869 { 870 DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t); 871 } 872 873 874 DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64) 875 { 876 int64_t i64Rc; 877 *pi64 = 10; 878 #define MYCHECK(op, rc, val) \ 879 do { \ 880 i64Rc = op; \ 881 if (i64Rc != (rc)) \ 882 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, (int64_t)rc); \ 883 if (*pi64 != (val)) \ 884 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, (int64_t)(val)); \ 885 } while (0) 886 MYCHECK(ASMAtomicAddS64(pi64, 1), 10, 11); 887 MYCHECK(ASMAtomicAddS64(pi64, -2), 11, 9); 888 MYCHECK(ASMAtomicAddS64(pi64, -9), 9, 0); 889 MYCHECK(ASMAtomicAddS64(pi64, -INT64_MAX), 0, -INT64_MAX); 890 MYCHECK(ASMAtomicAddS64(pi64, 0), -INT64_MAX, -INT64_MAX); 891 MYCHECK(ASMAtomicAddS64(pi64, -1), -INT64_MAX, INT64_MIN); 892 MYCHECK(ASMAtomicAddS64(pi64, INT64_MAX), INT64_MIN, -1); 893 MYCHECK(ASMAtomicAddS64(pi64, 1), -1, 0); 894 MYCHECK(ASMAtomicAddS64(pi64, 0), 0, 0); 895 #undef MYCHECK 896 } 897 898 899 static void tstASMAtomicAddS64(void) 900 { 901 DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t); 902 } 903 904 905 DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32) 804 906 { 805 907 int32_t i32Rc; 806 int32_ti32 = 10;908 *pi32 = 10; 807 909 #define MYCHECK(op, rc) \ 808 910 do { \ 809 911 i32Rc = op; \ 810 912 if (i32Rc != (rc)) \ 811 { \ 812 RTPrintf("%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 813 RTTestIErrorInc(); \ 814 } \ 815 if (i32 != (rc)) \ 816 { \ 817 RTPrintf("%s, %d: FAILURE: %s => i32=%d expected %d\n", __FUNCTION__, __LINE__, #op, i32, rc); \ 818 RTTestIErrorInc(); \ 819 } \ 913 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %d expected %d\n", __FUNCTION__, __LINE__, #op, i32Rc, rc); \ 914 if (*pi32 != (rc)) \ 915 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi32=%d expected %d\n", __FUNCTION__, __LINE__, #op, *pi32, rc); \ 820 916 } while (0) 821 MYCHECK(ASMAtomicDecS32( &i32), 9);822 MYCHECK(ASMAtomicDecS32( &i32), 8);823 MYCHECK(ASMAtomicDecS32( &i32), 7);824 MYCHECK(ASMAtomicDecS32( &i32), 6);825 MYCHECK(ASMAtomicDecS32( &i32), 5);826 MYCHECK(ASMAtomicDecS32( &i32), 4);827 
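/* The AddS64 sequence above relies on the identities -INT64_MAX - 1 == INT64_MIN
 * and INT64_MIN + INT64_MAX == -1, so every expected intermediate value is
 * representable. A quick standalone check of those identities (plain C, no
 * atomics involved): */
#include <stdint.h>
#include <assert.h>

static void checkAddS64Identities(void)
{
    assert(-INT64_MAX - 1 == INT64_MIN);
    assert(INT64_MIN + INT64_MAX == -1);
}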
MYCHECK(ASMAtomicDecS32( &i32), 3);828 MYCHECK(ASMAtomicDecS32( &i32), 2);829 MYCHECK(ASMAtomicDecS32( &i32), 1);830 MYCHECK(ASMAtomicDecS32( &i32), 0);831 MYCHECK(ASMAtomicDecS32( &i32), -1);832 MYCHECK(ASMAtomicDecS32( &i32), -2);833 MYCHECK(ASMAtomicIncS32( &i32), -1);834 MYCHECK(ASMAtomicIncS32( &i32), 0);835 MYCHECK(ASMAtomicIncS32( &i32), 1);836 MYCHECK(ASMAtomicIncS32( &i32), 2);837 MYCHECK(ASMAtomicIncS32( &i32), 3);838 MYCHECK(ASMAtomicDecS32( &i32), 2);839 MYCHECK(ASMAtomicIncS32( &i32), 3);840 MYCHECK(ASMAtomicDecS32( &i32), 2);841 MYCHECK(ASMAtomicIncS32( &i32), 3);917 MYCHECK(ASMAtomicDecS32(pi32), 9); 918 MYCHECK(ASMAtomicDecS32(pi32), 8); 919 MYCHECK(ASMAtomicDecS32(pi32), 7); 920 MYCHECK(ASMAtomicDecS32(pi32), 6); 921 MYCHECK(ASMAtomicDecS32(pi32), 5); 922 MYCHECK(ASMAtomicDecS32(pi32), 4); 923 MYCHECK(ASMAtomicDecS32(pi32), 3); 924 MYCHECK(ASMAtomicDecS32(pi32), 2); 925 MYCHECK(ASMAtomicDecS32(pi32), 1); 926 MYCHECK(ASMAtomicDecS32(pi32), 0); 927 MYCHECK(ASMAtomicDecS32(pi32), -1); 928 MYCHECK(ASMAtomicDecS32(pi32), -2); 929 MYCHECK(ASMAtomicIncS32(pi32), -1); 930 MYCHECK(ASMAtomicIncS32(pi32), 0); 931 MYCHECK(ASMAtomicIncS32(pi32), 1); 932 MYCHECK(ASMAtomicIncS32(pi32), 2); 933 MYCHECK(ASMAtomicIncS32(pi32), 3); 934 MYCHECK(ASMAtomicDecS32(pi32), 2); 935 MYCHECK(ASMAtomicIncS32(pi32), 3); 936 MYCHECK(ASMAtomicDecS32(pi32), 2); 937 MYCHECK(ASMAtomicIncS32(pi32), 3); 842 938 #undef MYCHECK 843 939 } 844 940 845 941 942 static void tstASMAtomicDecIncS32(void) 943 { 944 DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t); 945 } 946 947 948 DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64) 949 { 950 int64_t i64Rc; 951 *pi64 = 10; 952 #define MYCHECK(op, rc) \ 953 do { \ 954 i64Rc = op; \ 955 if (i64Rc != (rc)) \ 956 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s -> %lld expected %lld\n", __FUNCTION__, __LINE__, #op, i64Rc, rc); \ 957 if (*pi64 != (rc)) \ 958 RTTestFailed(g_hTest, "%s, %d: FAILURE: %s => *pi64=%lld expected %lld\n", __FUNCTION__, __LINE__, #op, *pi64, rc); \ 959 } while (0) 960 MYCHECK(ASMAtomicDecS64(pi64), 9); 961 MYCHECK(ASMAtomicDecS64(pi64), 8); 962 MYCHECK(ASMAtomicDecS64(pi64), 7); 963 MYCHECK(ASMAtomicDecS64(pi64), 6); 964 MYCHECK(ASMAtomicDecS64(pi64), 5); 965 MYCHECK(ASMAtomicDecS64(pi64), 4); 966 MYCHECK(ASMAtomicDecS64(pi64), 3); 967 MYCHECK(ASMAtomicDecS64(pi64), 2); 968 MYCHECK(ASMAtomicDecS64(pi64), 1); 969 MYCHECK(ASMAtomicDecS64(pi64), 0); 970 MYCHECK(ASMAtomicDecS64(pi64), -1); 971 MYCHECK(ASMAtomicDecS64(pi64), -2); 972 MYCHECK(ASMAtomicIncS64(pi64), -1); 973 MYCHECK(ASMAtomicIncS64(pi64), 0); 974 MYCHECK(ASMAtomicIncS64(pi64), 1); 975 MYCHECK(ASMAtomicIncS64(pi64), 2); 976 MYCHECK(ASMAtomicIncS64(pi64), 3); 977 MYCHECK(ASMAtomicDecS64(pi64), 2); 978 MYCHECK(ASMAtomicIncS64(pi64), 3); 979 MYCHECK(ASMAtomicDecS64(pi64), 2); 980 MYCHECK(ASMAtomicIncS64(pi64), 3); 981 #undef MYCHECK 982 } 983 984 985 static void tstASMAtomicDecIncS64(void) 986 { 987 DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t); 988 } 989 990 991 DECLINLINE(void) tstASMAtomicAndOrU32Worker(uint32_t volatile *pu32) 992 { 993 *pu32 = UINT32_C(0xffffffff); 994 995 ASMAtomicOrU32(pu32, UINT32_C(0xffffffff)); 996 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 997 998 ASMAtomicAndU32(pu32, UINT32_C(0xffffffff)); 999 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 1000 1001 ASMAtomicAndU32(pu32, UINT32_C(0x8f8f8f8f)); 1002 CHECKVAL(*pu32, UINT32_C(0x8f8f8f8f), "%x"); 1003 1004 ASMAtomicOrU32(pu32, UINT32_C(0x70707070)); 1005 CHECKVAL(*pu32, UINT32_C(0xffffffff), "%x"); 1006 1007 
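/* The and/or expectations in this worker follow from simple mask arithmetic,
 * e.g. 0x8f8f8f8f | 0x70707070 == 0xffffffff. A standalone sanity check of the
 * constants used (plain C): */
#include <stdint.h>
#include <assert.h>

static void checkAndOrU32Masks(void)
{
    assert((UINT32_C(0x8f8f8f8f) | UINT32_C(0x70707070)) == UINT32_C(0xffffffff));
    assert((UINT32_C(0xffffffff) & UINT32_C(0x8f8f8f8f)) == UINT32_C(0x8f8f8f8f));
    assert((UINT32_C(0x80000001) & UINT32_C(0x80000000)) == UINT32_C(0x80000000));
}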
ASMAtomicAndU32(pu32, UINT32_C(1)); 1008 CHECKVAL(*pu32, UINT32_C(1), "%x"); 1009 1010 ASMAtomicOrU32(pu32, UINT32_C(0x80000000)); 1011 CHECKVAL(*pu32, UINT32_C(0x80000001), "%x"); 1012 1013 ASMAtomicAndU32(pu32, UINT32_C(0x80000000)); 1014 CHECKVAL(*pu32, UINT32_C(0x80000000), "%x"); 1015 1016 ASMAtomicAndU32(pu32, UINT32_C(0)); 1017 CHECKVAL(*pu32, UINT32_C(0), "%x"); 1018 1019 ASMAtomicOrU32(pu32, UINT32_C(0x42424242)); 1020 CHECKVAL(*pu32, UINT32_C(0x42424242), "%x"); 1021 } 1022 1023 846 1024 static void tstASMAtomicAndOrU32(void) 847 1025 { 848 uint32_t u32 = 0xffffffff; 849 850 ASMAtomicOrU32(&u32, 0xffffffff); 851 CHECKVAL(u32, 0xffffffff, "%x"); 852 853 ASMAtomicAndU32(&u32, 0xffffffff); 854 CHECKVAL(u32, 0xffffffff, "%x"); 855 856 ASMAtomicAndU32(&u32, 0x8f8f8f8f); 857 CHECKVAL(u32, 0x8f8f8f8f, "%x"); 858 859 ASMAtomicOrU32(&u32, 0x70707070); 860 CHECKVAL(u32, 0xffffffff, "%x"); 861 862 ASMAtomicAndU32(&u32, 1); 863 CHECKVAL(u32, 1, "%x"); 864 865 ASMAtomicOrU32(&u32, 0x80000000); 866 CHECKVAL(u32, 0x80000001, "%x"); 867 868 ASMAtomicAndU32(&u32, 0x80000000); 869 CHECKVAL(u32, 0x80000000, "%x"); 870 871 ASMAtomicAndU32(&u32, 0); 872 CHECKVAL(u32, 0, "%x"); 873 874 ASMAtomicOrU32(&u32, 0x42424242); 875 CHECKVAL(u32, 0x42424242, "%x"); 876 } 877 878 879 void tstASMMemZeroPage(void) 880 { 881 struct 1026 DO_SIMPLE_TEST(ASMAtomicAndOrU32, uint32_t); 1027 } 1028 1029 1030 DECLINLINE(void) tstASMAtomicAndOrU64Worker(uint64_t volatile *pu64) 1031 { 1032 *pu64 = UINT64_C(0xffffffff); 1033 1034 ASMAtomicOrU64(pu64, UINT64_C(0xffffffff)); 1035 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1036 1037 ASMAtomicAndU64(pu64, UINT64_C(0xffffffff)); 1038 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1039 1040 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f)); 1041 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f), "%x"); 1042 1043 ASMAtomicOrU64(pu64, UINT64_C(0x70707070)); 1044 CHECKVAL(*pu64, UINT64_C(0xffffffff), "%x"); 1045 1046 ASMAtomicAndU64(pu64, UINT64_C(1)); 1047 CHECKVAL(*pu64, UINT64_C(1), "%x"); 1048 1049 ASMAtomicOrU64(pu64, UINT64_C(0x80000000)); 1050 CHECKVAL(*pu64, UINT64_C(0x80000001), "%x"); 1051 1052 ASMAtomicAndU64(pu64, UINT64_C(0x80000000)); 1053 CHECKVAL(*pu64, UINT64_C(0x80000000), "%x"); 1054 1055 ASMAtomicAndU64(pu64, UINT64_C(0)); 1056 CHECKVAL(*pu64, UINT64_C(0), "%x"); 1057 1058 ASMAtomicOrU64(pu64, UINT64_C(0x42424242)); 1059 CHECKVAL(*pu64, UINT64_C(0x42424242), "%x"); 1060 1061 // Same as above, but now 64-bit wide. 
1062 ASMAtomicAndU64(pu64, UINT64_C(0)); 1063 CHECKVAL(*pu64, UINT64_C(0), "%x"); 1064 1065 ASMAtomicOrU64(pu64, UINT64_C(0xffffffffffffffff)); 1066 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x"); 1067 1068 ASMAtomicAndU64(pu64, UINT64_C(0xffffffffffffffff)); 1069 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x"); 1070 1071 ASMAtomicAndU64(pu64, UINT64_C(0x8f8f8f8f8f8f8f8f)); 1072 CHECKVAL(*pu64, UINT64_C(0x8f8f8f8f8f8f8f8f), "%x"); 1073 1074 ASMAtomicOrU64(pu64, UINT64_C(0x7070707070707070)); 1075 CHECKVAL(*pu64, UINT64_C(0xffffffffffffffff), "%x"); 1076 1077 ASMAtomicAndU64(pu64, UINT64_C(1)); 1078 CHECKVAL(*pu64, UINT64_C(1), "%x"); 1079 1080 ASMAtomicOrU64(pu64, UINT64_C(0x8000000000000000)); 1081 CHECKVAL(*pu64, UINT64_C(0x8000000000000001), "%x"); 1082 1083 ASMAtomicAndU64(pu64, UINT64_C(0x8000000000000000)); 1084 CHECKVAL(*pu64, UINT64_C(0x8000000000000000), "%x"); 1085 1086 ASMAtomicAndU64(pu64, UINT64_C(0)); 1087 CHECKVAL(*pu64, UINT64_C(0), "%x"); 1088 1089 ASMAtomicOrU64(pu64, UINT64_C(0x4242424242424242)); 1090 CHECKVAL(*pu64, UINT64_C(0x4242424242424242), "%x"); 1091 } 1092 1093 1094 static void tstASMAtomicAndOrU64(void) 1095 { 1096 DO_SIMPLE_TEST(ASMAtomicAndOrU64, uint64_t); 1097 } 1098 1099 1100 typedef struct 1101 { 1102 uint8_t ab[PAGE_SIZE]; 1103 } TSTPAGE; 1104 1105 1106 DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage) 1107 { 1108 for (unsigned j = 0; j < 16; j++) 882 1109 { 883 uint64_t u64Magic1; 884 uint8_t abPage[PAGE_SIZE]; 885 uint64_t u64Magic2; 886 } Buf1, Buf2, Buf3; 887 888 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff); 889 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage)); 890 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff); 891 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff); 892 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage)); 893 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff); 894 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff); 895 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage)); 896 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff); 897 ASMMemZeroPage(Buf1.abPage); 898 ASMMemZeroPage(Buf2.abPage); 899 ASMMemZeroPage(Buf3.abPage); 900 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff) 901 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff) 902 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff) 903 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff) 904 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff) 905 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff)) 906 { 907 RTPrintf("tstInlineAsm: ASMMemZeroPage violated one/both magic(s)!\n"); 908 RTTestIErrorInc(); 1110 memset(pPage, 0x11 * j, sizeof(*pPage)); 1111 ASMMemZeroPage(pPage); 1112 for (unsigned i = 0; i < sizeof(pPage->ab); i++) 1113 if (pPage->ab[i]) 1114 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i); 909 1115 } 910 for (unsigned i = 0; i < sizeof(Buf1.abPage); i++) 911 if (Buf1.abPage[i]) 912 { 913 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i); 914 RTTestIErrorInc(); 915 } 916 for (unsigned i = 0; i < sizeof(Buf2.abPage); i++) 917 if (Buf2.abPage[i]) 918 { 919 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i); 920 RTTestIErrorInc(); 921 } 922 for (unsigned i = 0; i < sizeof(Buf3.abPage); i++) 923 if (Buf3.abPage[i]) 924 { 925 RTPrintf("tstInlineAsm: ASMMemZeroPage didn't clear byte at offset %#x!\n", i); 926 RTTestIErrorInc(); 927 } 1116 } 1117 1118 1119 static void tstASMMemZeroPage(void) 1120 { 1121 DO_SIMPLE_TEST(ASMMemZeroPage, TSTPAGE); 928 1122 } 929 1123 … … 966 1160 void tstASMMemZero32(void) 967 1161 
{ 1162 RTTestSub(g_hTest, "ASMMemFill32"); 1163 968 1164 struct 969 1165 { … … 992 1188 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff)) 993 1189 { 994 RTPrintf("tstInlineAsm: ASMMemZero32 violated one/both magic(s)!\n"); 995 RTTestIErrorInc(); 1190 RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n"); 996 1191 } 997 1192 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++) 998 1193 if (Buf1.abPage[i]) 999 { 1000 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1001 RTTestIErrorInc(); 1002 } 1194 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1003 1195 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++) 1004 1196 if (Buf2.abPage[i]) 1005 { 1006 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1007 RTTestIErrorInc(); 1008 } 1197 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1009 1198 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++) 1010 1199 if (Buf3.abPage[i]) 1011 { 1012 RTPrintf("tstInlineAsm: ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1013 RTTestIErrorInc(); 1014 } 1200 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i); 1015 1201 } 1016 1202 … … 1018 1204 void tstASMMemFill32(void) 1019 1205 { 1206 RTTestSub(g_hTest, "ASMMemFill32"); 1207 1020 1208 struct 1021 1209 { … … 1055 1243 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff) 1056 1244 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff)) 1057 { 1058 RTPrintf("tstInlineAsm: ASMMemFill32 violated one/both magic(s)!\n"); 1059 RTTestIErrorInc(); 1060 } 1245 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n"); 1061 1246 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++) 1062 1247 if (Buf1.au32Page[i] != 0xdeadbeef) 1063 { 1064 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef); 1065 RTTestIErrorInc(); 1066 } 1248 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef); 1067 1249 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++) 1068 1250 if (Buf2.au32Page[i] != 0xcafeff01) 1069 { 1070 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01); 1071 RTTestIErrorInc(); 1072 } 1251 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01); 1073 1252 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++) 1074 1253 if (Buf3.au32Page[i] != 0xf00dd00f) 1075 { 1076 RTPrintf("tstInlineAsm: ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f); 1077 RTTestIErrorInc(); 1078 } 1254 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f); 1079 1255 } 1080 1256 … … 1083 1259 void tstASMMath(void) 1084 1260 { 1261 RTTestSub(g_hTest, "Math"); 1262 1085 1263 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000)); 1086 1264 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64"); … … 1158 1336 void tstASMByteSwap(void) 1159 1337 { 1160 RT Printf("tstInlineASM: TESTING - ASMByteSwap*\n");1338 RTTestSub(g_hTest, "ASMByteSwap*"); 1161 1339 1162 1340 uint64_t u64In = UINT64_C(0x0011223344556677); … … 1233 1411 static int64_t volatile s_i64; 1234 1412 register unsigned i; 1235 const unsigned cRounds = 2000000;1413 const unsigned cRounds = _2M; 1236 1414 register uint64_t u64Elapsed; 1237 1415 1238 RT Printf("tstInlineASM: Benchmarking:\n");1239 1240 #if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || 
defined(RT_ARCH_X86))1416 RTTestSub(g_hTest, "Benchmarking"); 1417 1418 #if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) 1241 1419 # define BENCH(op, str) \ 1242 1420 do { \ … … 1246 1424 op; \ 1247 1425 u64Elapsed = ASMReadTSC() - u64Elapsed; \ 1248 RT Printf(" %-30s %3llu cycles\n", str, u64Elapsed / cRounds); \1426 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \ 1249 1427 } while (0) 1250 1428 #else … … 1256 1434 op; \ 1257 1435 u64Elapsed = RTTimeNanoTS() - u64Elapsed; \ 1258 RT Printf(" %-30s %3llu ns\n", str, u64Elapsed / cRounds); \1436 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \ 1259 1437 } while (0) 1260 1438 #endif 1261 1439 1262 BENCH(s_u32 = 0, "s_u32 = 0 :");1263 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8 :");1264 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8 :");1265 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16 :");1266 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16 :");1267 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32 :");1268 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32 :");1269 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64 :");1270 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64 :");1271 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8 :");1272 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8 :");1273 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16 :");1274 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16 :");1275 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32 :");1276 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32 :");1277 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64 :");1278 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64 :");1279 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8 :");1280 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8 :");1281 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16 :");1282 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16 :");1283 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32 :");1284 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32 :");1285 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64 :");1286 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64 :");1287 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8 :");1288 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8 :");1289 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16 :");1290 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16 :");1291 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32 :");1292 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32 :");1293 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64 :");1294 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64 :");1295 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8 :");1296 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8 :");1297 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16 :");1298 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16 :");1299 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32 :");1300 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32 :");1301 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64 :");1302 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64 :");1303 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32 :");1304 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32 :");1305 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64 :");1306 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), 
"ASMAtomicCmpXchgS64 :");1307 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg :");1308 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg :");1309 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg :");1310 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg :");1311 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32 :");1312 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32 :");1313 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32 :");1314 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32 :");1315 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32 :");1316 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32 :");1440 BENCH(s_u32 = 0, "s_u32 = 0"); 1441 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8"); 1442 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8"); 1443 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16"); 1444 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16"); 1445 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32"); 1446 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32"); 1447 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64"); 1448 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64"); 1449 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8"); 1450 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8"); 1451 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16"); 1452 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16"); 1453 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32"); 1454 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32"); 1455 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64"); 1456 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64"); 1457 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8"); 1458 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8"); 1459 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16"); 1460 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16"); 1461 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32"); 1462 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32"); 1463 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64"); 1464 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64"); 1465 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8"); 1466 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8"); 1467 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16"); 1468 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16"); 1469 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32"); 1470 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32"); 1471 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64"); 1472 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64"); 1473 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8"); 1474 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8"); 1475 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16"); 1476 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16"); 1477 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32"); 1478 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32"); 1479 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64"); 1480 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64"); 1481 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32"); 1482 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32"); 1483 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64"); 1484 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64"); 1485 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg"); 1486 
BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg"); 1487 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg"); 1488 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg"); 1489 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32"); 1490 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32"); 1491 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32"); 1492 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32"); 1493 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32"); 1494 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32"); 1317 1495 /* The Darwin gcc does not like this ... */ 1318 1496 #if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) 1319 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId :");1497 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId"); 1320 1498 #endif 1321 1499 1322 RTPrintf("Done.\n");1323 1324 1500 #undef BENCH 1325 1501 } … … 1328 1504 int main(int argc, char *argv[]) 1329 1505 { 1330 RTTEST hTest; 1331 int rc = RTTestInitAndCreate("tstRTInlineAsm", &hTest); 1506 int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest); 1332 1507 if (rc) 1333 1508 return rc; 1334 RTTestBanner( hTest);1509 RTTestBanner(g_hTest); 1335 1510 1336 1511 /* … … 1352 1527 tstASMAtomicReadU64(); 1353 1528 tstASMAtomicUoReadU64(); 1529 1354 1530 tstASMAtomicAddS32(); 1531 tstASMAtomicAddS64(); 1355 1532 tstASMAtomicDecIncS32(); 1533 tstASMAtomicDecIncS64(); 1356 1534 tstASMAtomicAndOrU32(); 1535 tstASMAtomicAndOrU64(); 1536 1357 1537 tstASMMemZeroPage(); 1358 tstASMMemIsZeroPage( hTest);1538 tstASMMemIsZeroPage(g_hTest); 1359 1539 tstASMMemZero32(); 1360 1540 tstASMMemFill32(); 1541 1361 1542 tstASMMath(); 1543 1362 1544 tstASMByteSwap(); 1545 1363 1546 tstASMBench(); 1364 1547 … … 1366 1549 * Show the result. 1367 1550 */ 1368 return RTTestSummaryAndDestroy( hTest);1369 } 1370 1551 return RTTestSummaryAndDestroy(g_hTest); 1552 } 1553
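Taken together, the testcase changes move reporting from RTPrintf plus RTTestIErrorInc onto the RTTest API: a global RTTEST handle, RTTestSub per test group, RTTestFailed on mismatches, RTTestValue for the benchmark figures, and RTTestSummaryAndDestroy for the verdict. A minimal standalone skeleton following that same pattern (the test name, group and checked value below are made up for illustration):

#include <iprt/test.h>

static RTTEST g_hMyTest;

static void myTestGroup(void)
{
    RTTestSub(g_hMyTest, "MyGroup");
    uint32_t u32 = 42;
    if (u32 != 42)
        RTTestFailed(g_hMyTest, "u32=%#x expected %#x\n", u32, 42);
}

int main(int argc, char *argv[])
{
    int rc = RTTestInitAndCreate("tstMyExample", &g_hMyTest);
    if (rc)
        return rc;
    RTTestBanner(g_hMyTest);

    myTestGroup();

    return RTTestSummaryAndDestroy(g_hMyTest);
}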