Changeset 90640 in vbox
- Timestamp: Aug 11, 2021 11:40:23 PM
- svn:sync-xref-src-repo-rev: 146252
- Location: trunk
- Files: 2 edited
trunk/include/iprt/asm.h
r87402 → r90640

 # pragma intrinsic(__stosq)
 # pragma intrinsic(_byteswap_uint64)
+# pragma intrinsic(_InterlockedCompareExchange128)
 # pragma intrinsic(_InterlockedExchange64)
 # pragma intrinsic(_InterlockedExchangeAdd64)

…

 }

+#if defined(RT_ARCH_AMD64) || defined(DOXYGEN_RUNNING)
+
+/**
+ * Atomically compare and write an unsigned 128-bit value, ordered.
+ *
+ * @returns true if write was done.
+ * @returns false if write wasn't done.
+ *
+ * @param   pu128   Pointer to the 128-bit variable to update.
+ * @param   u128New The 128-bit value to assign to *pu128.
+ * @param   u128Old The value to compare with.
+ *
+ * @remarks AMD64: Not present in the earliest CPUs, so check CPUID.
+ */
+# if (RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN)
+DECLASM(bool) ASMAtomicCmpWriteU128(volatile uint128_t *pu128, const uint128_t u128New, const uint128_t u128Old) RT_NOTHROW_PROTO;
+# else
+DECLINLINE(bool) ASMAtomicCmpWriteU128(volatile uint128_t *pu128, const uint128_t u128New, const uint128_t u128Old) RT_NOTHROW_DEF
+{
+#  if RT_INLINE_ASM_USES_INTRIN
+    __int64 ai64Cmp[2];
+    ai64Cmp[0] = (__int64)u128Old.Lo;
+    ai64Cmp[1] = (__int64)u128Old.Hi;
+    return _InterlockedCompareExchange128((__int64 volatile *)pu128, u128New.Hi, u128New.Lo, ai64Cmp) != 0;
+
+#  elif defined(RT_ARCH_AMD64)
+#   if RT_INLINE_ASM_GNU_STYLE
+    uint64_t u64Ret;
+    uint64_t u64Spill;
+    __asm__ __volatile__("lock; cmpxchg16b %2\n\t"
+                         "setz %%al\n\t"
+                         "movzbl %%al, %%eax\n\t"
+                         : "=a" (u64Ret)
+                         , "=d" (u64Spill)
+                         , "+m" (*pu128)
+                         : "A" (u128Old)
+                         , "b" ((uint64_t)u128New)
+                         , "c" ((uint64_t)(u128New >> 64))
+                         : "cc");
+
+    return (bool)u64Ret;
+#   else
+#    error "Port me"
+#   endif
+#  else
+#   error "Port me"
+#  endif
+}
+# endif
+
+/** @def RTASM_HAVE_CMP_WRITE_U128
+ * Indicates that we've got ASMAtomicCmpWriteU128() available. */
+# define RTASM_HAVE_CMP_WRITE_U128 1
+
+/**
+ * RTUINT128U wrapper for ASMAtomicCmpWriteU128.
+ */
+DECLINLINE(bool) ASMAtomicCmpWriteU128U(volatile RTUINT128U *pu128, const RTUINT128U u128New,
+                                        const RTUINT128U u128Old) RT_NOTHROW_DEF
+{
+    return ASMAtomicCmpWriteU128(&pu128->u, u128New.u, u128Old.u);
+}
+
+#endif /* RT_ARCH_AMD64 */
+

 /**
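For illustration, here is a minimal sketch of how the new primitive might be used from C code: a retry loop that updates a 128-bit variable via ASMAtomicCmpWriteU128U(). The function ExampleAtomicSetBit128 and its update step are hypothetical and not part of this changeset; the sketch assumes the caller has already verified CMPXCHG16B support on AMD64 (CPUID leaf 1, ECX CX16 bit), as the @remarks above and the testcase below do.

/* Illustrative sketch only, not part of the changeset: atomically set one bit
   in a 128-bit variable using the new 16-byte compare-and-write primitive. */
#include <iprt/asm.h>   /* ASMAtomicCmpWriteU128U, ASMAtomicReadU64, RT_BIT_64 */

static void ExampleAtomicSetBit128(RTUINT128U volatile *pu128, unsigned iBit)
{
    RTUINT128U uOld, uNew;
    do
    {
        /* Snapshot the current value; a torn read is harmless here because it
           simply fails the compare below and the loop retries. */
        uOld.s.Lo = ASMAtomicReadU64(&pu128->s.Lo);
        uOld.s.Hi = ASMAtomicReadU64(&pu128->s.Hi);

        /* Compute the desired new value from the snapshot. */
        uNew = uOld;
        if (iBit < 64)
            uNew.s.Lo |= RT_BIT_64(iBit);
        else
            uNew.s.Hi |= RT_BIT_64(iBit - 64);

        /* Write uNew only if *pu128 still equals uOld; otherwise go again. */
    } while (!ASMAtomicCmpWriteU128U(pu128, uNew, uOld));
}

On AMD64 the compare-and-write boils down to lock cmpxchg16b (or _InterlockedCompareExchange128 with MSVC), which is why the testcase below only exercises it when CPUID reports CX16.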
trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp
r87256 → r90640

         CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
         CHECKVAL(a_uVar2, a_ExpectVarVal2, a_FmtVar); \
+    } while (0)
+
+#define CHECKVAL128(a_pu128Val, a_u64HiExpect, a_u64LoExpect) \
+    do \
+    { \
+        if ((a_pu128Val)->s.Hi != (a_u64HiExpect) || (a_pu128Val)->s.Lo != (a_u64LoExpect)) \
+            RTTestFailed(g_hTest, "%s, %d: " #a_pu128Val ": expected %#RX64'%016RX64 got %#RX64'%016RX64\n", \
+                         __FUNCTION__, __LINE__, (a_u64HiExpect), (a_u64LoExpect), (a_pu128Val)->s.Hi, (a_pu128Val)->s.Lo); \
+    } while (0)
+#define CHECKVAL128_C(a_pu128Val, a_u64HiExpect, a_u64LoExpect) \
+    do \
+    { \
+        if ((a_pu128Val)->s.Hi != UINT64_C(a_u64HiExpect) || (a_pu128Val)->s.Lo != UINT64_C(a_u64LoExpect)) \
+            RTTestFailed(g_hTest, "%s, %d: " #a_pu128Val ": expected %#RX64'%016RX64 got %#RX64'%016RX64\n", \
+                         __FUNCTION__, __LINE__, UINT64_C(a_u64HiExpect), UINT64_C(a_u64LoExpect), \
+                         (a_pu128Val)->s.Hi, (a_pu128Val)->s.Lo); \
+    } while (0)
+#define CHECK_OP_AND_VAL_128(a_TypeRet, a_FmtRet, a_pu128Val, a_Operation, a_ExpectRetVal, a_u64HiExpect, a_u64LoExpect) \
+    do { \
+        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
+        CHECKVAL128(a_pu128Val, a_u64HiExpect, a_u64LoExpect); \
+    } while (0)
+#define CHECK_OP_AND_VAL_128_C(a_TypeRet, a_FmtRet, a_pu128Val, a_Operation, a_ExpectRetVal, a_u64HiExpect, a_u64LoExpect) \
+    do { \
+        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
+        CHECKVAL128_C(a_pu128Val, a_u64HiExpect, a_u64LoExpect); \
     } while (0)

…

+#ifdef RTASM_HAVE_CMP_WRITE_U128
+DECLINLINE(void) tstASMAtomicCmpWriteU128Worker(RTUINT128U volatile *pu128)
+{
+    pu128->s.Lo = UINT64_C(0xffffffffffffff);
+    pu128->s.Hi = UINT64_C(0xffffffffffffff);
+
+    RTUINT128U u128A, u128B;
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0, 0),
+                                                                     u128B = RTUINT128_INIT_C(0, 0)),
+                           false, 0xffffffffffffff, 0xffffffffffffff);
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0, 0),
+                                                                     u128B = RTUINT128_INIT_C(0xffffffffffffff, 0xffffffffffffff)),
+                           true, 0, 0);
+
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
+                                                                     u128B = RTUINT128_INIT_C(0, 1)),
+                           false, 0, 0);
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
+                                                                     u128B = RTUINT128_INIT_C(1, 0)),
+                           false, 0, 0);
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def),
+                                                                     u128B = RTUINT128_INIT_C(0, 0)),
+                           true, 0x80040008008efd, 0x40080004004def);
+
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
+                                                                     u128B = RTUINT128_INIT_C(0x80040008008efd, 0)),
+                           false, 0x80040008008efd, 0x40080004004def);
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
+                                                                     u128B = RTUINT128_INIT_C(0, 0x40080004004def)),
+                           false, 0x80040008008efd, 0x40080004004def);
+    CHECK_OP_AND_VAL_128_C(bool, "%d", pu128, ASMAtomicCmpWriteU128U(pu128,
+                                                                     u128A = RTUINT128_INIT_C(0xfff40ff8f08ef3, 0x4ee8ee04cc4de4),
+                                                                     u128B = RTUINT128_INIT_C(0x80040008008efd, 0x40080004004def)),
+                           true, 0xfff40ff8f08ef3, 0x4ee8ee04cc4de4);
+}
+#endif /* RTASM_HAVE_CMP_WRITE_U128 */
+
+
 static void tstASMAtomicCmpXchg(void)
 {

…

     DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
     DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
+#ifdef RTASM_HAVE_CMP_WRITE_U128
+# ifdef RT_ARCH_AMD64
+    if (ASMCpuId_ECX(1) & X86_CPUID_FEATURE_ECX_CX16)
+# endif
+    {
+        RTTestISub("ASMAtomicCmpWriteU128U");
+        DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMAtomicCmpWriteU128Worker, RTUINT128U);
+    }
+#endif
 }