Changeset 104795 in vbox
- Timestamp: May 27, 2024 8:08:37 PM
- svn:sync-xref-src-repo-rev: 163367
- Location: trunk
- Files: 3 edited
trunk/include/iprt/asm.h
r103082 → r104795

 */
DECLINLINE(bool) ASMAtomicCmpXchgU128U(volatile RTUINT128U *pu128, const RTUINT128U u128New,
                                       const RTUINT128U u128Old, PRTUINT128U pu128Old) RT_NOTHROW_DEF
{
# if (defined(__clang_major__) || defined(__GNUC__)) && defined(RT_ARCH_ARM64)

…

/** @def RTASM_HAVE_READ_U128
 * Defined if the target architecture supports atomic reading of 128-bit
 * integers.
 *
 * The define value is zero if both ordered and unordered reads are implemented
 * using ASMAtomicCmpXchgU128v2().  It is 1 if unordered reads are done natively
 * w/o cmpxchg and 3 if both variants are done natively w/o cmpxchg.
 *
 * @note AMD64: Caller must check for cmpxchg16b support before use and make
 *       sure variables are writable (won't be changed).
 * @sa   RTASM_HAVE_CMP_XCHG_U128, RTASM_HAVE_WRITE_U128
 */
#if defined(RT_ARCH_ARM64) || defined(DOXYGEN_RUNNING)
# define RTASM_HAVE_READ_U128   3
#elif defined(RTASM_HAVE_CMP_XCHG_U128)
# define RTASM_HAVE_READ_U128   0
#endif

#ifdef RTASM_HAVE_READ_U128

/**
 * Atomically reads an unsigned 128-bit value, ordered.
 *
 * @returns Current *pu128 value
 * @param   pu128   Pointer to the 128-bit variable to read.
 *                  The memory pointed to must be writable.
 *
 * @remarks AMD64: Requires the memory to be both readable and writable.
 * @remarks AMD64: Requires support for cmpxchg16b.
 */
DECLINLINE(uint128_t) ASMAtomicReadU128(volatile uint128_t RT_FAR *pu128) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    RTUINT128U u128Ret;
    __asm__ __volatile__("Lstart_ASMAtomicReadU128_%=:\n\t"
                         RTASM_ARM_DMB_SY
                         "ldp %[uRetLo], %[uRetHi], %[pMem]\n\t"
                         RTASM_ARM_DMB_SY
                         : [uRetHi] "=r" (u128Ret.s.Hi)
                         , [uRetLo] "=r" (u128Ret.s.Lo)
                         : [pMem]   "Q"  (*pu128)
                         : );
    return u128Ret.u;
# else
    uint128_t u128Ret;
    ASMAtomicCmpXchgU128v2(pu128, 0, 0, 0, 0, &u128Ret);
    return u128Ret;
# endif
}

/**
 * Atomically reads an unsigned 128-bit value, ordered.
 *
 * @returns Current *pu128 value
 * @param   pu128   Pointer to the 128-bit variable to read.
 *                  The memory pointed to must be writable.
 *
 * @remarks AMD64: Requires the memory to be both readable and writable.
 * @remarks AMD64: Requires support for cmpxchg16b.
 */
DECLINLINE(RTUINT128U) ASMAtomicReadU128U(volatile RTUINT128U RT_FAR *pu128) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
    RTUINT128U u128Ret;
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    __asm__ __volatile__("Lstart_ASMAtomicReadU128U_%=:\n\t"
                         RTASM_ARM_DMB_SY
                         "ldp %[uRetLo], %[uRetHi], %[pMem]\n\t"
                         RTASM_ARM_DMB_SY
                         : [uRetHi] "=r" (u128Ret.s.Hi)
                         , [uRetLo] "=r" (u128Ret.s.Lo)
                         : [pMem]   "Q"  (*pu128)
                         : );
    return u128Ret;
# else
    ASMAtomicCmpXchgU128v2(&pu128->u, 0, 0, 0, 0, &u128Ret.u);
    return u128Ret;
# endif
}


/**
 * Atomically reads an unsigned 128-bit value, unordered.
 *
 * @returns Current *pu128 value
 * @param   pu128   Pointer to the 128-bit variable to read.
 *                  The memory pointed to must be writable.
 *
 * @remarks AMD64: Requires the memory to be both readable and writable.
 * @remarks AMD64: Requires support for cmpxchg16b.
 * @remarks AMD64: Is ordered.
 */
DECLINLINE(uint128_t) ASMAtomicUoReadU128(volatile uint128_t RT_FAR *pu128) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    RTUINT128U u128Ret;
    __asm__ __volatile__("Lstart_ASMAtomicUoReadU128_%=:\n\t"
                         "ldp %[uRetLo], %[uRetHi], %[pMem]\n\t"
                         : [uRetHi] "=r" (u128Ret.s.Hi)
                         , [uRetLo] "=r" (u128Ret.s.Lo)
                         : [pMem]   "Q"  (*pu128)
                         : );
    return u128Ret.u;

# elif defined(RT_ARCH_AMD64) && 0
    /* This doesn't work because __m128i can't be made volatile and we're not
       able to force MSC (2019) to emit _mm_load_si128 (besides, it emits movdqu
       instead of movdqa). */
    __m128i uTmpSse   = _mm_load_si128((__m128i volatile *)pu128);
    __m128i uTmpSseHi = _mm_srli_si128(uTmpSse, 64 / 8);
    RTUINT128U u128Ret;
    u128Ret.s.Lo = (uint64_t)_mm_cvtsi128_si64(uTmpSse);
    u128Ret.s.Hi = (uint64_t)_mm_cvtsi128_si64(uTmpSseHi);
    return u128Ret.u;

# else
    return ASMAtomicReadU128(pu128);
# endif
}

/**
 * Atomically reads an unsigned 128-bit value, unordered.
 *
 * @returns Current *pu128 value
 * @param   pu128   Pointer to the 128-bit variable to read.
 *                  The memory pointed to must be writable.
 *
 * @remarks AMD64: Requires the memory to be both readable and writable.
 * @remarks AMD64: Requires support for cmpxchg16b.
 * @remarks AMD64: Is ordered.
 */
DECLINLINE(RTUINT128U) ASMAtomicUoReadU128U(volatile RTUINT128U RT_FAR *pu128) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    RTUINT128U u128Ret;
    __asm__ __volatile__("Lstart_ASMAtomicUoReadU128U_%=:\n\t"
                         "ldp %[uRetLo], %[uRetHi], %[pMem]\n\t"
                         : [uRetHi] "=r" (u128Ret.s.Hi)
                         , [uRetLo] "=r" (u128Ret.s.Lo)
                         : [pMem]   "Q"  (*pu128)
                         : );
    return u128Ret;
# else
    return ASMAtomicReadU128U(pu128);
# endif
}

#endif /* RTASM_HAVE_READ_U128 */

/**
 * Atomically reads a size_t value, ordered.

…

}


/** @def RTASM_HAVE_WRITE_U128
 * Defined if the target architecture supports atomic writing of 128-bit
 * integers.
 *
 * The define value is zero if both ordered and unordered writes are implemented
 * using ASMAtomicCmpXchgU128v2().  It is 1 if unordered writes are done
 * natively w/o cmpxchg and 3 if both variants are done natively w/o cmpxchg.
 *
 * @note AMD64: Caller must check for cmpxchg16b support before use.
 * @sa   RTASM_HAVE_CMP_XCHG_U128
 */
#if defined(RT_ARCH_ARM64) || defined(DOXYGEN_RUNNING)
# define RTASM_HAVE_WRITE_U128  3
#elif defined(RTASM_HAVE_CMP_XCHG_U128)
# define RTASM_HAVE_WRITE_U128  0
#endif

#ifdef RTASM_HAVE_WRITE_U128

/**
 * Atomically writes an unsigned 128-bit value, ordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u64Hi   The high 64 bits of the new value.
 * @param   u64Lo   The low 64 bits of the new value.
 */
DECLINLINE(void) ASMAtomicWriteU128v2(volatile uint128_t *pu128, const uint64_t u64Hi, const uint64_t u64Lo) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    __asm__ __volatile__("Lstart_ASMAtomicWriteU128v2_%=:\n\t"
#  if 0 && defined(RTASM_ARM64_USE_FEAT_LSE128) /** @todo hw support? test + debug */
                         RTASM_ARM_DMB_SY
                         "swpp %[uValueLo], %[uValueHi], %[pMem]\n\t"
#  else
                         RTASM_ARM_DMB_SY
                         "stp %[uValueLo], %[uValueHi], %[pMem]\n\t"
                         "dmb sy\n\t"
#  endif
                         : [pMem]     "+Q" (*pu128)
                         : [uValueHi] "r"  (u64Hi)
                         , [uValueLo] "r"  (u64Lo)
                         : );

# else
    RTUINT128U u128Old;
#  ifdef RT_COMPILER_WITH_128BIT_INT_TYPES
    u128Old.u = *pu128;
#  else
    u128Old.u.Lo = pu128->Lo;
    u128Old.u.Hi = pu128->Hi;
#  endif
    while (!ASMAtomicCmpXchgU128v2(pu128, u64Hi, u64Lo, u128Old.s.Hi, u128Old.s.Lo, &u128Old.u))
    { }
# endif
}


/**
 * Atomically writes an unsigned 128-bit value, unordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u64Hi   The high 64 bits of the new value.
 * @param   u64Lo   The low 64 bits of the new value.
 * @note    This is ordered on AMD64.
 */
DECLINLINE(void) ASMAtomicUoWriteU128v2(volatile uint128_t *pu128, const uint64_t u64Hi, const uint64_t u64Lo) RT_NOTHROW_DEF
{
    Assert(!((uintptr_t)pu128 & 15));
# if defined(__GNUC__) && defined(RT_ARCH_ARM64)
    __asm__ __volatile__("Lstart_ASMAtomicUoWriteU128v2_%=:\n\t"
                         "stp %[uValueLo], %[uValueHi], %[pMem]\n\t"
                         : [pMem]     "+Q" (*pu128)
                         : [uValueHi] "r"  (u64Hi)
                         , [uValueLo] "r"  (u64Lo)
                         : );

# else
    RTUINT128U u128Old;
#  ifdef RT_COMPILER_WITH_128BIT_INT_TYPES
    u128Old.u = *pu128;
#  else
    u128Old.u.Lo = pu128->Lo;
    u128Old.u.Hi = pu128->Hi;
#  endif
    while (!ASMAtomicCmpXchgU128v2(pu128, u64Hi, u64Lo, u128Old.s.Hi, u128Old.s.Lo, &u128Old.u))
    { }
# endif
}


/**
 * Atomically writes an unsigned 128-bit value, ordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u128    The new value.
 */
DECLINLINE(void) ASMAtomicWriteU128(volatile uint128_t *pu128, const uint128_t u128) RT_NOTHROW_DEF
{
# ifdef RT_COMPILER_WITH_128BIT_INT_TYPES
    ASMAtomicWriteU128v2(pu128, (uint64_t)(u128 >> 64), (uint64_t)u128);
# else
    ASMAtomicWriteU128v2(pu128, u128.Hi, u128.Lo);
# endif
}


/**
 * Atomically writes an unsigned 128-bit value, unordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u128    The new value.
 * @note    This is ordered on AMD64.
 */
DECLINLINE(void) ASMAtomicUoWriteU128(volatile uint128_t *pu128, const uint128_t u128) RT_NOTHROW_DEF
{
# ifdef RT_COMPILER_WITH_128BIT_INT_TYPES
    ASMAtomicUoWriteU128v2(pu128, (uint64_t)(u128 >> 64), (uint64_t)u128);
# else
    ASMAtomicUoWriteU128v2(pu128, u128.Hi, u128.Lo);
# endif
}


/**
 * Atomically writes an unsigned 128-bit value, ordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u128    The new value.
 */
DECLINLINE(void) ASMAtomicWriteU128U(volatile RTUINT128U *pu128, const RTUINT128U u128) RT_NOTHROW_DEF
{
    ASMAtomicWriteU128v2(&pu128->u, u128.s.Hi, u128.s.Lo);
}


/**
 * Atomically writes an unsigned 128-bit value, unordered.
 *
 * @param   pu128   Pointer to the variable to overwrite.  Must be aligned on
 *                  a 16 byte boundary.
 * @param   u128    The new value.
 * @note    This is ordered on AMD64.
 */
DECLINLINE(void) ASMAtomicUoWriteU128U(volatile RTUINT128U *pu128, const RTUINT128U u128) RT_NOTHROW_DEF
{
    ASMAtomicUoWriteU128v2(&pu128->u, u128.s.Hi, u128.s.Lo);
}

#endif /* RTASM_HAVE_WRITE_U128 */

…
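Taken together, the new API is feature-tested at compile time via RTASM_HAVE_READ_U128 / RTASM_HAVE_WRITE_U128, and on AMD64 the doc comments additionally require a runtime check for cmpxchg16b before use. A minimal caller sketch, not part of the changeset: the function and variable names are illustrative, and the assumption that the variable is 16-byte aligned is the caller's responsibility (see the types.h note below).

#include <iprt/asm.h>
#ifdef RT_ARCH_AMD64
# include <iprt/asm-amd64-x86.h>   /* ASMCpuId_ECX */
#endif

/* Illustrative shared variable; assumed 16-byte aligned by other means,
   since RTUINT128U itself does not guarantee it. */
static RTUINT128U volatile g_u128Shared;

static void updateShared(uint64_t u64Hi, uint64_t u64Lo)
{
#if defined(RTASM_HAVE_READ_U128) && defined(RTASM_HAVE_WRITE_U128)
# ifdef RT_ARCH_AMD64
    /* The cmpxchg16b-based variants need runtime support: CPUID.01H:ECX[13]. */
    if (!(ASMCpuId_ECX(1) & RT_BIT_32(13)))
        return; /* a lock-based fallback would go here (not shown) */
# endif
    ASMAtomicWriteU128v2(&g_u128Shared.u, u64Hi, u64Lo);       /* ordered write */
    RTUINT128U const uNow = ASMAtomicReadU128U(&g_u128Shared); /* ordered read */
    Assert(uNow.s.Hi == u64Hi && uNow.s.Lo == u64Lo);
#else
    RT_NOREF(u64Hi, u64Lo); /* no 128-bit atomics on this target */
#endif
}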
trunk/include/iprt/types.h
r101140 → r104795

/**
 * 128-bit unsigned integer union.
 *
 * @note This is not necessarily automatically 16 byte aligned.  Sorry.
 */
#pragma pack(1)
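Because of the #pragma pack(1) packing noted above, a consumer that wants to use the 16-byte-aligned 128-bit atomics on an RTUINT128U has to request the alignment itself. A sketch under that assumption, using compiler-specific alignment attributes; the variable name is illustrative only:

#include <iprt/types.h>
#include <iprt/assert.h>

/* Force the 16-byte alignment the 128-bit atomics assert on; the union
   itself does not guarantee it. */
#if defined(_MSC_VER)
static __declspec(align(16)) RTUINT128U volatile g_u128;
#else
static RTUINT128U volatile g_u128 __attribute__((aligned(16)));
#endif

AssertCompile(sizeof(RTUINT128U) == 16);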
trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp
r103354 → r104795

    RTTestISub(#name); \
    DO_SIMPLE_TEST_NO_SUB(tst ## name ## Worker, type); \
} while (0)


/**
 * Calls a worker function with different worker variable storage types.
 */
#define DO_SIMPLE_TEST_NO_STACK(name, type) \
    do \
    { \
        RTTestISub(#name); \
        DO_SIMPLE_TEST_NO_SUB_NO_STACK(tst ## name ## Worker, type); \
    } while (0)

…

}

#ifdef RTASM_HAVE_READ_U128
# define TEST_READ_128_EX(a_pVar, a_szFunction, a_CallExpr, a_u64ValHi, a_u64ValLo) do { \
        a_pVar->s.Hi = a_u64ValHi; \
        a_pVar->s.Lo = a_u64ValLo; \
        RTUINT128U uRet; \
        a_CallExpr; \
        if (uRet.s.Lo != a_u64ValLo || uRet.s.Hi != a_u64ValHi) \
            RTTestFailed(g_hTest, "%s, %d: " a_szFunction ": expected %#RX64'%016RX64 got %#RX64'%016RX64\n", \
                         __FUNCTION__, __LINE__, a_u64ValHi, a_u64ValLo, uRet.s.Hi, uRet.s.Lo); \
        CHECKVAL128(a_pVar, a_u64ValHi, a_u64ValLo); \
    } while (0)

# define TEST_READ_128U(a_pVar, a_Function, a_u64ValHi, a_u64ValLo) \
    TEST_READ_128_EX(a_pVar, #a_Function, uRet = a_Function(a_pVar), a_u64ValHi, a_u64ValLo)
# define TEST_READ_128(a_pVar, a_Function, a_u64ValHi, a_u64ValLo) \
    TEST_READ_128_EX(a_pVar, #a_Function, uRet.u = a_Function(&a_pVar->u), a_u64ValHi, a_u64ValLo)

# define TEST_ATOMIC_READ_U128_TMPL(a_TestMacro, a_fn) \
    DECLINLINE(void) tst ## a_fn ## Worker(RTUINT128U volatile *pu128) \
    { \
        a_TestMacro(pu128, a_fn, 0, 0); \
        a_TestMacro(pu128, a_fn, 19983, 20245); \
        a_TestMacro(pu128, a_fn, UINT16_MAX, INT16_MAX); \
        a_TestMacro(pu128, a_fn, INT16_MAX, UINT16_MAX); \
        a_TestMacro(pu128, a_fn, UINT32_MAX, INT32_MAX); \
        a_TestMacro(pu128, a_fn, INT32_MAX, UINT32_MAX); \
        a_TestMacro(pu128, a_fn, UINT64_MAX, INT64_MAX); \
        a_TestMacro(pu128, a_fn, INT64_MAX, UINT64_MAX); \
        a_TestMacro(pu128, a_fn, UINT64_C(0xb5a23edcc258ad0a), UINT64_C(0xaf88507eceb58580)); \
        a_TestMacro(pu128, a_fn, UINT64_C(0x5dc7d02e4e474fdb), UINT64_C(0x132b375f2b60f4b6)); \
    }

TEST_ATOMIC_READ_U128_TMPL(TEST_READ_128, ASMAtomicReadU128)
TEST_ATOMIC_READ_U128_TMPL(TEST_READ_128, ASMAtomicUoReadU128)

TEST_ATOMIC_READ_U128_TMPL(TEST_READ_128U, ASMAtomicReadU128U)
TEST_ATOMIC_READ_U128_TMPL(TEST_READ_128U, ASMAtomicUoReadU128U)

#endif


static void tstASMAtomicRead(void)
{
…
    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);

#ifdef RTASM_HAVE_READ_U128
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicReadU128, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicReadU128U, RTUINT128U);

    DO_SIMPLE_TEST_NO_STACK(ASMAtomicUoReadU128, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicUoReadU128U, RTUINT128U);
#endif
}

…

}

#ifdef RTASM_HAVE_WRITE_U128

# define TEST_WRITE_128(a_pVar, a_Function, a_HiVal, a_LoVal) do { \
        RTUINT128U uValTmp; \
        a_Function(&a_pVar->u, (uValTmp = RTUINT128_INIT(a_HiVal, a_LoVal)).u); \
        CHECKVAL128(a_pVar, a_HiVal, a_LoVal); \
    } while (0)

# define TEST_WRITE_128U(a_pVar, a_Function, a_HiVal, a_LoVal) do { \
        RTUINT128U uValTmp; \
        a_Function(a_pVar, uValTmp = RTUINT128_INIT(a_HiVal, a_LoVal)); \
        CHECKVAL128(a_pVar, a_HiVal, a_LoVal); \
    } while (0)

# define TEST_WRITE_128v2(a_pVar, a_Function, a_HiVal, a_LoVal) \
    do { a_Function(&a_pVar->u, a_HiVal, a_LoVal); CHECKVAL128(a_pVar, a_HiVal, a_LoVal); } while (0)

# define TEST_ATOMIC_WRITE_U128_TMPL(a_TestMacro, a_fn) \
    DECLINLINE(void) tst ## a_fn ## Worker(RTUINT128U volatile *pu128) \
    { \
        a_TestMacro(pu128, a_fn, 0, 0); \
        a_TestMacro(pu128, a_fn, 19983, 20245); \
        a_TestMacro(pu128, a_fn, UINT16_MAX, INT16_MAX); \
        a_TestMacro(pu128, a_fn, INT16_MAX, UINT16_MAX); \
        a_TestMacro(pu128, a_fn, UINT32_MAX, INT32_MAX); \
        a_TestMacro(pu128, a_fn, INT32_MAX, UINT32_MAX); \
        a_TestMacro(pu128, a_fn, UINT64_MAX, INT64_MAX); \
        a_TestMacro(pu128, a_fn, INT64_MAX, UINT64_MAX); \
        a_TestMacro(pu128, a_fn, UINT64_C(0xb5a23edcc258ad0a), UINT64_C(0xaf88507eceb58580)); \
        a_TestMacro(pu128, a_fn, UINT64_C(0x5dc7d02e4e474fdb), UINT64_C(0x132b375f2b60f4b6)); \
    }

TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128, ASMAtomicWriteU128)
TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128, ASMAtomicUoWriteU128)

TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128U, ASMAtomicWriteU128U)
TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128U, ASMAtomicUoWriteU128U)

TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128v2, ASMAtomicWriteU128v2)
TEST_ATOMIC_WRITE_U128_TMPL(TEST_WRITE_128v2, ASMAtomicUoWriteU128v2)

#endif /* RTASM_HAVE_WRITE_U128 */

static void tstASMAtomicWrite(void)
{
…
    DO_SIMPLE_TEST(ASMAtomicWriteU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU64, uint64_t);

#ifdef RTASM_HAVE_WRITE_U128
    /* Not doing stack here, as it won't necessarily be correctly aligned for cmpxchg16b on MSC. */
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicWriteU128, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicWriteU128U, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicWriteU128v2, RTUINT128U);

    DO_SIMPLE_TEST_NO_STACK(ASMAtomicUoWriteU128, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicUoWriteU128U, RTUINT128U);
    DO_SIMPLE_TEST_NO_STACK(ASMAtomicUoWriteU128v2, RTUINT128U);
#endif
}

…

    BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
    BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
#ifdef RTASM_HAVE_READ_U128
    if (fHaveCmpXchg128)
    {
        BENCH(ASMAtomicUoReadU128(&s_u128.u), "ASMAtomicUoReadU128");
        BENCH(ASMAtomicUoReadU128U(&s_u128), "ASMAtomicUoReadU128U");
    }
#endif
    BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
    BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
…
    BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
    BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
#ifdef RTASM_HAVE_READ_U128
    if (fHaveCmpXchg128)
    {
        BENCH(ASMAtomicReadU128(&s_u128.u), "ASMAtomicReadU128");
        BENCH(ASMAtomicReadU128U(&s_u128), "ASMAtomicReadU128U");
    }
#endif
    BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
    BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
…
    BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
    BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
#ifdef RTASM_HAVE_WRITE_U128
    if (fHaveCmpXchg128)
    {
        BENCH(ASMAtomicUoWriteU128(&s_u128.u, (u128Tmp1 = RTUINT128_INIT_C(0, 0)).u), "ASMAtomicUoWriteU128");
        BENCH(ASMAtomicUoWriteU128v2(&s_u128.u, 0, 0), "ASMAtomicUoWriteU128v2");
        BENCH(ASMAtomicUoWriteU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0)), "ASMAtomicUoWriteU128U");
    }
#endif
    BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
    BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
…
    BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
    BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
#ifdef RTASM_HAVE_WRITE_U128
    if (fHaveCmpXchg128)
    {
        BENCH(ASMAtomicWriteU128(&s_u128.u, (u128Tmp1 = RTUINT128_INIT_C(0, 0)).u), "ASMAtomicWriteU128");
        BENCH(ASMAtomicWriteU128v2(&s_u128.u, 0, 0), "ASMAtomicWriteU128v2");
        BENCH(ASMAtomicWriteU128U(&s_u128, u128Tmp1 = RTUINT128_INIT_C(0, 0)), "ASMAtomicWriteU128U");
    }
#endif
    BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
    BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
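The benchmark additions above are guarded by fHaveCmpXchg128, matching the AMD64 caveat on the new functions. A sketch of how such a flag can be derived; the actual detection code in tstRTInlineAsm.cpp lies outside this diff and may differ, but CPUID.01H:ECX[13] is the architectural CMPXCHG16B indicator:

#include <iprt/asm.h>
#ifdef RT_ARCH_AMD64
# include <iprt/asm-amd64-x86.h>   /* ASMCpuId_ECX */
#endif

static bool queryCmpXchg128Support(void)
{
#if defined(RT_ARCH_AMD64)
    /* CPUID.01H:ECX[13] = CMPXCHG16B supported. */
    return RT_BOOL(ASMCpuId_ECX(1) & RT_BIT_32(13));
#elif defined(RT_ARCH_ARM64)
    return true;  /* the ldp/stp based paths are baseline ARMv8-A */
#else
    return false; /* no 128-bit atomics on this target */
#endif
}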