Changeset 6644 in vbox


Timestamp: Jan 31, 2008 9:40:28 AM
Author: vboxsync
svn:sync-xref-src-repo-rev: 27709
Message:

Add CmpXchg functions to IPRT which additionally pass back the old value, including some testcases.

Location: trunk
Files: 2 edited
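
The new Ex variants behave like the existing ASMAtomicCmpXchg* functions, but a failed compare-and-exchange also hands the value that won the race back to the caller, so retry loops need no second read. A minimal usage sketch against the new API (the ExampleIncU32 helper is illustrative, not part of the changeset):

    #include <iprt/asm.h>

    /* Lock-free increment: on failure the current value comes back through
     * the fourth parameter, so the loop retries without re-reading *pu32. */
    static uint32_t ExampleIncU32(volatile uint32_t *pu32)
    {
        uint32_t u32Seen = *pu32;
        uint32_t u32Actual;
        while (!ASMAtomicCmpXchgExU32(pu32, u32Seen + 1, u32Seen, &u32Actual))
            u32Seen = u32Actual; /* someone else changed it; retry from their value */
        return u32Seen + 1;
    }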

  • trunk/include/iprt/asm.h

    (diff from r6385 to r6644)

/** @def ASMAtomicCmpXchgSize
 * Atomically Compare and Exchange a value whose size might differ
…
#elif ARCH_BITS == 64
    return ASMAtomicCmpXchgU64((volatile uint64_t *)(void *)ppv, (uint64_t)pvNew, (uint64_t)pvOld);
#else
# error "ARCH_BITS is bogus"
#endif
}


/**
 * Atomically Compare and Exchange an unsigned 32-bit value, additionally
 * passing back the old value.
 *
 * @returns true if xchg was done.
 * @returns false if xchg wasn't done.
 *
 * @param   pu32        Pointer to the value to update.
 * @param   u32New      The new value to assign to *pu32.
 * @param   u32Old      The value to compare *pu32 with.
 * @param   pu32Old     Pointer to store the old value at.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(bool) ASMAtomicCmpXchgExU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old, uint32_t *pu32Old);
#else
DECLINLINE(bool) ASMAtomicCmpXchgExU32(volatile uint32_t *pu32, const uint32_t u32New, const uint32_t u32Old, uint32_t *pu32Old)
{
# if RT_INLINE_ASM_GNU_STYLE
    uint32_t u32Ret;
    __asm__ __volatile__("lock; cmpxchgl %3, %0\n\t"
                         "movl %%eax, %2\n\t"
                         "setz  %%al\n\t"
                         "movzbl %%al, %%eax\n\t"
                         : "=m" (*pu32),
                           "=a" (u32Ret),
                           "=m" (*pu32Old)
                         : "r" (u32New),
                           "1" (u32Old));
    return (bool)u32Ret;

# elif RT_INLINE_ASM_USES_INTRIN
    return (*pu32Old = _InterlockedCompareExchange((long *)pu32, u32New, u32Old)) == u32Old;

# else
    uint32_t u32Ret;
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rdx, [pu32]
#  else
        mov     edx, [pu32]
#  endif
        mov     eax, [u32Old]
        mov     ecx, [u32New]
#  ifdef RT_ARCH_AMD64
        lock cmpxchg [rdx], ecx
        mov     rdx, [pu32Old]
        mov     [rdx], eax
#  else
        lock cmpxchg [edx], ecx
        mov     edx, [pu32Old]
        mov     [edx], eax
#  endif
        setz    al
        movzx   eax, al
        mov     [u32Ret], eax
    }
    return !!u32Ret;
# endif
}
#endif

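/* Illustrative sketch (not part of this changeset; hypothetical helper): an
 * atomic fetch-and-OR built on the Ex variant above.  The old value handed
 * back on failure seeds the next attempt, avoiding a racy re-read of *pu32. */
DECLINLINE(uint32_t) ExampleAtomicOrExU32(volatile uint32_t *pu32, uint32_t fOrMask)
{
    uint32_t u32Seen = *pu32;
    uint32_t u32Actual;
    while (!ASMAtomicCmpXchgExU32(pu32, u32Seen | fOrMask, u32Seen, &u32Actual))
        u32Seen = u32Actual;
    return u32Seen; /* the value before the OR was applied */
}
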
/**
 * Atomically Compare and Exchange a signed 32-bit value, additionally
 * passing back the old value.
 *
 * @returns true if xchg was done.
 * @returns false if xchg wasn't done.
 *
 * @param   pi32        Pointer to the value to update.
 * @param   i32New      The new value to assign to *pi32.
 * @param   i32Old      The value to compare *pi32 with.
 * @param   pi32Old     Pointer to store the old value at.
 */
DECLINLINE(bool) ASMAtomicCmpXchgExS32(volatile int32_t *pi32, const int32_t i32New, const int32_t i32Old, int32_t *pi32Old)
{
    return ASMAtomicCmpXchgExU32((volatile uint32_t *)pi32, (uint32_t)i32New, (uint32_t)i32Old, (uint32_t *)pi32Old);
}


/**
 * Atomically Compare and exchange an unsigned 64-bit value, additionally
 * passing back the old value.
 *
 * @returns true if xchg was done.
 * @returns false if xchg wasn't done.
 *
 * @param   pu64        Pointer to the 64-bit variable to update.
 * @param   u64New      The 64-bit value to assign to *pu64.
 * @param   u64Old      The value to compare with.
 * @param   pu64Old     Pointer to store the old value at.
 */
#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
DECLASM(bool) ASMAtomicCmpXchgExU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old, uint64_t *pu64Old);
#else
DECLINLINE(bool) ASMAtomicCmpXchgExU64(volatile uint64_t *pu64, const uint64_t u64New, const uint64_t u64Old, uint64_t *pu64Old)
{
# if RT_INLINE_ASM_USES_INTRIN
    return (*pu64Old = _InterlockedCompareExchange64((__int64 *)pu64, u64New, u64Old)) == u64Old;

# elif defined(RT_ARCH_AMD64)
#  if RT_INLINE_ASM_GNU_STYLE
    uint64_t u64Ret;
    __asm__ __volatile__("lock; cmpxchgq %3, %0\n\t"
                         "movq %%rax, %2\n\t"
                         "setz  %%al\n\t"
                         "movzbl %%al, %%eax\n\t"
                         : "=m" (*pu64),
                           "=a" (u64Ret),
                           "=m" (*pu64Old)
                         : "r" (u64New),
                           "1" (u64Old));
    return (bool)u64Ret;
#  else
    bool fRet;
    __asm
    {
        mov     rdx, [pu64]
        mov     rax, [u64Old]
        mov     rcx, [u64New]
        lock cmpxchg [rdx], rcx
        mov     rdx, [pu64Old]
        mov     [rdx], rax
        setz    al
        mov     [fRet], al
    }
    return fRet;
#  endif
# else /* !RT_ARCH_AMD64 */
#  if RT_INLINE_ASM_GNU_STYLE
    uint64_t u64Ret;
#   if defined(PIC) || defined(RT_OS_DARWIN) /* darwin: 4.0.1 compiler option / bug? */
    __asm__ __volatile__("xchgl %%ebx, %2\n\t"
                         "lock; cmpxchg8b %1\n\t"
                         "xchgl %%ebx, %2\n\t"
                         : "=A" (u64Ret),
                           "=m" (*pu64)
                         : "DS" (u64New & 0xffffffff),
                           "c" (u64New >> 32),
                           "m" (*pu64),
                           "a" (u64Old & 0xffffffff),
                           "d" (u64Old >> 32) );
#   else /* !PIC */
    __asm__ __volatile__("lock; cmpxchg8b %1\n\t"
                         : "=A" (u64Ret),
                           "=m" (*pu64)
                         : "b" (u64New & 0xffffffff),
                           "c" (u64New >> 32),
                           "m" (*pu64),
                           "a" (u64Old & 0xffffffff),
                           "d" (u64Old >> 32) );
#   endif
    *pu64Old = u64Ret;
    return u64Ret == u64Old; /* cmpxchg8b swaps and sets ZF only when the old value matched */
#  else
    uint32_t u32Ret;
    __asm
    {
        mov     ebx, dword ptr [u64New]
        mov     ecx, dword ptr [u64New + 4]
        mov     edi, [pu64]
        mov     eax, dword ptr [u64Old]
        mov     edx, dword ptr [u64Old + 4]
        lock cmpxchg8b [edi]
        mov     ebx, [pu64Old]
        mov     [ebx], eax
        add     ebx, 4                  /* high dword of *pu64Old lives at offset 4 */
        mov     [ebx], edx
        setz    al
        movzx   eax, al
        mov     dword ptr [u32Ret], eax
    }
    return !!u32Ret;
#  endif
# endif /* !RT_ARCH_AMD64 */
}
#endif

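/* Illustrative sketch (not part of this changeset; hypothetical helper):
 * raise a 64-bit statistic to a new maximum without locks.  On x86-32 this
 * compiles down to the cmpxchg8b path above. */
DECLINLINE(void) ExampleAtomicMaxU64(volatile uint64_t *pu64, uint64_t u64New)
{
    uint64_t u64Seen = ASMAtomicReadU64(pu64);
    uint64_t u64Actual;
    while (   u64New > u64Seen
           && !ASMAtomicCmpXchgExU64(pu64, u64New, u64Seen, &u64Actual))
        u64Seen = u64Actual;
}
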
/**
 * Atomically Compare and exchange a signed 64-bit value, additionally
 * passing back the old value.
 *
 * @returns true if xchg was done.
 * @returns false if xchg wasn't done.
 *
 * @param   pi64        Pointer to the 64-bit variable to update.
 * @param   i64         The 64-bit value to assign to *pi64.
 * @param   i64Old      The value to compare with.
 * @param   pi64Old     Pointer to store the old value at.
 */
DECLINLINE(bool) ASMAtomicCmpXchgExS64(volatile int64_t *pi64, const int64_t i64, const int64_t i64Old, int64_t *pi64Old)
{
    return ASMAtomicCmpXchgExU64((volatile uint64_t *)pi64, (uint64_t)i64, (uint64_t)i64Old, (uint64_t *)pi64Old);
}


/** @def ASMAtomicCmpXchgExSize
 * Atomically Compare and Exchange a value whose size might differ
 * between platforms or compilers. Additionally passes back the old value.
 *
 * @param   pu          Pointer to the value to update.
 * @param   uNew        The new value to assign to *pu.
 * @param   uOld        The value to compare *pu with.
 * @param   fRc         Where to store the result.
 * @param   uOldVal     Where to store the old value.
 */
#define ASMAtomicCmpXchgExSize(pu, uNew, uOld, fRc, uOldVal) \
    do { \
        switch (sizeof(*(pu))) { \
            case 4: (fRc) = ASMAtomicCmpXchgExU32((volatile uint32_t *)(void *)(pu), (uint32_t)(uNew), (uint32_t)(uOld), (uint32_t *)&(uOldVal)); \
                break; \
            case 8: (fRc) = ASMAtomicCmpXchgExU64((volatile uint64_t *)(void *)(pu), (uint64_t)(uNew), (uint64_t)(uOld), (uint64_t *)&(uOldVal)); \
                break; \
            default: AssertMsgFailed(("ASMAtomicCmpXchgExSize: size %d is not supported\n", sizeof(*(pu)))); \
                (fRc) = false; \
                (uOldVal) = 0; \
                break; \
        } \
    } while (0)

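/* Illustrative use of the size-dispatch macro (not part of this changeset;
 * hypothetical helper): works for any 4- or 8-byte lvalue such as size_t. */
DECLINLINE(bool) ExampleCmpXchgExSize(size_t volatile *pcb, size_t cbNew, size_t cbOld, size_t *pcbActual)
{
    bool fRc;
    ASMAtomicCmpXchgExSize(pcb, cbNew, cbOld, fRc, *pcbActual);
    return fRc;
}
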
/**
 * Atomically Compare and Exchange a pointer value, additionally
 * passing back the old value.
 *
 * @returns true if xchg was done.
 * @returns false if xchg wasn't done.
 *
 * @param   ppv         Pointer to the value to update.
 * @param   pvNew       The new value to assign to *ppv.
 * @param   pvOld       The value to compare *ppv with.
 * @param   ppvOld      Pointer to store the old value at.
 */
DECLINLINE(bool) ASMAtomicCmpXchgExPtr(void * volatile *ppv, void *pvNew, void *pvOld, void **ppvOld)
{
#if ARCH_BITS == 32
    return ASMAtomicCmpXchgExU32((volatile uint32_t *)(void *)ppv, (uint32_t)pvNew, (uint32_t)pvOld, (uint32_t *)ppvOld);
#elif ARCH_BITS == 64
    return ASMAtomicCmpXchgExU64((volatile uint64_t *)(void *)ppv, (uint64_t)pvNew, (uint64_t)pvOld, (uint64_t *)ppvOld);
#else
# error "ARCH_BITS is bogus"
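
ASMAtomicCmpXchgExPtr makes the classic lock-free list push compact, since a lost race already yields the new head for the next attempt (ABA concerns ignored for brevity). A minimal sketch, assuming a hypothetical EXAMPLENODE type and g_pvHead variable, neither of which is part of the changeset:

    typedef struct EXAMPLENODE
    {
        struct EXAMPLENODE *pNext;
    } EXAMPLENODE;

    static void * volatile g_pvHead = NULL;

    static void ExampleListPush(EXAMPLENODE *pNode)
    {
        void *pvSeen = g_pvHead;
        void *pvActual;
        for (;;)
        {
            pNode->pNext = (EXAMPLENODE *)pvSeen;
            if (ASMAtomicCmpXchgExPtr(&g_pvHead, pNode, pvSeen, &pvActual))
                return;
            pvSeen = pvActual; /* lost the race; relink against the new head */
        }
    }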
  • trunk/src/VBox/Runtime/testcase/tstInlineAsm.cpp

    (diff from r5999 to r6644)

    CHECKOP(ASMAtomicCmpXchgU64(&u64, 0x80040008008efdULL, 0), true, "%d", bool);
    CHECKVAL(u64, 0x80040008008efdULL, "%llx");
}


static void tstASMAtomicCmpXchgExU32(void)
{
    uint32_t u32 = 0xffffffff;
    uint32_t u32Old = 0x80005111;

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0, &u32Old), false, "%d", bool);
    CHECKVAL(u32, 0xffffffff, "%x");
    CHECKVAL(u32Old, 0xffffffff, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0xffffffff, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0xffffffff, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0xffffffff, &u32Old), false, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0x8008efd, 0, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0x8008efd, "%x");
    CHECKVAL(u32Old, 0, "%x");

    CHECKOP(ASMAtomicCmpXchgExU32(&u32, 0, 0x8008efd, &u32Old), true, "%d", bool);
    CHECKVAL(u32, 0, "%x");
    CHECKVAL(u32Old, 0x8008efd, "%x");
}


static void tstASMAtomicCmpXchgExU64(void)
{
    uint64_t u64 = 0xffffffffffffffffULL;
    uint64_t u64Old = 0x8000000051111111ULL;

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0xffffffffffffffffULL, "%llx");
    CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0xffffffffffffffffULL, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0xffffffffffffffffULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0xffffffff00000000ULL, &u64Old), false, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0x80040008008efdULL, 0, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0x80040008008efdULL, "%llx");
    CHECKVAL(u64Old, 0ULL, "%llx");

    CHECKOP(ASMAtomicCmpXchgExU64(&u64, 0, 0x80040008008efdULL, &u64Old), true, "%d", bool);
    CHECKVAL(u64, 0ULL, "%llx");
    CHECKVAL(u64Old, 0x80040008008efdULL, "%llx");
}

…

    tstASMAtomicCmpXchgU32();
    tstASMAtomicCmpXchgU64();
    tstASMAtomicCmpXchgExU32();
    tstASMAtomicCmpXchgExU64();
    tstASMAtomicReadU64();
    tstASMAtomicDecIncS32();
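
The commit adds testcases only for the unsigned Ex variants; the thin signed wrappers could be covered in the same CHECKOP/CHECKVAL style. An illustrative sketch, not part of the changeset:

    static void tstASMAtomicCmpXchgExS32(void)
    {
        int32_t i32 = -1;
        int32_t i32Old = 0;

        CHECKOP(ASMAtomicCmpXchgExS32(&i32, 0, 0, &i32Old), false, "%d", bool);
        CHECKVAL(i32, -1, "%d");
        CHECKVAL(i32Old, -1, "%d");

        CHECKOP(ASMAtomicCmpXchgExS32(&i32, 0x5555, -1, &i32Old), true, "%d", bool);
        CHECKVAL(i32, 0x5555, "%d");
        CHECKVAL(i32Old, -1, "%d");
    }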