VirtualBox

Changeset 106551 in vbox for trunk/include


Ignore:
Timestamp:
Oct 21, 2024 10:14:22 AM (7 months ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
165385
Message:

iprt/asm.h: Adjustments of the win/arm64 changes - RT_INLINE_ASM_USES_INTRIN is always defined and we must test the value it has. bugref:10392

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/include/iprt/asm.h

    Diff of trunk/include/iprt/asm.h, r106550 → r106551
    (left column = old line number, right column = new line number;
     "−" = removed line, "+" = added line, two numbers = unmodified context)

    1421 1421
    1422 1422   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    1423      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         1423 + #  if RT_INLINE_ASM_USES_INTRIN
    1424 1424   #   if defined(RTASM_ARM64_USE_FEAT_LSE_WITHOUT_DMB)
    1425 1425       uint32_t const uOldActual = __casal32(pu32, u32Old, u32New);

    1618 1618
    1619 1619   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    1620      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         1620 + #  if RT_INLINE_ASM_USES_INTRIN
    1621 1621   #   if defined(RTASM_ARM64_USE_FEAT_LSE_WITHOUT_DMB)
    1622 1622       uint64_t const uOldActual = __casal64(pu64, u64Old, u64New);

    1998 1998
    1999 1999   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    2000      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         2000 + #  if RT_INLINE_ASM_USES_INTRIN
    2001 2001   #   if defined(RTASM_ARM64_USE_FEAT_LSE)
    2002 2002   #    if defined(RTASM_ARM64_USE_FEAT_LSE_WITHOUT_DMB)

    2166 2166
    2167 2167   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    2168      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         2168 + #  if RT_INLINE_ASM_USES_INTRIN
    2169 2169   #   if defined(RTASM_ARM64_USE_FEAT_LSE)
    2170 2170   #    if defined(RTASM_ARM64_USE_FEAT_LSE_WITHOUT_DMB)

    2334 2334
    2335 2335   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    2336      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         2336 + #  if RT_INLINE_ASM_USES_INTRIN
    2337 2337   #   if defined(RTASM_ARM64_USE_FEAT_LSE)
    2338 2338   #    if defined(RTASM_ARM64_USE_FEAT_LSE_WITHOUT_DMB)

    3002 3002   DECLINLINE(void) ASMSerializeInstruction(void) RT_NOTHROW_DEF
    3003 3003   {
    3004      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3004 + # if RT_INLINE_ASM_USES_INTRIN
    3005 3005       __dsb(_ARM64_BARRIER_SY);
    3006 3006   # else

    3033 3033   # endif
    3034 3034   #elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3035      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3035 + # if RT_INLINE_ASM_USES_INTRIN
    3036 3036       __dmb(_ARM64_BARRIER_SY);
    3037 3037   # else

    3068 3068   # endif
    3069 3069   #elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3070      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3070 + # if RT_INLINE_ASM_USES_INTRIN
    3071 3071       __dmb(_ARM64_BARRIER_ST);
    3072 3072   # else

    3099 3099   # endif
    3100 3100   #elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3101      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3101 + # if RT_INLINE_ASM_USES_INTRIN
    3102 3102       __dmb(_ARM64_BARRIER_LD);
    3103 3103   # else

    3119 3119   {
    3120 3120   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3121      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3121 + # if RT_INLINE_ASM_USES_INTRIN
    3122 3122       return __load_acquire8(pu8);
    3123 3123

    3170 3170   {
    3171 3171   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3172      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3172 + # if RT_INLINE_ASM_USES_INTRIN
    3173 3173       return (uint8_t)__iso_volatile_load8((volatile char *)pu8); /* (emits ldrsb, sign-extending it to 32-bit) */
    3174 3174

    3218 3218   {
    3219 3219   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3220      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3220 + # if RT_INLINE_ASM_USES_INTRIN
    3221 3221       return __iso_volatile_load8((volatile const char *)pi8);
    3222 3222

    3250 3250       Assert(!((uintptr_t)pu16 & 1));
    3251 3251   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3252      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3252 + # if RT_INLINE_ASM_USES_INTRIN
    3253 3253       return __load_acquire16(pu16);
    3254 3254

    3302 3302       Assert(!((uintptr_t)pu16 & 1));
    3303 3303   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3304      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3304 + # if RT_INLINE_ASM_USES_INTRIN
    3305 3305       return (uint16_t)__iso_volatile_load16((volatile int16_t *)pu16);  /* (emits ldrsh, sign-extending it to 32-bit) */
    3306 3306

    3352 3352       Assert(!((uintptr_t)pi16 & 1));
    3353 3353   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3354      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3354 + # if RT_INLINE_ASM_USES_INTRIN
    3355 3355       return __iso_volatile_load16(pi16);
    3356 3356

    3384 3384       Assert(!((uintptr_t)pu32 & 3));
    3385 3385   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3386      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3386 + # if RT_INLINE_ASM_USES_INTRIN
    3387 3387       return (uint32_t)__load_acquire32(pu32);
    3388 3388

    3439 3439       Assert(!((uintptr_t)pu32 & 3));
    3440 3440   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3441      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3441 + # if RT_INLINE_ASM_USES_INTRIN
    3442 3442       return (uint32_t)__iso_volatile_load32((volatile int32_t *)pu32);
    3443 3443

    3495 3495       Assert(!((uintptr_t)pi32 & 3));
    3496 3496   #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3497      − # ifdef RT_INLINE_ASM_USES_INTRIN
         3497 + # if RT_INLINE_ASM_USES_INTRIN
    3498 3498       return __iso_volatile_load32(pi32);
    3499 3499

    3602 3602       Assert(!((uintptr_t)pu64 & 7));
    3603 3603
    3604      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         3604 + #  if RT_INLINE_ASM_USES_INTRIN
    3605 3605       u64 = (uint64_t)__load_acquire64(pu64);
    3606 3606

    3726 3726   # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    3727 3727       Assert(!((uintptr_t)pu64 & 7));
    3728      − #  ifdef RT_INLINE_ASM_USES_INTRIN
         3728 + #  if RT_INLINE_ASM_USES_INTRIN
    3729 3729       u64 = (uint64_t)__iso_volatile_load64((volatile int64_t *)pu64);
    3730 3730

    4185 4185       /* The DMB SY will ensure ordering a la x86, the stlrb is probably overkill
    4186 4186          as all byte accesses are single-copy atomic, which I think suffices here. */
    4187      − # ifdef RT_INLINE_ASM_USES_INTRIN
         4187 + # if RT_INLINE_ASM_USES_INTRIN
    4188 4188       __dmb(_ARM64_BARRIER_SY);
    4189 4189       __stlr8(pu8, u8);

    4258 4258   #if defined(RT_ARCH_ARM64)
    4259 4259       /* See ASMAtomicWriteU8 comments. */
    4260      − # ifdef RT_INLINE_ASM_USES_INTRIN
         4260 + # if RT_INLINE_ASM_USES_INTRIN
    4261 4261       __dmb(_ARM64_BARRIER_SY);
    4262 4262       __stlr16(pu16, u16);

    4333 4333   #if defined(RT_ARCH_ARM64)
    4334 4334      /* See ASMAtomicWriteU8 comments. */
    4335      − # ifdef RT_INLINE_ASM_USES_INTRIN
         4335 + # if RT_INLINE_ASM_USES_INTRIN
    4336 4336       __dmb(_ARM64_BARRIER_SY);
    4337 4337       __stlr32(pu32, u32);

    4416 4416   #if defined(RT_ARCH_ARM64)
    4417 4417       /* See ASMAtomicWriteU8 comments. */
    4418      − # ifdef RT_INLINE_ASM_USES_INTRIN
         4418 + # if RT_INLINE_ASM_USES_INTRIN
    4419 4419       __dmb(_ARM64_BARRIER_SY);
    4420 4420       __stlr64(pu64, u64);
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette