Changeset 87172 in vbox for trunk/include/iprt
- Timestamp: Jan 4, 2021 9:47:15 PM
- svn:sync-xref-src-repo-rev: 142101
- File: 1 edited
trunk/include/iprt/asm.h (r87171 → r87172)

@@ -303 +303 @@ (ASMAtomicXchgU8)
     uint32_t uOld;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicXchgU8_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxrb %w0, [%3]\n\t"
                          "stlxrb %w1, %w2, [%3]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicXchgU8_%=\n\t"
 # else
                          "ldrexb %0, [%3]\n\t" /* ARMv6+ */
                          "strexb %1, %2, [%3]\n\t"
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicXchgU8_%=\n\t"
 # endif
                          : "=&r" (uOld),

@@ -399 +400 @@ (ASMAtomicXchgU16)
     uint32_t uOld;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicXchgU16_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxrh %w0, [%3]\n\t"
                          "stlxrh %w1, %w2, [%3]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicXchgU16_%=\n\t"
 # else
                          "ldrexh %0, [%3]\n\t" /* ARMv6+ */
                          "strexh %1, %2, [%3]\n\t"
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicXchgU16_%=\n\t"
 # endif
                          : "=&r" (uOld),

@@ -484 +486 @@ (ASMAtomicXchgU32)
     uint32_t uOld;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicXchgU32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%3]\n\t"
                          "stlxr %w1, %w2, [%3]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicXchgU32_%=\n\t"
 # else
                          "ldrex %0, [%3]\n\t" /* ARMv6+ */
                          "strex %1, %2, [%3]\n\t"
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicXchgU32_%=\n\t"
 # endif
                          : "=&r" (uOld),

@@ -603 +606 @@ (ASMAtomicXchgU64)
 # elif defined(RT_ARCH_ARM32) || defined(RT_ARCH_ARM64)
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicXchgU64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%3]\n\t"
                          "stlxr %w1, %2, [%3]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicXchgU64_%=\n\t"
 # else
                          "ldrexd %H0, [%3]\n\t" /* ARMv6+ */
                          "strexd %1, %H2, [%3]\n\t"
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicXchgU64_%=\n\t"
 # endif
                          : "=&r" (u64),

@@ -858 +862 @@ (ASMAtomicCmpXchgU8)
     uint32_t u32Spill;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicCmpXchgU8_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxrb %w0, [%5]\n\t"
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "stlxrb %w1, %w4, [%5]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicCmpXchgU8_%=\n\t"
                          "mov %w2, #1\n\t"
 # else
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicCmpXchgU8_%=\n\t"
                          "mov %2, #1\n\t"
 # endif

@@ -990 +995 @@ (ASMAtomicCmpXchgU32)
     uint32_t u32Spill;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicCmpXchgU32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%5]\n\t"
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "stlxr %w1, %w4, [%5]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicCmpXchgU32_%=\n\t"
                          "mov %w2, #1\n\t"
 # else
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicCmpXchgU32_%=\n\t"
                          "mov %2, #1\n\t"
 # endif

@@ -1146 +1152 @@ (ASMAtomicCmpXchgU64)
     uint64_t u64Spill;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicCmpXchgU64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%5]\n\t"
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "stlxr %w1, %4, [%5]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"
                          "mov %w2, #1\n\t"
 # else
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"
                          "mov %2, #1\n\t"
 # endif

@@ -1374 +1381 @@ (ASMAtomicCmpXchgExU32)
     uint32_t u32ActualOld;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicCmpXchgExU32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%5]\n\t"
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "stlxr %w1, %w4, [%5]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t"
                          "mov %w2, #1\n\t"
 # else
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t"
                          "mov %2, #1\n\t"
 # endif

@@ -1535 +1543 @@ (ASMAtomicCmpXchgExU64)
     uint64_t u64ActualOld;
     uint32_t rcSpill;
-    __asm__ __volatile__(".Ltry_again%=:\n\t"
+    __asm__ __volatile__(".Ltry_again_ASMAtomicCmpXchgExU64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%5]\n\t"
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "stlxr %w1, %4, [%5]\n\t"
-                         "cbnz %w1, .Ltry_again%=\n\t"
+                         "cbnz %w1, .Ltry_again_ASMAtomicCmpXchgExU64_%=\n\t"
                          "mov %w2, #1\n\t"
 # else
 …
                          "bne 1f\n\t" /* stop here if not equal */
                          "cmp %1, #0\n\t"
-                         "bne .Ltry_again%=\n\t"
+                         "bne .Ltry_again_ASMAtomicCmpXchgExU64_%=\n\t"
                          "mov %2, #1\n\t"
 # endif

@@ -1875 +1884 @@
 #elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
     /* Note! Only armv7 and later. */
-    __asm__ __volatile__ ("dsb sy\n\t" ::: "memory"); /** @todo dmb? */
+    __asm__ __volatile__ ("dmb sy\n\t" ::: "memory");
 #elif ARCH_BITS == 16
     uint16_t volatile u16;

@@ -2194 +2203 @@
 # elif defined(RT_ARCH_ARM32)
     Assert(!((uintptr_t)pu64 & 7));
-    __asm__ __volatile__("dsb sy\n\t" /** @todo dmb? */
+    __asm__ __volatile__("dmb sy\n\t"
                          "ldrexd %0, %H0, [%1]\n\t"
                          : "=&r" (u64)

@@ -3140 +3149 @@
     uint32_t u32Spill;
     __asm__ __volatile__(".Ltry_again_add_u32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%4]\n\t"

@@ -3215 +3225 @@
     uint64_t u64Spill;
     __asm__ __volatile__(".Ltry_again_add_u64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%4]\n\t"

@@ -3505 +3516 @@
     uint32_t rcSpill;
     __asm__ __volatile__(".Ltry_again_inc_u32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%2]\n\t"

@@ -3575 +3587 @@
     uint32_t rcSpill;
     __asm__ __volatile__(".Ltry_again_inc_u64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%2]\n\t"

@@ -3701 +3714 @@
     uint32_t rcSpill;
     __asm__ __volatile__(".Ltry_again_dec_u32_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %w0, [%2]\n\t"

@@ -3771 +3785 @@
     uint32_t rcSpill;
     __asm__ __volatile__(".Ltry_again_dec_u64_%=:\n\t"
+                         "dmb sy\n\t"
 # if defined(RT_ARCH_ARM64)
                          "ldaxr %0, [%2]\n\t"
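For context on what is being patched: every function touched above implements its atomic operation as an ARM load-exclusive/store-exclusive (LL/SC) retry loop, and the changeset does two things to each loop: it prefixes the retry label with the owning function's name, and it inserts a "dmb sy" full memory barrier at the top of the loop. Below is a minimal standalone sketch of the resulting AArch64 pattern, assuming GCC/Clang extended inline assembly; MyAtomicXchgU32 is a hypothetical stand-in, not IPRT's ASMAtomicXchgU32 itself.

    #include <stdint.h>

    /* Sketch of the post-r87172 LL/SC exchange loop (AArch64, GCC/Clang).
     * MyAtomicXchgU32 is a hypothetical stand-in for ASMAtomicXchgU32. */
    static inline uint32_t MyAtomicXchgU32(uint32_t volatile *pu32, uint32_t u32New)
    {
        uint32_t uOld;
        uint32_t rcSpill;
        __asm__ __volatile__(".Ltry_again_MyAtomicXchgU32_%=:\n\t" /* %= expands to a number unique to this asm instance */
                             "dmb sy\n\t"               /* full-system barrier, as added by this changeset */
                             "ldaxr %w0, [%3]\n\t"      /* load-acquire exclusive: uOld = *pu32 */
                             "stlxr %w1, %w2, [%3]\n\t" /* store-release exclusive: %w1 is 0 on success */
                             "cbnz %w1, .Ltry_again_MyAtomicXchgU32_%=\n\t" /* lost the exclusive reservation, retry */
                             : "=&r" (uOld),
                               "=&r" (rcSpill)
                             : "r" (u32New),
                               "r" (pu32)
                             : "memory");
        return uOld;
    }

Since %= already yields a unique number per asm instance, the function-name prefixes are presumably about making the labels attributable in generated assembly and disassembly rather than about correctness; the commit carries no message, so that rationale is an inference.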
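The remaining two hunks resolve the /** @todo dmb? */ markers by replacing "dsb sy" with "dmb sy". The distinction: dmb only enforces ordering of memory accesses relative to one another, while dsb additionally stalls until all outstanding accesses complete before any later instruction executes, which is stronger (and slower) than a memory fence requires. A minimal sketch of the resulting full fence, assuming ARMv7 or later; MyMemoryFence is a hypothetical wrapper, not IPRT's API.

    /* Full memory fence after the dsb -> dmb swap (ARMv7+/ARMv8). */
    static inline void MyMemoryFence(void)
    {
        /* "dmb sy": order all loads and stores, full-system domain.
         * The "memory" clobber also stops the compiler from reordering. */
        __asm__ __volatile__("dmb sy\n\t" ::: "memory");
    }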