Changeset 87181 in vbox for trunk/include/iprt
- Timestamp: Jan 5, 2021, 11:15:34 PM (4 years ago)
- svn:sync-xref-src-repo-rev: 142112
- Location: trunk/include/iprt
- Files: 2 edited
trunk/include/iprt/asm.h
(r87178 → r87181)

The ARM-specific parts of asm.h are reworked around a new family of RTASM_ARM_* helper macros. The following block is inserted after the existing #endif at line 183 (new lines 184–378); it provides the barrier instruction strings, the matching dummy/zero input operands, RTASM_ARM_PICK_6432 for choosing a 64-bit or 32-bit expression, and four load/modify/store retry-loop macros:

    /*
     * ARM is great fun.
     */
    #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)

    # define RTASM_ARM_NO_BARRIER
    # ifdef RT_ARCH_ARM64
    #  define RTASM_ARM_NO_BARRIER_IN_REG
    #  define RTASM_ARM_NO_BARRIER_COMMA_IN_REG
    #  define RTASM_ARM_DSB_SY              "dsb sy\n\t"
    #  define RTASM_ARM_DSB_SY_IN_REG
    #  define RTASM_ARM_DSB_SY_COMMA_IN_REG
    #  define RTASM_ARM_DMB_SY              "dmb sy\n\t"
    #  define RTASM_ARM_DMB_SY_IN_REG
    #  define RTASM_ARM_DMB_SY_COMMA_IN_REG
    #  define RTASM_ARM_DMB_ST              "dmb st\n\t"
    #  define RTASM_ARM_DMB_ST_IN_REG
    #  define RTASM_ARM_DMB_ST_COMMA_IN_REG
    #  define RTASM_ARM_DMB_LD              "dmb ld\n\t"
    #  define RTASM_ARM_DMB_LD_IN_REG
    #  define RTASM_ARM_DMB_LD_COMMA_IN_REG
    #  define RTASM_ARM_PICK_6432(expr64, expr32) expr64
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(name, a_pu32Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint32_t u32NewRet; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RTASM_ARM_##barrier_type /* before lable? */ \
                             "ldaxr     %w[uNew], %[pMem]\n\t" \
                             modify64 \
                             "stlxr     %w[rc], %w[uNew], %[pMem]\n\t" \
                             "cbnz      %w[rc], .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu32Mem) \
                             , [uNew] "=&r" (u32NewRet) \
                             , [rc]   "=&r" (rcSpill) \
                             : in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(name, a_pu32Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint32_t u32OldRet; \
        uint32_t u32NewSpill; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RTASM_ARM_##barrier_type /* before lable? */ \
                             "ldaxr     %w[uOld], %[pMem]\n\t" \
                             modify64 \
                             "stlxr     %w[rc], %w[uNew], %[pMem]\n\t" \
                             "cbnz      %w[rc], .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu32Mem) \
                             , [uOld] "=&r" (u32OldRet) \
                             , [uNew] "=&r" (u32NewSpill) \
                             , [rc]   "=&r" (rcSpill) \
                             : in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_64(name, a_pu64Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint64_t u64NewRet; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RTASM_ARM_##barrier_type /* before lable? */ \
                             "ldaxr     %[uNew], %[pMem]\n\t" \
                             modify64 \
                             "stlxr     %w[rc], %[uNew], %[pMem]\n\t" \
                             "cbnz      %w[rc], .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu64Mem) \
                             , [uNew] "=&r" (u64NewRet) \
                             , [rc]   "=&r" (rcSpill) \
                             : in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_64(name, a_pu64Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint64_t u64OldRet; \
        uint64_t u64NewSpill; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RTASM_ARM_##barrier_type /* before lable? */ \
                             "ldaxr     %[uOld], %[pMem]\n\t" \
                             modify64 \
                             "stlxr     %w[rc], %[uNew], %[pMem]\n\t" \
                             "cbnz      %w[rc], .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu64Mem) \
                             , [uOld] "=&r" (u64OldRet) \
                             , [uNew] "=&r" (u64NewSpill) \
                             , [rc]   "=&r" (rcSpill) \
                             : in_reg \
                             : "cc")

    # else /* RT_ARCH_ARM32 */
    #  define RTASM_ARM_PICK_6432(expr64, expr32) expr32
    #  if RT_ARCH_ARM32 >= 7
    #   warning armv7
    #   define RTASM_ARM_NO_BARRIER_IN_REG
    #   define RTASM_ARM_NO_BARRIER_COMMA_IN_REG
    #   define RTASM_ARM_DSB_SY             "dsb sy\n\t"
    #   define RTASM_ARM_DSB_SY_IN_REG      "X" (0xfade)
    #   define RTASM_ARM_DMB_SY             "dmb sy\n\t"
    #   define RTASM_ARM_DMB_SY_IN_REG      "X" (0xfade)
    #   define RTASM_ARM_DMB_ST             "dmb st\n\t"
    #   define RTASM_ARM_DMB_ST_IN_REG      "X" (0xfade)
    #   define RTASM_ARM_DMB_LD             "dmb ld\n\t"
    #   define RTASM_ARM_DMB_LD_IN_REG      "X" (0xfade)

    #  elif RT_ARCH_ARM32 >= 6
    #   warning armv6
    #   define RTASM_ARM_DSB_SY             "mcr p15, 0, %[uZero], c7, c10, 4\n\t"
    #   define RTASM_ARM_DSB_SY_IN_REG      [uZero] "r" (0)
    #   define RTASM_ARM_DMB_SY             "mcr p15, 0, %[uZero], c7, c10, 5\n\t"
    #   define RTASM_ARM_DMB_SY_IN_REG      [uZero] "r" (0)
    #   define RTASM_ARM_DMB_ST             RTASM_ARM_DMB_SY
    #   define RTASM_ARM_DMB_ST_IN_REG      RTASM_ARM_DMB_SY_IN_REG
    #   define RTASM_ARM_DMB_LD             RTASM_ARM_DMB_SY
    #   define RTASM_ARM_DMB_LD_IN_REG      RTASM_ARM_DMB_SY_IN_REG
    #  elif RT_ARCH_ARM32 >= 4
    #   warning armv5 or older
    #   define RTASM_ARM_DSB_SY             "mcr p15, 0, %[uZero], c7, c10, 4\n\t"
    #   define RTASM_ARM_DSB_SY_IN_REG      [uZero] "r" (0)
    #   define RTASM_ARM_DMB_SY             RTASM_ARM_DSB_SY
    #   define RTASM_ARM_DMB_SY_IN_REG      RTASM_ARM_DSB_SY_IN_REG
    #   define RTASM_ARM_DMB_ST             RTASM_ARM_DSB_SY
    #   define RTASM_ARM_DMB_ST_IN_REG      RTASM_ARM_DSB_SY_IN_REG
    #   define RTASM_ARM_DMB_LD             RTASM_ARM_DSB_SY
    #   define RTASM_ARM_DMB_LD_IN_REG      RTASM_ARM_DSB_SY_IN_REG
    #  else
    #   error "huh? Odd RT_ARCH_ARM32 value!"
    #  endif
    #  define RTASM_ARM_DSB_SY_COMMA_IN_REG , RTASM_ARM_DSB_SY_IN_REG
    #  define RTASM_ARM_DMB_SY_COMMA_IN_REG , RTASM_ARM_DMB_SY_IN_REG
    #  define RTASM_ARM_DMB_ST_COMMA_IN_REG , RTASM_ARM_DMB_ST_IN_REG
    #  define RTASM_ARM_DMB_LD_COMMA_IN_REG , RTASM_ARM_DMB_LD_IN_REG
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(name, a_pu32Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint32_t u32NewRet; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \
                             "ldrex     %[uNew], %[pMem]\n\t" \
                             modify32 \
                             "strex     %[rc], %[uNew], %[pMem]\n\t" \
                             "cmp       %[rc], #0\n\t" \
                             "bne       .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu32Mem) \
                             , [uNew] "=&r" (u32NewRet) \
                             , [rc]   "=&r" (rcSpill) \
                             : RT_CONCAT3(RTASM_ARM_,barrier_type,_IN_REG) \
                             , in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(name, a_pu32Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint32_t u32OldRet; \
        uint32_t u32NewSpill; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \
                             "ldrex     %[uOld], %[pMem]\n\t" \
                             modify32 \
                             "strex     %[rc], %[uNew], %[pMem]\n\t" \
                             "cmp       %[rc], #0\n\t" \
                             "bne       .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu32Mem) \
                             , [uOld] "=&r" (u32OldRet) \
                             , [uNew] "=&r" (u32NewSpill) \
                             , [rc]   "=&r" (rcSpill) \
                             : RT_CONCAT3(RTASM_ARM_,barrier_type,_IN_REG) \
                             , in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_64(name, a_pu64Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint64_t u64NewRet; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \
                             "ldrexd    %[uNew], %H[uNew], %[pMem]\n\t" \
                             modify32 \
                             "strexd    %[rc], %[uNew], %H[uNew], %[pMem]\n\t" \
                             "cmp       %[rc], #0\n\t" \
                             "bne       .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu64Mem), \
                               [uNew] "=&r" (u64NewRet), \
                               [rc]   "=&r" (rcSpill) \
                             : RT_CONCAT3(RTASM_ARM_,barrier_type,_IN_REG) \
                             , in_reg \
                             : "cc")
    #  define RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_64(name, a_pu64Mem, barrier_type, modify64, modify32, in_reg) \
        uint32_t rcSpill; \
        uint64_t u64OldRet; \
        uint64_t u64NewSpill; \
        __asm__ __volatile__(".Ltry_again_" #name "_%=:\n\t" \
                             RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \
                             "ldrexd    %[uOld], %H[uOld], %[pMem]\n\t" \
                             modify32 \
                             "strexd    %[rc], %[uNew], %H[uNew], %[pMem]\n\t" \
                             "cmp       %[rc], #0\n\t" \
                             "bne       .Ltry_again_" #name "_%=\n\t" \
                             : [pMem] "+m"  (*a_pu64Mem), \
                               [uOld] "=&r" (u64OldRet), \
                               [uNew] "=&r" (u64NewSpill), \
                               [rc]   "=&r" (rcSpill) \
                             : RT_CONCAT3(RTASM_ARM_,barrier_type,_IN_REG) \
                             , in_reg \
                             : "cc")
    # endif /* RT_ARCH_ARM32 */
    #endif

The hand-written ARM32/ARM64 inline assembly in the rest of the file is then converted from positional operands ("%0", "%w1", "[%3]") with a "memory" clobber to named operands with a "+m" (*pMem) output constraint, and the hard-coded "dmb sy\n\t" prologues become RTASM_ARM_DMB_SY with the matching RTASM_ARM_DMB_SY_COMMA_IN_REG input. The ARM path of ASMAtomicXchgU32 after the change is representative:

    uint32_t uOld;
    uint32_t rcSpill;
    __asm__ __volatile__(".Ltry_again_ASMAtomicXchgU32_%=:\n\t"
                         RTASM_ARM_DMB_SY
    # if defined(RT_ARCH_ARM64)
                         "ldaxr     %w[uOld], %[pMem]\n\t"
                         "stlxr     %w[rc], %w[uNew], %[pMem]\n\t"
                         "cbnz      %w[rc], .Ltry_again_ASMAtomicXchgU32_%=\n\t"
    # else
                         "ldrex     %[uOld], %[pMem]\n\t" /* ARMv6+ */
                         "strex     %[rc], %[uNew], %[pMem]\n\t"
                         "cmp       %[rc], #0\n\t"
                         "bne       .Ltry_again_ASMAtomicXchgU32_%=\n\t"
    # endif
                         : [pMem] "+m"  (*pu32)
                         , [uOld] "=&r" (uOld)
                         , [rc]   "=&r" (rcSpill)
                         : [uNew] "r"   (u32)
                           RTASM_ARM_DMB_SY_COMMA_IN_REG
                         : "cc");
    return uOld;

The same treatment is applied throughout the file:

- ASMAtomicXchgU8, ASMAtomicXchgU16 and ASMAtomicXchgU64 receive the identical named-operand conversion (ldaxrb/stlxrb, ldaxrh/stlxrh and ldaxr/stlxr on ARM64; ldrexb/strexb, ldrexh/strexh and ldrexd/strexd on ARM32). ASMAtomicXchgU64 now keeps the old value in a dedicated uOld variable, its intrinsics path returns _InterlockedExchange64() directly, and each architecture branch returns its own result instead of falling through to a shared return u64.
- The x86 GNU-style path of ASMAtomicXchgU32 gains the remark /** @todo r=bird: +m rather than =m here? */.
- ASMAtomicCmpXchgU8, ASMAtomicCmpXchgU32, ASMAtomicCmpXchgU64, ASMAtomicCmpXchgExU32 and ASMAtomicCmpXchgExU64 switch to named [pMem], [uOld], [uCmp], [uNew], [rc] and [fXchg] operands with the barrier supplied through RTASM_ARM_DMB_SY and RTASM_ARM_DMB_SY_COMMA_IN_REG; the retry label in ASMAtomicCmpXchgExU64 is renamed to .Ltry_again_ASMAtomicCmpXchgU64.
- ASMSerializeInstruction, ASMMemoryFence, ASMWriteFence and ASMReadFence drop the hard-coded "dsb sy"/"dmb sy"/"dmb st"/"dmb ld" strings and the "Only armv7 and later" notes in favour of RTASM_ARM_DSB_SY, RTASM_ARM_DMB_SY, RTASM_ARM_DMB_ST and RTASM_ARM_DMB_LD with the matching *_IN_REG inputs; since they no longer list a "memory" clobber, the three fence functions gain the doc note "No implicit compiler barrier (which is probably stupid)."
- ASMAtomicReadU8/S8/U16/S16/U32/S32 and their unordered Uo variants gain dedicated ARM32/ARM64 implementations using exclusive loads (ldxrb/ldxrh/ldxr on ARM64, ldrexb/ldrexh/ldrex on ARM32), with RTASM_ARM_DMB_SY emitted only in the ordered variants. ASMAtomicReadU64 and ASMAtomicUoReadU64 merge their previously separate ARM64 and ARM32 paths into one block using ldxr or ldrexd with named operands.
- ASMAtomicUoWriteU8 gains the remark /** @todo Any possible ARM32/ARM64 improvements here? */.
- ASMAtomicAddU32/U64 replace their open-coded retry loops with RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32/OLD_64 (returning u32OldRet/u64OldRet), while ASMAtomicIncU32/U64 and ASMAtomicDecU32/U64 use RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32/NEW_64 (returning u32NewRet/u64NewRet), with RTASM_ARM_PICK_6432 supplying the extra zero operand the 32-bit adc/sbc variants need. ASMAtomicIncU32, for example, becomes:

    RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicIncU32, pu32, DMB_SY,
                                           "add %w[uNew], %w[uNew], #1\n\t",
                                           "add %[uNew], %[uNew], #1\n\t" /* arm6 / thumb2+ */,
                                           "X" (0) /* dummy */);
    return u32NewRet;
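The retry-loop macros take the function name (used to build the local retry label), the memory operand, a barrier-type suffix (DMB_SY, DMB_ST, DMB_LD or NO_BARRIER), separate modify-instruction strings for ARM64 and ARM32, and any extra input operands. As a rough sketch of how a new caller might use them, here is a hypothetical atomic OR that returns the new value; the function name ASMAtomicOrExU32 and its [uVal] operand are illustrative only, mirroring the ASMAtomicAddU32/ASMAtomicIncU32 conversions above rather than anything in this changeset:

    #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
    DECLINLINE(uint32_t) ASMAtomicOrExU32(volatile uint32_t RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
    {
        /* The macro declares rcSpill and u32NewRet and expands to the whole
           exclusive-load / modify / store-conditional retry loop. */
        RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicOrExU32, pu32, DMB_SY,
                                               "orr %w[uNew], %w[uNew], %w[uVal]\n\t", /* ARM64 */
                                               "orr %[uNew], %[uNew], %[uVal]\n\t",    /* ARM32 */
                                               [uVal] "r" (u32));
        return u32NewRet;
    }
    #endif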
trunk/include/iprt/cdefs.h
(r87117 → r87181)

The RT_ARCH_ARM32 documentation now states that the macro carries the architecture version, and the compiler-based auto-detection defines both ARM macros to __ARM_ARCH:

     /** @def RT_ARCH_ARM32
    - * Indicates that we're compiling for the 32-bit ARM architecture.
    + * Indicates that we're compiling for the 32-bit ARM architecture, the value
    + * is the version (i.e. 6 for ARMv6).
      */
    …
     #  define RT_ARCH_SPARC
     # elif defined(__arm64__) || defined(__aarch64__)
    -#  define RT_ARCH_ARM64
    -# elif defined(__arm__) || defined(__arm32__)
    -#  define RT_ARCH_ARM32
    +#  define RT_ARCH_ARM64     __ARM_ARCH
    +# elif defined(__arm__)
    +#  define RT_ARCH_ARM32     __ARM_ARCH
    +# elif defined(__arm32__)
    +#  define RT_ARCH_ARM32     __ARM_ARCH
     # else /* PORTME: append test for new archs. */
     #  error "Check what predefined macros your compiler uses to indicate architecture."
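Because RT_ARCH_ARM64 and RT_ARCH_ARM32 now expand to __ARM_ARCH, consumers can compare the macros numerically, the way the new barrier selection in asm.h does with its RT_ARCH_ARM32 >= 7 / >= 6 / >= 4 ladder. A minimal sketch under that assumption; the MY_USE_CP15_BARRIER macro is made up for illustration and is not part of IPRT:

    #include <iprt/cdefs.h>

    /* Pick a barrier mechanism from the architecture version now carried by
       RT_ARCH_ARM32 (illustrative only). */
    #if defined(RT_ARCH_ARM32)
    # if RT_ARCH_ARM32 >= 7
    #  define MY_USE_CP15_BARRIER 0  /* ARMv7+: dmb/dsb instructions available */
    # elif RT_ARCH_ARM32 >= 6
    #  define MY_USE_CP15_BARRIER 1  /* ARMv6: barrier via mcr p15, 0, rX, c7, c10 */
    # else
    #  define MY_USE_CP15_BARRIER 1  /* ARMv5 and older: CP15 data sync only */
    # endif
    #endif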