Timestamp: Sep 21, 2023 8:01:59 PM (19 months ago)
svn:sync-xref-src-repo-rev: 159211
Location: trunk/include/iprt
Files: 2 edited
Legend: Unmodified | Added | Removed
trunk/include/iprt/asm-arm.h
r100201 r101228 64 64 uint64_t u64; 65 65 # ifdef RT_ARCH_ARM64 66 __asm__ __volatile__("isb\n\t" 66 __asm__ __volatile__("Lstart_ASMReadTSC_%=:\n\t" 67 "isb\n\t" 67 68 "mrs %0, CNTVCT_EL0\n\t" 68 69 : "=r" (u64)); … … 70 71 uint32_t u32Spill; 71 72 uint32_t u32Comp; 72 __asm__ __volatile__("isb\n" 73 "Lagain:\n\t" 73 __asm__ __volatile__("Lstart_ASMReadTSC_%=:\n\t" 74 "isb\n" 75 "Ltry_again_ASMReadTSC_%=:\n\t" 74 76 "mrrc p15, 0, %[uSpill], %H[uRet], c14\n\t" /* CNTPCT high into uRet.hi */ 75 77 "mrrc p15, 0, %[uRet], %[uSpill], c14\n\t" /* CNTPCT low into uRet.lo */ 76 78 "mrrc p15, 0, %[uSpill], %[uHiComp], c14\n\t" /* CNTPCT high into uHiComp */ 77 79 "cmp %H[uRet], %[uHiComp]\n\t" 78 "b.eq L again\n\t"/* Redo if high value changed. */80 "b.eq Ltry_again_ASMReadTSC_%=\n\t" /* Redo if high value changed. */ 79 81 : [uRet] "=r" (u64) 80 82 , "=r" (uHiComp) … … 103 105 uint64_t u64; 104 106 # ifdef RT_ARCH_ARM64 105 __asm__ __volatile__("isb\n\t" 107 __asm__ __volatile__("Lstart_ASMReadCntFrqEl0_%=:\n\t" 108 "isb\n\t" 106 109 "mrs %0, CNTFRQ_EL0\n\t" 107 110 : "=r" (u64)); 108 111 # else 109 112 u64 = 0; 110 __asm__ __volatile__("isb\n" 113 __asm__ __volatile__("Lstart_ASMReadCntFrqEl0_%=:\n\t" 114 "isb\n\t" 111 115 "mrc p15, 0, %[uRet], c14, 0, 0\n\t" /* CNTFRQ */ 112 116 : [uRet] "=r" (u64)); … … 131 135 # if RT_INLINE_ASM_GNU_STYLE 132 136 # ifdef RT_ARCH_ARM64 133 __asm__ __volatile__("msr daifclr, #0xf\n\t"); 137 __asm__ __volatile__("Lstart_ASMIntEnable_%=:\n\t" 138 "msr daifclr, #0xf\n\t"); 134 139 # else 135 140 RTCCUINTREG uFlags; 136 __asm__ __volatile__("mrs %0, cpsr\n\t" 141 __asm__ __volatile__("Lstart_ASMIntEnable_%=:\n\t" 142 "mrs %0, cpsr\n\t" 137 143 "bic %0, %0, #0xc0\n\t" 138 144 "msr cpsr_c, %0\n\t" … … 156 162 # if RT_INLINE_ASM_GNU_STYLE 157 163 # ifdef RT_ARCH_ARM64 158 __asm__ __volatile__("msr daifset, #0xf\n\t"); 164 __asm__ __volatile__("Lstart_ASMIntDisable_%=:\n\t" 165 "msr daifset, #0xf\n\t"); 159 166 # else 160 167 RTCCUINTREG 
uFlags; 161 __asm__ __volatile__("mrs %0, cpsr\n\t" 168 __asm__ __volatile__("Lstart_ASMIntDisable_%=:\n\t" 169 "mrs %0, cpsr\n\t" 162 170 "orr %0, %0, #0xc0\n\t" 163 171 "msr cpsr_c, %0\n\t" … … 182 190 # if RT_INLINE_ASM_GNU_STYLE 183 191 # ifdef RT_ARCH_ARM64 184 __asm__ __volatile__("mrs %[uRet], daif\n\t" 192 __asm__ __volatile__("Lstart_ASMIntDisableFlags_%=:\n\t" 193 "mrs %[uRet], daif\n\t" 185 194 "msr daifset, #0xf\n\t" 186 195 : [uRet] "=r" (uFlags)); 187 196 # else 188 197 RTCCUINTREG uNewFlags; 189 __asm__ __volatile__("mrs %0, cpsr\n\t" 198 __asm__ __volatile__("Lstart_ASMIntDisableFlags_%=:\n\t" 199 "mrs %0, cpsr\n\t" 190 200 "orr %1, %0, #0xc0\n\t" 191 201 "msr cpsr_c, %1\n\t" … … 212 222 # if RT_INLINE_ASM_GNU_STYLE 213 223 # ifdef RT_ARCH_ARM64 214 __asm__ __volatile__("isb\n\t" 224 __asm__ __volatile__("Lstart_ASMGetFlags_%=:\n\t" 225 "isb\n\t" 215 226 "mrs %0, daif\n\t" 216 227 : "=r" (uFlags)); … … 236 247 # if RT_INLINE_ASM_GNU_STYLE 237 248 # ifdef RT_ARCH_ARM64 238 __asm__ __volatile__("isb\n\t" 249 __asm__ __volatile__("Lstart_ASMSetFlags_%=:\n\t" 250 "isb\n\t" 239 251 "msr daif, %[uFlags]\n\t" 240 252 : : [uFlags] "r" (uFlags)); … … 270 282 { 271 283 # if RT_INLINE_ASM_GNU_STYLE 272 __asm__ __volatile__ ("wfi\n\t"); /* wait for interrupt */ 284 __asm__ __volatile__ ("Lstart_ASMHalt_%=:\n\t" 285 "wfi\n\t"); /* wait for interrupt */ 273 286 # else 274 287 # error "Unsupported compiler" … … 292 305 # if RT_INLINE_ASM_GNU_STYLE 293 306 RTCCUINTREG uCpuId; 294 __asm__ ("mrc p15, 0, %0, c0, c0, 5\n\t" /* CPU ID Register, privileged */ 307 __asm__ ("Lstart_ASMGetApicId_%=:\n\t" 308 "mrc p15, 0, %0, c0, c0, 5\n\t" /* CPU ID Register, privileged */ 295 309 : "=r" (uCpuId)); 296 310 return uCpuId; -
trunk/include/iprt/asm.h
r100318 r101228 221 221 uint32_t rcSpill; \ 222 222 uint32_t u32NewRet; \ 223 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \223 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 224 224 RTASM_ARM_##barrier_type /* before lable? */ \ 225 225 "ldaxr %w[uNew], %[pMem]\n\t" \ 226 226 modify64 \ 227 227 "stlxr %w[rc], %w[uNew], %[pMem]\n\t" \ 228 "cbnz %w[rc], .Ltry_again_" #name "_%=\n\t" \228 "cbnz %w[rc], Ltry_again_" #name "_%=\n\t" \ 229 229 : [pMem] "+Q" (*a_pu32Mem) \ 230 230 , [uNew] "=&r" (u32NewRet) \ … … 236 236 uint32_t u32OldRet; \ 237 237 uint32_t u32NewSpill; \ 238 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \238 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 239 239 RTASM_ARM_##barrier_type /* before lable? */ \ 240 240 "ldaxr %w[uOld], %[pMem]\n\t" \ 241 241 modify64 \ 242 242 "stlxr %w[rc], %w[uNew], %[pMem]\n\t" \ 243 "cbnz %w[rc], .Ltry_again_" #name "_%=\n\t" \243 "cbnz %w[rc], Ltry_again_" #name "_%=\n\t" \ 244 244 : [pMem] "+Q" (*a_pu32Mem) \ 245 245 , [uOld] "=&r" (u32OldRet) \ … … 251 251 uint32_t rcSpill; \ 252 252 uint64_t u64NewRet; \ 253 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \253 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 254 254 RTASM_ARM_##barrier_type /* before lable? */ \ 255 255 "ldaxr %[uNew], %[pMem]\n\t" \ 256 256 modify64 \ 257 257 "stlxr %w[rc], %[uNew], %[pMem]\n\t" \ 258 "cbnz %w[rc], .Ltry_again_" #name "_%=\n\t" \258 "cbnz %w[rc], Ltry_again_" #name "_%=\n\t" \ 259 259 : [pMem] "+Q" (*a_pu64Mem) \ 260 260 , [uNew] "=&r" (u64NewRet) \ … … 266 266 uint64_t u64OldRet; \ 267 267 uint64_t u64NewSpill; \ 268 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \268 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 269 269 RTASM_ARM_##barrier_type /* before lable? 
*/ \ 270 270 "ldaxr %[uOld], %[pMem]\n\t" \ 271 271 modify64 \ 272 272 "stlxr %w[rc], %[uNew], %[pMem]\n\t" \ 273 "cbnz %w[rc], .Ltry_again_" #name "_%=\n\t" \273 "cbnz %w[rc], Ltry_again_" #name "_%=\n\t" \ 274 274 : [pMem] "+Q" (*a_pu64Mem) \ 275 275 , [uOld] "=&r" (u64OldRet) \ … … 324 324 uint32_t rcSpill; \ 325 325 uint32_t u32NewRet; \ 326 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \326 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 327 327 RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \ 328 328 "ldrex %[uNew], %[pMem]\n\t" \ … … 330 330 "strex %[rc], %[uNew], %[pMem]\n\t" \ 331 331 "cmp %[rc], #0\n\t" \ 332 "bne .Ltry_again_" #name "_%=\n\t" \332 "bne Ltry_again_" #name "_%=\n\t" \ 333 333 : [pMem] "+m" (*a_pu32Mem) \ 334 334 , [uNew] "=&r" (u32NewRet) \ … … 341 341 uint32_t u32OldRet; \ 342 342 uint32_t u32NewSpill; \ 343 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \343 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 344 344 RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \ 345 345 "ldrex %[uOld], %[pMem]\n\t" \ … … 347 347 "strex %[rc], %[uNew], %[pMem]\n\t" \ 348 348 "cmp %[rc], #0\n\t" \ 349 "bne .Ltry_again_" #name "_%=\n\t" \349 "bne Ltry_again_" #name "_%=\n\t" \ 350 350 : [pMem] "+m" (*a_pu32Mem) \ 351 351 , [uOld] "=&r" (u32OldRet) \ … … 358 358 uint32_t rcSpill; \ 359 359 uint64_t u64NewRet; \ 360 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \360 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 361 361 RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? 
*/ \ 362 362 "ldrexd %[uNew], %H[uNew], %[pMem]\n\t" \ … … 364 364 "strexd %[rc], %[uNew], %H[uNew], %[pMem]\n\t" \ 365 365 "cmp %[rc], #0\n\t" \ 366 "bne .Ltry_again_" #name "_%=\n\t" \366 "bne Ltry_again_" #name "_%=\n\t" \ 367 367 : [pMem] "+m" (*a_pu64Mem), \ 368 368 [uNew] "=&r" (u64NewRet), \ … … 375 375 uint64_t u64OldRet; \ 376 376 uint64_t u64NewSpill; \ 377 __asm__ __volatile__(" .Ltry_again_" #name "_%=:\n\t" \377 __asm__ __volatile__("Ltry_again_" #name "_%=:\n\t" \ 378 378 RT_CONCAT(RTASM_ARM_,barrier_type) /* before lable? */ \ 379 379 "ldrexd %[uOld], %H[uOld], %[pMem]\n\t" \ … … 381 381 "strexd %[rc], %[uNew], %H[uNew], %[pMem]\n\t" \ 382 382 "cmp %[rc], #0\n\t" \ 383 "bne .Ltry_again_" #name "_%=\n\t" \383 "bne Ltry_again_" #name "_%=\n\t" \ 384 384 : [pMem] "+m" (*a_pu64Mem), \ 385 385 [uOld] "=&r" (u64OldRet), \ … … 518 518 uint32_t uOld; 519 519 uint32_t rcSpill; 520 __asm__ __volatile__(" .Ltry_again_ASMAtomicXchgU8_%=:\n\t"520 __asm__ __volatile__("Ltry_again_ASMAtomicXchgU8_%=:\n\t" 521 521 RTASM_ARM_DMB_SY 522 522 # if defined(RT_ARCH_ARM64) 523 523 "ldaxrb %w[uOld], %[pMem]\n\t" 524 524 "stlxrb %w[rc], %w[uNew], %[pMem]\n\t" 525 "cbnz %w[rc], .Ltry_again_ASMAtomicXchgU8_%=\n\t"525 "cbnz %w[rc], Ltry_again_ASMAtomicXchgU8_%=\n\t" 526 526 # else 527 527 "ldrexb %[uOld], %[pMem]\n\t" /* ARMv6+ */ 528 528 "strexb %[rc], %[uNew], %[pMem]\n\t" 529 529 "cmp %[rc], #0\n\t" 530 "bne .Ltry_again_ASMAtomicXchgU8_%=\n\t"530 "bne Ltry_again_ASMAtomicXchgU8_%=\n\t" 531 531 # endif 532 532 : [pMem] "+Q" (*pu8) … … 615 615 uint32_t uOld; 616 616 uint32_t rcSpill; 617 __asm__ __volatile__(" .Ltry_again_ASMAtomicXchgU16_%=:\n\t"617 __asm__ __volatile__("Ltry_again_ASMAtomicXchgU16_%=:\n\t" 618 618 RTASM_ARM_DMB_SY 619 619 # if defined(RT_ARCH_ARM64) 620 620 "ldaxrh %w[uOld], %[pMem]\n\t" 621 621 "stlxrh %w[rc], %w[uNew], %[pMem]\n\t" 622 "cbnz %w[rc], .Ltry_again_ASMAtomicXchgU16_%=\n\t"622 "cbnz %w[rc], Ltry_again_ASMAtomicXchgU16_%=\n\t" 623 623 # else 624 
624 "ldrexh %[uOld], %[pMem]\n\t" /* ARMv6+ */ 625 625 "strexh %[rc], %[uNew], %[pMem]\n\t" 626 626 "cmp %[rc], #0\n\t" 627 "bne .Ltry_again_ASMAtomicXchgU16_%=\n\t"627 "bne Ltry_again_ASMAtomicXchgU16_%=\n\t" 628 628 # endif 629 629 : [pMem] "+Q" (*pu16) … … 701 701 uint32_t uOld; 702 702 uint32_t rcSpill; 703 __asm__ __volatile__(" .Ltry_again_ASMAtomicXchgU32_%=:\n\t"703 __asm__ __volatile__("Ltry_again_ASMAtomicXchgU32_%=:\n\t" 704 704 RTASM_ARM_DMB_SY 705 705 # if defined(RT_ARCH_ARM64) 706 706 "ldaxr %w[uOld], %[pMem]\n\t" 707 707 "stlxr %w[rc], %w[uNew], %[pMem]\n\t" 708 "cbnz %w[rc], .Ltry_again_ASMAtomicXchgU32_%=\n\t"708 "cbnz %w[rc], Ltry_again_ASMAtomicXchgU32_%=\n\t" 709 709 # else 710 710 "ldrex %[uOld], %[pMem]\n\t" /* ARMv6+ */ 711 711 "strex %[rc], %[uNew], %[pMem]\n\t" 712 712 "cmp %[rc], #0\n\t" 713 "bne .Ltry_again_ASMAtomicXchgU32_%=\n\t"713 "bne Ltry_again_ASMAtomicXchgU32_%=\n\t" 714 714 # endif 715 715 : [pMem] "+Q" (*pu32) … … 827 827 uint32_t rcSpill; 828 828 uint64_t uOld; 829 __asm__ __volatile__(" .Ltry_again_ASMAtomicXchgU64_%=:\n\t"829 __asm__ __volatile__("Ltry_again_ASMAtomicXchgU64_%=:\n\t" 830 830 RTASM_ARM_DMB_SY 831 831 # if defined(RT_ARCH_ARM64) 832 832 "ldaxr %[uOld], %[pMem]\n\t" 833 833 "stlxr %w[rc], %[uNew], %[pMem]\n\t" 834 "cbnz %w[rc], .Ltry_again_ASMAtomicXchgU64_%=\n\t"834 "cbnz %w[rc], Ltry_again_ASMAtomicXchgU64_%=\n\t" 835 835 # else 836 836 "ldrexd %[uOld], %H[uOld], %[pMem]\n\t" /* ARMv6+ */ 837 837 "strexd %[rc], %[uNew], %H[uNew], %[pMem]\n\t" 838 838 "cmp %[rc], #0\n\t" 839 "bne .Ltry_again_ASMAtomicXchgU64_%=\n\t"839 "bne Ltry_again_ASMAtomicXchgU64_%=\n\t" 840 840 # endif 841 841 : [pMem] "+Q" (*pu64) … … 1085 1085 uint32_t u32Spill; 1086 1086 uint32_t rcSpill; 1087 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgU8_%=:\n\t"1087 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgU8_%=:\n\t" 1088 1088 RTASM_ARM_DMB_SY 1089 1089 # if defined(RT_ARCH_ARM64) … … 1092 1092 "bne 1f\n\t" /* stop here if not equal 
*/ 1093 1093 "stlxrb %w[rc], %w[uNew], %[pMem]\n\t" 1094 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgU8_%=\n\t"1094 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgU8_%=\n\t" 1095 1095 "mov %w[fXchg], #1\n\t" 1096 1096 # else … … 1100 1100 "bne 1f\n\t" /* stop here if not equal */ 1101 1101 "cmp %[rc], #0\n\t" 1102 "bne .Ltry_again_ASMAtomicCmpXchgU8_%=\n\t"1102 "bne Ltry_again_ASMAtomicCmpXchgU8_%=\n\t" 1103 1103 "mov %[fXchg], #1\n\t" 1104 1104 # endif … … 1222 1222 uint32_t u32Spill; 1223 1223 uint32_t rcSpill; 1224 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgU32_%=:\n\t"1224 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgU32_%=:\n\t" 1225 1225 RTASM_ARM_DMB_SY 1226 1226 # if defined(RT_ARCH_ARM64) … … 1229 1229 "bne 1f\n\t" /* stop here if not equal */ 1230 1230 "stlxr %w[rc], %w[uNew], %[pMem]\n\t" 1231 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgU32_%=\n\t"1231 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgU32_%=\n\t" 1232 1232 "mov %w[fXchg], #1\n\t" 1233 1233 # else … … 1237 1237 "bne 1f\n\t" /* stop here if not equal */ 1238 1238 "cmp %[rc], #0\n\t" 1239 "bne .Ltry_again_ASMAtomicCmpXchgU32_%=\n\t"1239 "bne Ltry_again_ASMAtomicCmpXchgU32_%=\n\t" 1240 1240 "mov %[fXchg], #1\n\t" 1241 1241 # endif … … 1384 1384 uint64_t u64Spill; 1385 1385 uint32_t rcSpill; 1386 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgU64_%=:\n\t"1386 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgU64_%=:\n\t" 1387 1387 RTASM_ARM_DMB_SY 1388 1388 # if defined(RT_ARCH_ARM64) … … 1391 1391 "bne 1f\n\t" /* stop here if not equal */ 1392 1392 "stlxr %w[rc], %[uNew], %[pMem]\n\t" 1393 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"1393 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgU64_%=\n\t" 1394 1394 "mov %w[fXchg], #1\n\t" 1395 1395 # else … … 1400 1400 "bne 1f\n\t" /* stop here if not equal */ 1401 1401 "cmp %[rc], #0\n\t" 1402 "bne .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"1402 "bne Ltry_again_ASMAtomicCmpXchgU64_%=\n\t" 1403 1403 "mov %[fXchg], #1\n\t" 1404 1404 # endif … … 1729 1729 
uint8_t u8ActualOld; 1730 1730 uint8_t rcSpill; 1731 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgExU8_%=:\n\t"1731 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgExU8_%=:\n\t" 1732 1732 RTASM_ARM_DMB_SY 1733 1733 # if defined(RT_ARCH_ARM64) … … 1736 1736 "bne 1f\n\t" /* stop here if not equal */ 1737 1737 "stlxrb %w[rc], %w[uNew], %[pMem]\n\t" 1738 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgExU8_%=\n\t"1738 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgExU8_%=\n\t" 1739 1739 "mov %w[fXchg], #1\n\t" 1740 1740 # else … … 1744 1744 "bne 1f\n\t" /* stop here if not equal */ 1745 1745 "cmp %[rc], #0\n\t" 1746 "bne .Ltry_again_ASMAtomicCmpXchgExU8_%=\n\t"1746 "bne Ltry_again_ASMAtomicCmpXchgExU8_%=\n\t" 1747 1747 "mov %[fXchg], #1\n\t" 1748 1748 # endif … … 1854 1854 uint16_t u16ActualOld; 1855 1855 uint16_t rcSpill; 1856 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgExU16_%=:\n\t"1856 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgExU16_%=:\n\t" 1857 1857 RTASM_ARM_DMB_SY 1858 1858 # if defined(RT_ARCH_ARM64) … … 1861 1861 "bne 1f\n\t" /* stop here if not equal */ 1862 1862 "stlxrh %w[rc], %w[uNew], %[pMem]\n\t" 1863 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgExU16_%=\n\t"1863 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgExU16_%=\n\t" 1864 1864 "mov %w[fXchg], #1\n\t" 1865 1865 # else … … 1869 1869 "bne 1f\n\t" /* stop here if not equal */ 1870 1870 "cmp %[rc], #0\n\t" 1871 "bne .Ltry_again_ASMAtomicCmpXchgExU16_%=\n\t"1871 "bne Ltry_again_ASMAtomicCmpXchgExU16_%=\n\t" 1872 1872 "mov %[fXchg], #1\n\t" 1873 1873 # endif … … 1979 1979 uint32_t u32ActualOld; 1980 1980 uint32_t rcSpill; 1981 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgExU32_%=:\n\t"1981 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgExU32_%=:\n\t" 1982 1982 RTASM_ARM_DMB_SY 1983 1983 # if defined(RT_ARCH_ARM64) … … 1986 1986 "bne 1f\n\t" /* stop here if not equal */ 1987 1987 "stlxr %w[rc], %w[uNew], %[pMem]\n\t" 1988 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t"1988 "cbnz %w[rc], 
Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t" 1989 1989 "mov %w[fXchg], #1\n\t" 1990 1990 # else … … 1994 1994 "bne 1f\n\t" /* stop here if not equal */ 1995 1995 "cmp %[rc], #0\n\t" 1996 "bne .Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t"1996 "bne Ltry_again_ASMAtomicCmpXchgExU32_%=\n\t" 1997 1997 "mov %[fXchg], #1\n\t" 1998 1998 # endif … … 2149 2149 uint64_t u64ActualOld; 2150 2150 uint32_t rcSpill; 2151 __asm__ __volatile__(" .Ltry_again_ASMAtomicCmpXchgU64_%=:\n\t"2151 __asm__ __volatile__("Ltry_again_ASMAtomicCmpXchgU64_%=:\n\t" 2152 2152 RTASM_ARM_DMB_SY 2153 2153 # if defined(RT_ARCH_ARM64) … … 2156 2156 "bne 1f\n\t" /* stop here if not equal */ 2157 2157 "stlxr %w[rc], %[uNew], %[pMem]\n\t" 2158 "cbnz %w[rc], .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"2158 "cbnz %w[rc], Ltry_again_ASMAtomicCmpXchgU64_%=\n\t" 2159 2159 "mov %w[fXchg], #1\n\t" 2160 2160 # else … … 2165 2165 "bne 1f\n\t" /* stop here if not equal */ 2166 2166 "cmp %[rc], #0\n\t" 2167 "bne .Ltry_again_ASMAtomicCmpXchgU64_%=\n\t"2167 "bne Ltry_again_ASMAtomicCmpXchgU64_%=\n\t" 2168 2168 "mov %[fXchg], #1\n\t" 2169 2169 # endif … … 2681 2681 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2682 2682 uint32_t u32; 2683 __asm__ __volatile__(" .Lstart_ASMAtomicReadU8_%=:\n\t"2683 __asm__ __volatile__("Lstart_ASMAtomicReadU8_%=:\n\t" 2684 2684 RTASM_ARM_DMB_SY 2685 2685 # if defined(RT_ARCH_ARM64) … … 2709 2709 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2710 2710 uint32_t u32; 2711 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadU8_%=:\n\t"2711 __asm__ __volatile__("Lstart_ASMAtomicUoReadU8_%=:\n\t" 2712 2712 # if defined(RT_ARCH_ARM64) 2713 2713 "ldxrb %w[uDst], %[pMem]\n\t" … … 2735 2735 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2736 2736 int32_t i32; 2737 __asm__ __volatile__(" .Lstart_ASMAtomicReadS8_%=:\n\t"2737 __asm__ __volatile__("Lstart_ASMAtomicReadS8_%=:\n\t" 2738 2738 RTASM_ARM_DMB_SY 2739 2739 # if defined(RT_ARCH_ARM64) … … 2762 2762 #if defined(RT_ARCH_ARM64) || 
defined(RT_ARCH_ARM32) 2763 2763 int32_t i32; 2764 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadS8_%=:\n\t"2764 __asm__ __volatile__("Lstart_ASMAtomicUoReadS8_%=:\n\t" 2765 2765 # if defined(RT_ARCH_ARM64) 2766 2766 "ldxrb %w[iDst], %[pMem]\n\t" … … 2788 2788 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2789 2789 uint32_t u32; 2790 __asm__ __volatile__(" .Lstart_ASMAtomicReadU16_%=:\n\t"2790 __asm__ __volatile__("Lstart_ASMAtomicReadU16_%=:\n\t" 2791 2791 RTASM_ARM_DMB_SY 2792 2792 # if defined(RT_ARCH_ARM64) … … 2817 2817 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2818 2818 uint32_t u32; 2819 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadU16_%=:\n\t"2819 __asm__ __volatile__("Lstart_ASMAtomicUoReadU16_%=:\n\t" 2820 2820 # if defined(RT_ARCH_ARM64) 2821 2821 "ldxrh %w[uDst], %[pMem]\n\t" … … 2843 2843 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2844 2844 int32_t i32; 2845 __asm__ __volatile__(" .Lstart_ASMAtomicReadS16_%=:\n\t"2845 __asm__ __volatile__("Lstart_ASMAtomicReadS16_%=:\n\t" 2846 2846 RTASM_ARM_DMB_SY 2847 2847 # if defined(RT_ARCH_ARM64) … … 2872 2872 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2873 2873 int32_t i32; 2874 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadS16_%=:\n\t"2874 __asm__ __volatile__("Lstart_ASMAtomicUoReadS16_%=:\n\t" 2875 2875 # if defined(RT_ARCH_ARM64) 2876 2876 "ldxrh %w[iDst], %[pMem]\n\t" … … 2898 2898 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2899 2899 uint32_t u32; 2900 __asm__ __volatile__(" .Lstart_ASMAtomicReadU32_%=:\n\t"2900 __asm__ __volatile__("Lstart_ASMAtomicReadU32_%=:\n\t" 2901 2901 RTASM_ARM_DMB_SY 2902 2902 # if defined(RT_ARCH_ARM64) … … 2930 2930 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2931 2931 uint32_t u32; 2932 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadU32_%=:\n\t"2932 __asm__ __volatile__("Lstart_ASMAtomicUoReadU32_%=:\n\t" 2933 2933 # if defined(RT_ARCH_ARM64) 2934 2934 "ldxr %w[uDst], %[pMem]\n\t" … … 2959 2959 #if defined(RT_ARCH_ARM64) 
|| defined(RT_ARCH_ARM32) 2960 2960 int32_t i32; 2961 __asm__ __volatile__(" .Lstart_ASMAtomicReadS32_%=:\n\t"2961 __asm__ __volatile__("Lstart_ASMAtomicReadS32_%=:\n\t" 2962 2962 RTASM_ARM_DMB_SY 2963 2963 # if defined(RT_ARCH_ARM64) … … 2991 2991 #if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 2992 2992 int32_t i32; 2993 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadS32_%=:\n\t"2993 __asm__ __volatile__("Lstart_ASMAtomicUoReadS32_%=:\n\t" 2994 2994 # if defined(RT_ARCH_ARM64) 2995 2995 "ldxr %w[iDst], %[pMem]\n\t" … … 3091 3091 # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 3092 3092 Assert(!((uintptr_t)pu64 & 7)); 3093 __asm__ __volatile__(" .Lstart_ASMAtomicReadU64_%=:\n\t"3093 __asm__ __volatile__("Lstart_ASMAtomicReadU64_%=:\n\t" 3094 3094 RTASM_ARM_DMB_SY 3095 3095 # if defined(RT_ARCH_ARM64) … … 3193 3193 # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 3194 3194 Assert(!((uintptr_t)pu64 & 7)); 3195 __asm__ __volatile__(" .Lstart_ASMAtomicUoReadU64_%=:\n\t"3195 __asm__ __volatile__("Lstart_ASMAtomicUoReadU64_%=:\n\t" 3196 3196 # if defined(RT_ARCH_ARM64) 3197 3197 "ldxr %[uDst], %[pMem]\n\t" … … 5942 5942 # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32) 5943 5943 uint32_t u32; 5944 __asm__ __volatile__(" .Lstart_ASMProbeReadByte_%=:\n\t"5944 __asm__ __volatile__("Lstart_ASMProbeReadByte_%=:\n\t" 5945 5945 # if defined(RT_ARCH_ARM64) 5946 5946 "ldxrb %w[uDst], %[pMem]\n\t"
Note: See TracChangeset for help on using the changeset viewer.