Changeset 87189 in vbox
- Timestamp: Jan 6, 2021 1:19:56 PM
- svn:sync-xref-src-repo-rev: 142120
- File: 1 edited
Legend:
- Unmodified (no marker)
- Added (+)
- Removed (-)
trunk/include/iprt/asm.h
r87188 → r87189

@@ 4419,4423 @@
 # elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
-    RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicUoOr 32, pu32, NO_BARRIER,
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicUoOrU32, pu32, NO_BARRIER,
                                            "orr %w[uNew], %w[uNew], %w[uVal]\n\t",
                                            "orr %[uNew], %[uNew], %[uVal]\n\t",

@@ 5186,5224 @@ ASMBitSet
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMBitSet(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
 …
     _bittestandset((long RT_FAR *)pvBitmap, iBit);

-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btsl %1, %0"
                          : "=m" (*(volatile long RT_FAR *)pvBitmap)
 …
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
         mov rax, [pvBitmap]
         mov edx, [iBit]
         bts [rax], edx
-#  else
+#   else
         mov eax, [pvBitmap]
         mov edx, [iBit]
         bts [eax], edx
-#  endif
+#   endif
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    ASMAtomicUoOrU32(&((uint32_t volatile *)pvBitmap)[offBitmap], RT_BIT_32(iBit & 31));
 # endif
 }

@@ 5228,5270 @@ ASMAtomicBitSet
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMAtomicBitSet(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
 …
 # if RT_INLINE_ASM_USES_INTRIN
     _interlockedbittestandset((long RT_FAR *)pvBitmap, iBit);
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btsl %1, %0"
                          : "=m" (*(volatile long *)pvBitmap)
 …
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
         mov rax, [pvBitmap]
         mov edx, [iBit]
         lock bts [rax], edx
-#  else
+#   else
         mov eax, [pvBitmap]
         mov edx, [iBit]
         lock bts [eax], edx
-#  endif
+#   endif
     }
+#  endif
+
+# else
+    ASMAtomicOrU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], RT_BIT_32(iBit & 31));
 # endif
 }

@@ 5271,5322 @@ ASMBitClear
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMBitClear(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
 …
     _bittestandreset((long RT_FAR *)pvBitmap, iBit);

-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btrl %1, %0"
                          : "=m" (*(volatile long RT_FAR *)pvBitmap)
 …
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
         mov rax, [pvBitmap]
         mov edx, [iBit]
         btr [rax], edx
-#  else
+#   else
         mov eax, [pvBitmap]
         mov edx, [iBit]
         btr [eax], edx
-#  endif
+#   endif
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    ASMAtomicUoAndU32(&((uint32_t volatile *)pvBitmap)[offBitmap], ~RT_BIT_32(iBit & 31));
 # endif
 }

@@ 5314,5366 @@ ASMAtomicBitClear
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMAtomicBitClear(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
 …
 {
     AssertMsg(!((uintptr_t)pvBitmap & 3), ("address %p not 32-bit aligned", pvBitmap));
-# if RT_INLINE_ASM_GNU_STYLE
+# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btrl %1, %0"
                          : "=m" (*(volatile long RT_FAR *)pvBitmap)
 …
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
         mov rax, [pvBitmap]
         mov edx, [iBit]
         lock btr [rax], edx
-#  else
+#   else
         mov eax, [pvBitmap]
         mov edx, [iBit]
         lock btr [eax], edx
-#  endif
+#   endif
     }
+#  endif
+# else
+    ASMAtomicAndU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], ~RT_BIT_32(iBit & 31));
 # endif
 }
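The first hunk fixes the name handed to the ARM64/ARM32 load-modify-store macro: the macro's first argument names the function being defined, so the stray space in "ASMAtomicUoOr 32" produced a broken identifier, and the corrected invocation defines ASMAtomicUoOrU32. NO_BARRIER selects the unordered ("Uo") variant, and RET_NEW in the macro name means the new value is returned. As a rough model in terms of compiler builtins (an assumption for illustration; the real macro expands to inline-assembly exclusive-load/store with the orr instructions shown above, and demoUoOrU32 is a hypothetical name):

    #include <stdint.h>

    /* Illustrative stand-in for what ASMAtomicUoOrU32 computes: OR u32
     * into *pu32 with relaxed (unordered) semantics, returning the new
     * value to match the RET_NEW naming. */
    static inline uint32_t demoUoOrU32(volatile uint32_t *pu32, uint32_t u32)
    {
        return __atomic_or_fetch(pu32, u32, __ATOMIC_RELAXED);
    }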
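The remaining hunks restructure the bit set/clear functions so the x86 bts/btr paths sit behind an RT_ARCH_AMD64/RT_ARCH_X86 guard, with a new generic branch for other architectures (gated for now by RT_INLINE_ASM_EXTERNAL_TMP_ARM). The generic branch treats the bitmap as an array of 32-bit words: iBit / 32 selects the word and RT_BIT_32(iBit & 31) builds the mask, which matches the little-endian dword layout the x86 instructions operate on. A minimal model of that word/mask arithmetic (illustrative demo* names, plain C):

    #include <stdint.h>

    /* Set bit iBit in a bitmap laid out as little-endian 32-bit words. */
    static inline void demoBitSet(volatile void *pvBitmap, int32_t iBit)
    {
        volatile uint32_t *pu32 = (volatile uint32_t *)pvBitmap;
        pu32[iBit / 32] |= (uint32_t)1 << (iBit & 31);    /* RT_BIT_32(iBit & 31) */
    }

    /* Clear bit iBit by ANDing with the complemented mask. */
    static inline void demoBitClear(volatile void *pvBitmap, int32_t iBit)
    {
        volatile uint32_t *pu32 = (volatile uint32_t *)pvBitmap;
        pu32[iBit / 32] &= ~((uint32_t)1 << (iBit & 31));
    }

The non-atomic variants additionally carry an AssertStmt that, when pvBitmap turns out to be misaligned, rebases the word offset and bit index before the 32-bit access.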
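For the atomic variants the generic branch goes through the ordered primitives ASMAtomicOrU32 and ASMAtomicAndU32 instead, mirroring lock bts / lock btr on x86; note the AssertMsg context line requiring 32-bit alignment, which the word-sized atomics depend on. Sketched with C11 atomics (an assumption for illustration; iprt supplies its own primitives rather than using <stdatomic.h>, and the demo* names are hypothetical):

    #include <stdatomic.h>
    #include <stdint.h>

    /* Atomically set bit iBit, analogous to ASMAtomicBitSet. */
    static inline void demoAtomicBitSet(volatile void *pvBitmap, int32_t iBit)
    {
        volatile _Atomic uint32_t *pu32 = (volatile _Atomic uint32_t *)pvBitmap;
        atomic_fetch_or(&pu32[iBit / 32], (uint32_t)1 << (iBit & 31));
    }

    /* Atomically clear bit iBit, analogous to ASMAtomicBitClear. */
    static inline void demoAtomicBitClear(volatile void *pvBitmap, int32_t iBit)
    {
        volatile _Atomic uint32_t *pu32 = (volatile _Atomic uint32_t *)pvBitmap;
        atomic_fetch_and(&pu32[iBit / 32], ~((uint32_t)1 << (iBit & 31)));
    }

Typical call pattern against the real API (hypothetical bitmap):

    uint32_t bm[4] = {0};       /* 128 bits, 32-bit aligned by its type */
    ASMBitSet(bm, 77);          /* non-atomic: single writer only */
    ASMAtomicBitClear(bm, 77);  /* atomic: safe under concurrent access */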