Changeset 87191 in vbox
- Timestamp: Jan 7, 2021 7:59:09 PM (4 years ago)
- File: 1 edited
Legend:
- Unmodified lines: no prefix
- Added lines: prefixed with +
- Removed lines: prefixed with -
trunk/include/iprt/asm.h (r87189 → r87191)
 # pragma intrinsic(_InterlockedAnd)
 # pragma intrinsic(_InterlockedOr)
+# pragma intrinsic(_InterlockedXor)
 # pragma intrinsic(_InterlockedIncrement)
 # pragma intrinsic(_InterlockedDecrement)
…
 
 /**
+ * Atomically OR an unsigned 32-bit value, ordered, extended version (for bitmap
+ * fallback).
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to OR @a u32 with.
+ * @param   u32    The value to OR @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicOrExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicOrEx32, pu32, DMB_SY,
+                                           "orr %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "orr %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    uint32_t u32RetOld = ASMAtomicUoReadU32(pu32);
+    uint32_t u32New;
+    do
+        u32New = u32RetOld | u32;
+    while (!ASMAtomicCmpXchgExU32(pu32, u32New, u32RetOld, &u32RetOld));
+    return u32RetOld;
+#endif
+}
+
+
+/**
  * Atomically Or a signed 32-bit value, ordered.
  *
…
 
 /**
+ * Atomically AND an unsigned 32-bit value, ordered, extended version.
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to AND @a u32 with.
+ * @param   u32    The value to AND @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicAndExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicAndEx32, pu32, DMB_SY,
+                                           "and %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "and %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    uint32_t u32RetOld = ASMAtomicUoReadU32(pu32);
+    uint32_t u32New;
+    do
+        u32New = u32RetOld & u32;
+    while (!ASMAtomicCmpXchgExU32(pu32, u32New, u32RetOld, &u32RetOld));
+    return u32RetOld;
+#endif
+}
+
+
+/**
  * Atomically And a signed 32-bit value, ordered.
  *
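The non-ARM path of the new *Ex helpers is a plain compare-exchange retry loop that hands back the value the variable held before the OR/AND was applied. A minimal stand-alone sketch of the same pattern, written against C11 <stdatomic.h> rather than the IPRT primitives (the function name is invented for illustration):

    #include <stdatomic.h>
    #include <stdint.h>

    /* Sketch: OR u32 into *pu32 and return the previous contents, mirroring the
     * ASMAtomicCmpXchgExU32-based fallback above.  When the weak compare-exchange
     * fails it refreshes uOld with the current value, so the loop simply retries
     * with a freshly computed uOld | u32. */
    static uint32_t fetchOrOldU32(_Atomic uint32_t *pu32, uint32_t u32)
    {
        uint32_t uOld = atomic_load_explicit(pu32, memory_order_relaxed);
        while (!atomic_compare_exchange_weak(pu32, &uOld, uOld | u32))
        { /* uOld was updated by the failed exchange; try again */ }
        return uOld;
    }

Plain C11 also offers the result directly as atomic_fetch_or(); the loop form is what the header uses because it only relies on the compare-exchange primitive it already has.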
…
 {
     ASMAtomicAndU64((uint64_t volatile RT_FAR *)pi64, (uint64_t)i64);
+}
+
+
+/**
+ * Atomically XOR an unsigned 32-bit value and a memory location, ordered.
+ *
+ * @param   pu32   Pointer to the variable to XOR @a u32 with.
+ * @param   u32    The value to XOR @a *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
+ */
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
+RT_ASM_DECL_PRAGMA_WATCOM(void) ASMAtomicXorU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_PROTO;
+#else
+DECLINLINE(void) ASMAtomicXorU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+# if RT_INLINE_ASM_USES_INTRIN
+    _InterlockedXor((long volatile RT_FAR *)pu32, u32);
+
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
+    __asm__ __volatile__("lock; xorl %1, %0\n\t"
+                         : "=m" (*pu32)
+                         : "ir" (u32)
+                         , "m" (*pu32)
+                         : "cc");
+#  else
+    __asm
+    {
+        mov     eax, [u32]
+#   ifdef RT_ARCH_AMD64
+        mov     rdx, [pu32]
+        lock xor [rdx], eax
+#   else
+        mov     edx, [pu32]
+        lock xor [edx], eax
+#   endif
+    }
+#  endif
+
+# elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicXor32, pu32, DMB_SY,
+                                           "eor %w[uNew], %w[uNew], %w[uVal]\n\t",
+                                           "eor %[uNew], %[uNew], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+
+# else
+#  error "Port me"
+# endif
+}
+#endif
+
+
+/**
+ * Atomically XOR an unsigned 32-bit value and a memory location, ordered,
+ * extended version (for bitmaps).
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to XOR @a u32 with.
+ * @param   u32    The value to XOR @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicXorExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicXorEx32, pu32, DMB_SY,
+                                           "eor %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "eor %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    uint32_t u32RetOld = ASMAtomicUoReadU32(pu32);
+    uint32_t u32New;
+    do
+        u32New = u32RetOld ^ u32;
+    while (!ASMAtomicCmpXchgExU32(pu32, u32New, u32RetOld, &u32RetOld));
+    return u32RetOld;
+#endif
+}
+
+
+/**
+ * Atomically XOR a signed 32-bit value, ordered.
+ *
+ * @param   pi32   Pointer to the variable to XOR @a i32 with.
+ * @param   i32    The value to XOR @a *pi32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
+ */
+DECLINLINE(void) ASMAtomicXorS32(int32_t volatile RT_FAR *pi32, int32_t i32) RT_NOTHROW_DEF
+{
+    ASMAtomicXorU32((uint32_t volatile RT_FAR *)pi32, (uint32_t)i32);
 }
 
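ASMAtomicXorU32 gives callers a single locked instruction (or an LL/SC loop on ARM) for flipping a set of bits in a shared word. A hypothetical usage sketch; the flag names and the status variable are invented here:

    #include <iprt/asm.h>

    /* Hypothetical flag bits in a shared status word. */
    #define MYDEV_STS_BUSY  RT_BIT_32(0)
    #define MYDEV_STS_IRQ   RT_BIT_32(1)

    static uint32_t volatile g_fMyDevStatus = 0;

    /* Flip both flags in one atomic operation so readers on other CPUs never
     * observe a state where only one of them has changed. */
    static void myDevToggleBusyAndIrq(void)
    {
        ASMAtomicXorU32(&g_fMyDevStatus, MYDEV_STS_BUSY | MYDEV_STS_IRQ);
    }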
…
 
 /**
+ * Atomically OR an unsigned 32-bit value, unordered but interrupt safe,
+ * extended version (for bitmap fallback).
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to OR @a u32 with.
+ * @param   u32    The value to OR @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicUoOrExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicUoOrExU32, pu32, NO_BARRIER,
+                                           "orr %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "orr %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    return ASMAtomicOrExU32(pu32, u32); /* (we have no unordered cmpxchg primitive atm.) */
+#endif
+}
+
+
+/**
  * Atomically OR a signed 32-bit value, unordered.
  *
…
 
 /**
+ * Atomically AND an unsigned 32-bit value, unordered, extended version (for
+ * bitmap fallback).
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to AND @a u32 with.
+ * @param   u32    The value to AND @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicUoAndExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicUoAndEx32, pu32, NO_BARRIER,
+                                           "and %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "and %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    return ASMAtomicAndExU32(pu32, u32); /* (we have no unordered cmpxchg primitive atm.) */
+#endif
+}
+
+
+/**
  * Atomically And a signed 32-bit value, unordered.
  *
…
 {
     ASMAtomicUoAndU64((uint64_t volatile RT_FAR *)pi64, (uint64_t)i64);
+}
+
+
+/**
+ * Atomically XOR an unsigned 32-bit value, unordered but interrupt safe.
+ *
+ * @param   pu32   Pointer to the variable to XOR @a u32 with.
+ * @param   u32    The value to XOR @a *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
+ */
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM
+RT_ASM_DECL_PRAGMA_WATCOM(void) ASMAtomicUoXorU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_PROTO;
+#else
+DECLINLINE(void) ASMAtomicUoXorU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
+    __asm__ __volatile__("xorl %1, %0\n\t"
+                         : "=m" (*pu32)
+                         : "ir" (u32)
+                         , "m" (*pu32)
+                         : "cc");
+#  else
+    __asm
+    {
+        mov     eax, [u32]
+#   ifdef RT_ARCH_AMD64
+        mov     rdx, [pu32]
+        xor     [rdx], eax
+#   else
+        mov     edx, [pu32]
+        xor     [edx], eax
+#   endif
+    }
+#  endif
+
+# elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_NEW_32(ASMAtomicUoXorU32, pu32, NO_BARRIER,
+                                           "eor %w[uNew], %w[uNew], %w[uVal]\n\t",
+                                           "eor %[uNew], %[uNew], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+
+# else
+#  error "Port me"
+# endif
+}
+#endif
+
+
+/**
+ * Atomically XOR an unsigned 32-bit value, unordered but interrupt safe,
+ * extended version (for bitmap fallback).
+ *
+ * @returns Old value.
+ * @param   pu32   Pointer to the variable to XOR @a u32 with.
+ * @param   u32    The value to XOR @a *pu32 with.
+ */
+DECLINLINE(uint32_t) ASMAtomicUoXorExU32(uint32_t volatile RT_FAR *pu32, uint32_t u32) RT_NOTHROW_DEF
+{
+#if defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
+    RTASM_ARM_LOAD_MODIFY_STORE_RET_OLD_32(ASMAtomicUoXorExU32, pu32, NO_BARRIER,
+                                           "eor %w[uNew], %w[uOld], %w[uVal]\n\t",
+                                           "eor %[uNew], %[uOld], %[uVal]\n\t",
+                                           [uVal] "r" (u32));
+    return u32OldRet;
+
+#else
+    return ASMAtomicXorExU32(pu32, u32); /* (we have no unordered cmpxchg primitive atm.) */
+#endif
+}
+
+
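The point of returning the old word from the *Ex variants is that a caller can recover the previous state of an individual bit, which is exactly what the bitmap helpers further down do. A sketch of the idea (the helper name is invented here):

    #include <iprt/asm.h>

    /* Sketch: toggle bit iBit in a 32-bit word and report whether it was set
     * before, using only the old value returned by the extended helper. */
    static bool sketchTestAndToggleBit(uint32_t volatile *pu32, unsigned iBit)
    {
        uint32_t const fMask  = RT_BIT_32(iBit & 31);
        uint32_t const u32Old = ASMAtomicXorExU32(pu32, fMask);
        return (u32Old & fMask) != 0;
    }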
+/**
+ * Atomically XOR a signed 32-bit value, unordered.
+ *
+ * @param   pi32   Pointer to the variable to XOR @a i32 with.
+ * @param   i32    The value to XOR @a *pi32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
+ */
+DECLINLINE(void) ASMAtomicUoXorS32(int32_t volatile RT_FAR *pi32, int32_t i32) RT_NOTHROW_DEF
+{
+    ASMAtomicUoXorU32((uint32_t volatile RT_FAR *)pi32, (uint32_t)i32);
 }
 
…
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMBitToggle(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
 # if RT_INLINE_ASM_USES_INTRIN
     _bittestandcomplement((long RT_FAR *)pvBitmap, iBit);
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btcl %1, %0"
                          : "=m" (*(volatile long *)pvBitmap)
…
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        mov     edx, [iBit]
        btc     [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        mov     edx, [iBit]
        btc     [eax], edx
-#  endif
+#   endif
     }
+#  endif
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    ASMAtomicUoXorU32(&((uint32_t volatile *)pvBitmap)[offBitmap], RT_BIT_32(iBit & 31));
 # endif
 }
…
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM
 RT_ASM_DECL_PRAGMA_WATCOM(void) ASMAtomicBitToggle(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
 {
     AssertMsg(!((uintptr_t)pvBitmap & 3), ("address %p not 32-bit aligned", pvBitmap));
-# if RT_INLINE_ASM_GNU_STYLE
+# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btcl %1, %0"
                          : "=m" (*(volatile long RT_FAR *)pvBitmap)
…
                          : "memory"
                          , "cc");
-# else
+#  else
     __asm
     {
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        mov     edx, [iBit]
        lock btc [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        mov     edx, [iBit]
        lock btc [eax], edx
-#  endif
+#   endif
     }
+#  endif
+# else
+    ASMAtomicXorU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], RT_BIT_32(iBit & 31));
 # endif
 }
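The new non-x86 fallback treats the bitmap as an array of 32-bit words: word index iBit / 32, bit position iBit & 31, with the AssertStmt patching up pointers that are not 32-bit aligned. A small worked sketch of that index math (the function name is invented here):

    #include <iprt/asm.h>

    /* Sketch: toggle bit 70 of a 32-bit aligned bitmap the same way the
     * fallback does.  Bit 70 lives in word 70 / 32 = 2, at position 70 & 31 = 6. */
    static void sketchToggleBit70(uint32_t volatile *pau32Bitmap)
    {
        int32_t const offBitmap = 70 / 32;                               /* word 2 */
        ASMAtomicUoXorU32(&pau32Bitmap[offBitmap], RT_BIT_32(70 & 31));  /* bit 6  */
    }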
…
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMBitTestAndSet(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
     rc.u8 = _bittestandset((long RT_FAR *)pvBitmap, iBit);
 
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btsl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        bts     [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        bts     [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    rc.u32 = ASMAtomicUoOrExU32(&((uint32_t volatile *)pvBitmap)[offBitmap], RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
…
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMAtomicBitTestAndSet(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
 # if RT_INLINE_ASM_USES_INTRIN
     rc.u8 = _interlockedbittestandset((long RT_FAR *)pvBitmap, iBit);
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btsl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        lock bts [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        lock bts [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    rc.u32 = ASMAtomicOrExU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
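Because ASMAtomicBitTestAndSet reports whether the bit was already set, it can serve as a lock-free claim operation on an allocation bitmap. A hypothetical usage sketch:

    #include <iprt/asm.h>

    /* Claim the first free slot in a bitmap of cSlots bits; returns the slot
     * index or -1 if everything is taken. */
    static int32_t sketchClaimSlot(void volatile *pvBitmap, int32_t cSlots)
    {
        for (int32_t iBit = 0; iBit < cSlots; iBit++)
            if (!ASMAtomicBitTestAndSet(pvBitmap, iBit))
                return iBit;    /* bit was clear before, so this caller owns it */
        return -1;              /* no free slot */
    }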
…
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMBitTestAndClear(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
     rc.u8 = _bittestandreset((long RT_FAR *)pvBitmap, iBit);
 
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btrl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        btr     [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        btr     [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    rc.u32 = ASMAtomicUoAndExU32(&((uint32_t volatile *)pvBitmap)[offBitmap], ~RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
…
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMAtomicBitTestAndClear(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
     rc.u8 = _interlockedbittestandreset((long RT_FAR *)pvBitmap, iBit);
 
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btrl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        lock btr [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        lock btr [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    rc.u32 = ASMAtomicAndExU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], ~RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
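The clear variants boil down to an atomic AND with the complement of the bit mask, with the old bit recovered from the returned word. A sketch of that reduction (the helper name is invented here):

    #include <iprt/asm.h>

    /* Sketch: clear bit iBit in a 32-bit word and report whether it was set,
     * using the extended AND helper the fallback above is built on. */
    static bool sketchTestAndClearBit(uint32_t volatile *pu32, unsigned iBit)
    {
        uint32_t const fMask  = RT_BIT_32(iBit & 31);
        uint32_t const u32Old = ASMAtomicUoAndExU32(pu32, ~fMask);
        return (u32Old & fMask) != 0;
    }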
…
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMBitTestAndToggle(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
     rc.u8 = _bittestandcomplement((long RT_FAR *)pvBitmap, iBit);
 
-# elif RT_INLINE_ASM_GNU_STYLE
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("btcl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        btc     [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        btc     [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    rc.u32 = ASMAtomicUoXorExU32(&((uint32_t volatile *)pvBitmap)[offBitmap], RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
…
  * @remarks x86: Requires a 386 or later.
  */
-#if RT_INLINE_ASM_EXTERNAL
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMAtomicBitTestAndToggle(volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
     union { bool f; uint32_t u32; uint8_t u8; } rc;
     AssertMsg(!((uintptr_t)pvBitmap & 3), ("address %p not 32-bit aligned", pvBitmap));
-# if RT_INLINE_ASM_GNU_STYLE
+# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
     __asm__ __volatile__("lock; btcl %2, %1\n\t"
                         "setc %b0\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        lock btc [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        lock btc [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    rc.u32 = ASMAtomicXorExU32(&((uint32_t volatile *)pvBitmap)[iBit / 32], RT_BIT_32(iBit & 31)) >> (iBit & 31);
 # endif
     return rc.f;
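Note that the plain ASMBitTestAndToggle fallback uses the unordered ASMAtomicUoXorExU32 while the ASMAtomic variant uses the ordered ASMAtomicXorExU32. A hypothetical caller that already serializes access to the bitmap with a critical section can therefore get away with the cheaper, non-locking form:

    #include <iprt/asm.h>
    #include <iprt/critsect.h>

    /* Hypothetical: the bitmap is only touched while pCritSect is held, so the
     * non-locking toggle suffices; other CPUs are excluded by the lock itself. */
    static bool sketchToggleUnderLock(PRTCRITSECT pCritSect, void volatile *pvBitmap, int32_t iBit)
    {
        RTCritSectEnter(pCritSect);
        bool const fWasSet = ASMBitTestAndToggle(pvBitmap, iBit);
        RTCritSectLeave(pCritSect);
        return fWasSet;
    }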
…
  * traps accessing the last bits in the bitmap.
  */
-#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
+#if RT_INLINE_ASM_EXTERNAL_TMP_ARM && !RT_INLINE_ASM_USES_INTRIN
 RT_ASM_DECL_PRAGMA_WATCOM(bool) ASMBitTest(const volatile void RT_FAR *pvBitmap, int32_t iBit) RT_NOTHROW_PROTO;
 #else
…
 # if RT_INLINE_ASM_USES_INTRIN
     rc.u32 = _bittest((long *)pvBitmap, iBit);
-# elif RT_INLINE_ASM_GNU_STYLE
+
+# elif defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#  if RT_INLINE_ASM_GNU_STYLE
 
     __asm__ __volatile__("btl %2, %1\n\t"
…
                         : "memory"
                         , "cc");
-# else
+#  else
     __asm
     {
        mov     edx, [iBit]
-#  ifdef RT_ARCH_AMD64
+#   ifdef RT_ARCH_AMD64
        mov     rax, [pvBitmap]
        bt      [rax], edx
-#  else
+#   else
        mov     eax, [pvBitmap]
        bt      [eax], edx
-#  endif
+#   endif
        setc    al
        and     eax, 1
        mov     [rc.u32], eax
     }
+#  endif
+
+# else
+    int32_t offBitmap = iBit / 32;
+    AssertStmt(!((uintptr_t)pvBitmap & 3), offBitmap += (uintptr_t)pvBitmap & 3; iBit += ((uintptr_t)pvBitmap & 3) * 8);
+    rc.u32 = ASMAtomicUoReadU32(&((uint32_t volatile *)pvBitmap)[offBitmap]) >> (iBit & 31);
 # endif
     return rc.f;
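ASMBitTest's fallback is just an unordered 32-bit read shifted so the requested bit ends up in bit 0. A hypothetical usage sketch that scans a bitmap with it:

    #include <iprt/asm.h>

    /* Count how many of the first cBits bits are set, one ASMBitTest call per
     * bit (fine for a sketch; real code would scan whole words). */
    static uint32_t sketchCountSetBits(void const volatile *pvBitmap, int32_t cBits)
    {
        uint32_t cSet = 0;
        for (int32_t iBit = 0; iBit < cBits; iBit++)
            cSet += ASMBitTest(pvBitmap, iBit);
        return cSet;
    }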