Changeset 59480 in vbox for trunk/include
Timestamp: Jan 26, 2016, 3:00:43 PM
Location: trunk/include/iprt
Files: 3 edited
trunk/include/iprt/asm-watcom-x86-16.h
r58792 → r59480:

 #undef ASMAtomicXchgU32
 #pragma aux ASMAtomicXchgU32 = \
+    ".386" \
     "shl ecx, 16" \
     "mov cx, ax" \
…
 #undef ASMSerializeInstruction
-#pragma aux ASMSerializeInstruction = \
+#if 1
+# pragma aux ASMSerializeInstruction = \
+    "pushf" \
+    "push cs" \
+    "call foo" /* 'push offset done' doesn't work */ \
+    "jmp done" \
+    "foo:" \
+    "iret" /* serializing */ \
+    "done:" \
+    parm [] \
+    modify exact [ax];
+#else
+# pragma aux ASMSerializeInstruction = \
     ".586" \
     "xor eax, eax" \
…
     parm [] \
     modify exact [ax bx cx dx];
+#endif

 #undef ASMAtomicReadU64
…
 #undef ASMAtomicOrU32
 #pragma aux ASMAtomicOrU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMAtomicAndU32
 #pragma aux ASMAtomicAndU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMAtomicUoOrU32
 #pragma aux ASMAtomicUoOrU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMAtomicUoAndU32
 #pragma aux ASMAtomicUoAndU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMMemZeroPage
-#pragma aux ASMMemZeroPage = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMMemZeroPage = \
+    "mov cx, 2048" \
+    "xor ax, ax" \
+    "rep stosw" \
+    parm [es di] \
+    modify exact [ax cx di];
+#else
+# pragma aux ASMMemZeroPage = \
     "mov ecx, 1024" \
     "xor eax, eax" \
…
     parm [es di] \
     modify exact [ax cx di];
+#endif

 #undef ASMMemZero32
-#pragma aux ASMMemZero32 = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMMemZero32 = \
+    "xor ax, ax" \
+    "mov dx, cx" \
+    "rep stosw" \
+    "mov cx, dx" \
+    "rep stosw" \
+    parm [es di] [cx] \
+    modify exact [ax dx cx di];
+#else
+# pragma aux ASMMemZero32 = \
     "and ecx, 0ffffh" /* probably not necessary, lazy bird should check... */ \
     "shr ecx, 2" \
…
     parm [es di] [cx] \
     modify exact [ax cx di];
+#endif

 #undef ASMMemFill32
-#pragma aux ASMMemFill32 = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMMemFill32 = \
+    " test cx, cx" \
+    " jz done" \
+    "again:" \
+    " stosw" \
+    " xchg ax, dx" \
+    " stosw" \
+    " xchg ax, dx" \
+    " dec cx" \
+    " jnz again" \
+    "done:" \
+    parm [es di] [cx] [ax dx] \
+    modify exact [cx di];
+#else
+# pragma aux ASMMemFill32 = \
     "and ecx, 0ffffh" /* probably not necessary, lazy bird should check... */ \
     "shr ecx, 2" \
…
     parm [es di] [cx] [ax dx] \
     modify exact [ax cx di];
+#endif

 #undef ASMProbeReadByte
…
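All of the bindings in this file use Open Watcom's #pragma aux mechanism: the pragma attaches a raw instruction sequence to an ordinary C prototype, with parm naming the registers that carry each argument, value naming the register holding the result, and modify exact declaring the precise clobber set. The __SW_0/__SW_1/__SW_2 tests used throughout appear to key off the macros Open Watcom predefines for its -0/-1/-2 CPU-target switches, so the 8086/186/286-safe bodies are compiled only when 386 instructions are unavailable. A minimal standalone illustration of the mechanism (hypothetical function, not part of the changeset):

    /* Bind a C-callable byte swap to one instruction: the argument
       arrives in AX (parm), the result is returned in AX (value),
       and nothing else is touched (modify exact). */
    unsigned short MySwapU16(unsigned short u16);
    #pragma aux MySwapU16 = \
        "xchg al, ah" \
        parm [ax] nomemory \
        value [ax] \
        modify exact [ax] nomemory;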
 #undef ASMBitSet
-#pragma aux ASMBitSet = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitSet = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" \
+    " mov al, 1" \
+    " shl al, cl" /* al=bitmask */ \
+    " or es:[bx], al" \
+    parm [es bx] [ax cx] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitSet = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     modify exact [dx];
+#endif

 #undef ASMAtomicBitSet
 #pragma aux ASMAtomicBitSet = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitClear
-#pragma aux ASMBitClear = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitClear = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" \
+    " mov al, 1" \
+    " shl al, cl" \
+    " not al" /* al=bitmask */ \
+    " and es:[bx], al" \
+    parm [es bx] [ax cx] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitClear = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     modify exact [dx];
+#endif

 #undef ASMAtomicBitClear
 #pragma aux ASMAtomicBitClear = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitToggle
-#pragma aux ASMBitToggle = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitToggle = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" \
+    " mov al, 1" \
+    " shl al, cl" /* al=bitmask */ \
+    " xor es:[bx], al" \
+    parm [es bx] [ax cx] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitToggle = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     modify exact [dx];
+#endif

 #undef ASMAtomicBitToggle
 #pragma aux ASMAtomicBitToggle = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitTestAndSet
-#pragma aux ASMBitTestAndSet = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitTestAndSet = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" /* cl=byte shift count */ \
+    " mov ah, 1" \
+    " shl ah, cl" /* ah=bitmask */ \
+    " mov al, es:[bx]" \
+    " or ah, al" \
+    " mov es:[bx], ah" \
+    " shr al, cl" \
+    " and al, 1" \
+    parm [es bx] [ax cx] \
+    value [al] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitTestAndSet = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     value [al] \
     modify exact [ax dx];
+#endif

 #undef ASMAtomicBitTestAndSet
 #pragma aux ASMAtomicBitTestAndSet = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitTestAndClear
-#pragma aux ASMBitTestAndClear = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitTestAndClear = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" /* cl=byte shift count */ \
+    " mov ah, 1" \
+    " shl ah, cl" \
+    " not ah" /* ah=bitmask */ \
+    " mov al, es:[bx]" \
+    " and ah, al" \
+    " mov es:[bx], ah" \
+    " shr al, cl" \
+    " and al, 1" \
+    parm [es bx] [ax cx] \
+    value [al] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitTestAndClear = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     value [al] \
     modify exact [ax dx];
+#endif

 #undef ASMAtomicBitTestAndClear
 #pragma aux ASMAtomicBitTestAndClear = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitTestAndToggle
-#pragma aux ASMBitTestAndToggle = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitTestAndToggle = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" /* cl=byte shift count */ \
+    " mov ah, 1" \
+    " shl ah, cl" /* ah=bitmask */ \
+    " mov al, es:[bx]" \
+    " xor ah, al" \
+    " mov es:[bx], ah" \
+    " shr al, cl" \
+    " and al, 1" \
+    parm [es bx] [ax cx] \
+    value [al] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitTestAndToggle = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     value [al] \
     modify exact [ax dx];
+#endif

 #undef ASMAtomicBitTestAndToggle
 #pragma aux ASMAtomicBitTestAndToggle = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMBitTest
-#pragma aux ASMBitTest = \
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+# pragma aux ASMBitTest = \
+    " mov ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
+    " mov cl, 5" \
+    " shl ch, cl" \
+    " add bh, ch" /* Adjust the pointer. */ \
+    " mov cl, al" \
+    " shr ax, 1" /* convert to byte offset */ \
+    " shr ax, 1" \
+    " shr ax, 1" \
+    " add bx, ax" /* adjust pointer again */ \
+    " and cl, 7" \
+    " mov al, es:[bx]" \
+    " shr al, cl" \
+    " and al, 1" \
+    parm [es bx] [ax cx] \
+    value [al] \
+    modify exact [ax bx cx];
+#else
+# pragma aux ASMBitTest = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     parm [es bx] [ax dx] \
     value [al] \
     modify exact [ax dx] nomemory;
+#endif
-
-#if 0
-/** @todo this is way to much inline assembly, better off in an external function. */
-#undef ASMBitFirstClear
-#pragma aux ASMBitFirstClear = \
-    "mov bx, di" /* save start of bitmap for later */ \
-    "shl ecx, 16" \
-    "mov cx, ax" /* ecx = cBits */ \
-    "add ecx, 31" \
-    "shr ecx, 5" /* cDWord = RT_ALIGN_32(cBits, 32) / 32; */ \
-    "mov eax, 0ffffffffh" \
-    "mov edx, eax" /* default return value */ \
-    "repe scasd" \
-    "je done" \
-    "sub di, 4" /* rewind di */ \
-    "xor eax, es:[di]" /* load inverted bits */ \
-    "sub di, bx" /* calc byte offset */ \
-    "movzx edi, di" \
-    "shl edi, 3" /* convert byte to bit offset */ \
-    "bsf edx, eax" \
-    "add edx, edi" \
-    "done:" \
-    "mov eax, edx" \
-    "shr edx, 16" \
-    parm [es di] [ax cx] \
-    value [ax dx] \
-    modify exact [ax bx cx dx di];
-
-/* ASMBitNextClear: Too much work, do when needed. */
-
-/** @todo this is way to much inline assembly, better off in an external function. */
-#undef ASMBitFirstSet
-#pragma aux ASMBitFirstSet = \
-    "mov bx, di" /* save start of bitmap for later */ \
-    "shl ecx, 16" \
-    "mov cx, ax" /* ecx = cBits */ \
-    "add ecx, 31" \
-    "shr ecx, 5" /* cDWord = RT_ALIGN_32(cBits, 32) / 32; */ \
-    "xor eax, eax" \
-    "mov edx, 0ffffffffh" /* default return value */ \
-    "repe scasd" \
-    "je done" \
-    "sub di, 4" /* rewind di */ \
-    "mov eax, es:[di]" /* reload previous dword */ \
-    "sub di, bx" /* calc byte offset */ \
-    "movzx edi, di" \
-    "shl edi, 3" /* convert byte to bit offset */ \
-    "bsf edx, eax" /* find first set bit in dword */ \
-    "add edx, edi" /* calc final bit number */ \
-    "done:" \
-    "mov eax, edx" \
-    "shr edx, 16" \
-    parm [es di] [ax cx] \
-    value [ax dx] \
-    modify exact [ax bx cx dx di];
-
-/* ASMBitNextSet: Too much work, do when needed. */
-#else
+#endif
+
 /* ASMBitFirstClear: External file. */
 /* ASMBitNextClear: External file. */
 /* ASMBitFirstSet: External file. */
 /* ASMBitNextSet: External file. */
-#endif
-
-#undef ASMBitFirstSetU32
-#pragma aux ASMBitFirstSetU32 = \
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitFirstSetU32: External file. */
+#else
+# undef ASMBitFirstSetU32
+# pragma aux ASMBitFirstSetU32 = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     value [ax] \
     modify exact [ax dx] nomemory;
-
-#undef ASMBitFirstSetU64
-#pragma aux ASMBitFirstSetU64 = \
+#endif
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitFirstSetU64: External file. */
+#else
+# undef ASMBitFirstSetU64
+# pragma aux ASMBitFirstSetU64 = \
+    ".386" \
     "shl ecx, 16" \
     "mov cx, dx" \
…
     value [ax] \
     modify exact [ax cx] nomemory;
-
-#undef ASMBitFirstSetU16
-#pragma aux ASMBitFirstSetU16 = \
+#endif
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitFirstSetU16: External file. */
+#else
+# undef ASMBitFirstSetU16
+# pragma aux ASMBitFirstSetU16 = \
     "bsf ax, ax" \
     "jz not_found" \
…
     value [ax] \
     modify exact [ax] nomemory;
-
-#undef ASMBitLastSetU32
-#pragma aux ASMBitLastSetU32 = \
+#endif
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitLastSetU32: External file. */
+#else
+# undef ASMBitLastSetU32
+# pragma aux ASMBitLastSetU32 = \
     "shl edx, 16" \
     "mov dx, ax" \
…
     value [ax] \
     modify exact [ax dx] nomemory;
-
-#undef ASMBitLastSetU64
-#pragma aux ASMBitLastSetU64 = \
+#endif
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitLastSetU64: External file. */
+#else
+# undef ASMBitLastSetU64
+# pragma aux ASMBitLastSetU64 = \
+    ".386" \
     "shl ecx, 16" \
     "mov cx, dx" \
…
     value [ax] \
     modify exact [ax cx] nomemory;
-
-#undef ASMBitLastSetU16
-#pragma aux ASMBitLastSetU16 = \
+#endif
+
+#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
+/* ASMBitLastSetU16: External file. */
+#else
+# undef ASMBitLastSetU16
+# pragma aux ASMBitLastSetU16 = \
     "bsr ax, ax" \
     "jz not_found" \
…
     value [ax] \
     modify exact [ax] nomemory;
+#endif

 #undef ASMByteSwapU16
 #pragma aux ASMByteSwapU16 = \
-    "ror ax, 8" \
+    "xchg al, ah" \
     parm [ax] nomemory \
     value [ax] \
…
 #undef ASMByteSwapU32
 #pragma aux ASMByteSwapU32 = \
-    "xchg ax, dx" \
+    "xchg dh, al" \
+    "xchg dl, ah" \
     parm [ax dx] nomemory \
     value [ax dx] \
…
 #undef ASMRotateLeftU32
 #pragma aux ASMRotateLeftU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
…
 #undef ASMRotateRightU32
 #pragma aux ASMRotateRightU32 = \
+    ".386" \
     "shl edx, 16" \
     "mov dx, ax" \
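The pre-386 bit-operation bodies above all share one addressing computation: the 32-bit bit offset arrives in cx:ax, and because a 64KB segment holds at most 8 * 65536 bits, only bits 0-18 of it can matter. Bits 16-18 are folded into the high byte of the pointer (each 65536-bit chunk is 8192 bytes, so shifting cl's low three bits left by 5 and adding the result to bh adds exactly 8192 bytes per chunk), ax >> 3 then supplies the byte offset within the chunk, and the low three bits select the bit. A C model of that address arithmetic, for illustration only (the helper name is made up):

    #include <stdint.h>

    /* Model of the 8086-path addressing: reduce a 32-bit bit offset to a
       16-bit byte offset plus a bit-in-byte mask, as the assembly does. */
    static void BitSetModel(uint8_t *pbBitmap, uint32_t iBit)
    {
        uint16_t off = (uint16_t)(((iBit >> 16) & 0x7) << 13); /* "add bh, ch": 8192 bytes per 64K bits */
        off += (uint16_t)((iBit & 0xffff) >> 3);               /* "shr ax, 1" x3; "add bx, ax" */
        pbBitmap[off] |= (uint8_t)(1u << (iBit & 7));          /* "shl al, cl"; "or es:[bx], al" */
    }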
trunk/include/iprt/asm.h
r58791 → r59480:

  * @param pu32 Pointer to the 32-bit variable to update.
  * @param u32 The 32-bit value to assign to *pu32.
+ *
+ * @remarks Does not work on 286 and earlier.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pu64 Pointer to the 64-bit variable to update.
  * @param u64 The 64-bit value to assign to *pu64.
+ *
+ * @remarks Works on 32-bit x86 CPUs starting with Pentium.
  */
 #if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
…
  * @param u8New The new value to assigned to *pu8.
  * @param u8Old The old value to *pu8 compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL || !RT_INLINE_ASM_GNU_STYLE
…
  * @param i8New The new value to assigned to *pi8.
  * @param i8Old The old value to *pi8 compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgS8(volatile int8_t *pi8, const int8_t i8New, const int8_t i8Old)
…
  * @param fNew The new value to assigned to *pf.
  * @param fOld The old value to *pf compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgBool(volatile bool *pf, const bool fNew, const bool fOld)
…
  * @param u32New The new value to assigned to *pu32.
  * @param u32Old The old value to *pu32 compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param i32New The new value to assigned to *pi32.
  * @param i32Old The old value to *pi32 compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgS32(volatile int32_t *pi32, const int32_t i32New, const int32_t i32Old)
…
  * @param u64New The 64-bit value to assign to *pu64.
  * @param u64Old The value to compare with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
…
  * @param i64 The 64-bit value to assign to *pu64.
  * @param i64Old The value to compare with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgS64(volatile int64_t *pi64, const int64_t i64, const int64_t i64Old)
…
  * @param pvNew The new value to assigned to *ppv.
  * @param pvOld The old value to *ppv compare with.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgPtrVoid(void * volatile *ppv, const void *pvNew, const void *pvOld)
…
  * @remarks This is relatively type safe on GCC platforms.
+ * @remarks x86: Requires a 486 or later.
  */
 #ifdef __GNUC__
…
  * @remarks This doesn't currently work for all handles (like RTFILE).
+ * @remarks x86: Requires a 486 or later.
  */
 #if HC_ARCH_BITS == 32 || (ARCH_BITS == 16 && RT_FAR_DATA)
…
  * @param uOld The old value to *pu compare with.
  * @param fRc Where to store the result.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #define ASMAtomicCmpXchgSize(pu, uNew, uOld, fRc) \
…
  * @param u32Old The old value to *pu32 compare with.
  * @param pu32Old Pointer store the old value at.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param i32Old The old value to *pi32 compare with.
  * @param pi32Old Pointer store the old value at.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgExS32(volatile int32_t *pi32, const int32_t i32New, const int32_t i32Old, int32_t *pi32Old)
…
  * @param u64Old The value to compare with.
  * @param pu64Old Pointer store the old value at.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
…
  * @param i64Old The value to compare with.
  * @param pi64Old Pointer store the old value at.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgExS64(volatile int64_t *pi64, const int64_t i64, const int64_t i64Old, int64_t *pi64Old)
…
  * @param fRc Where to store the result.
  * @param puOldVal Pointer to where to store the old value.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #define ASMAtomicCmpXchgExSize(pu, uNew, uOld, fRc, puOldVal) \
…
  * @param pvOld The old value to *ppv compare with.
  * @param ppvOld Pointer store the old value at.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(bool) ASMAtomicCmpXchgExPtrVoid(void * volatile *ppv, const void *pvNew, const void *pvOld, void **ppvOld)
…
  * @remarks This is relatively type safe on GCC platforms.
+ * @remarks x86: Requires a 486 or later.
  */
 #ifdef __GNUC__
…
  * @param pu64 Pointer to the 64-bit variable to read.
  *             The memory pointed to must be writable.
- * @remark This will fault if the memory is read-only!
+ *
+ * @remarks This may fault if the memory is read-only!
+ * @remarks x86: Requires a Pentium or later.
  */
 #if (RT_INLINE_ASM_EXTERNAL && !defined(RT_ARCH_AMD64)) \
…
  * @param pu64 Pointer to the 64-bit variable to read.
  *             The memory pointed to must be writable.
- * @remark This will fault if the memory is read-only!
+ *
+ * @remarks This may fault if the memory is read-only!
+ * @remarks x86: Requires a Pentium or later.
  */
 #if !defined(RT_ARCH_AMD64) \
…
  * @param pi64 Pointer to the 64-bit variable to read.
  *             The memory pointed to must be writable.
- * @remark This will fault if the memory is read-only!
+ *
+ * @remarks This may fault if the memory is read-only!
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicReadS64(volatile int64_t *pi64)
…
  * @param pi64 Pointer to the 64-bit variable to read.
  *             The memory pointed to must be writable.
- * @remark This will fault if the memory is read-only!
+ *
+ * @remarks This will fault if the memory is read-only!
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicUoReadS64(volatile int64_t *pi64)
…
  * @param pu16 Pointer to the value.
  * @param u16 Number to add.
+ *
  * @remarks Currently not implemented, just to make 16-bit code happy.
+ * @remarks x86: Requires a 486 or later.
  */
 DECLASM(uint16_t) ASMAtomicAddU16(uint16_t volatile *pu16, uint32_t u16);
…
  * @param pu32 Pointer to the value.
  * @param u32 Number to add.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi32 Pointer to the value.
  * @param i32 Number to add.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int32_t) ASMAtomicAddS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the value.
  * @param u64 Number to add.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi64 Pointer to the value.
  * @param i64 Number to add.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicAddS64(int64_t volatile *pi64, int64_t i64)
…
  * @param pu16 Pointer to the value.
  * @param u16 Number to subtract.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(uint16_t) ASMAtomicSubU16(uint16_t volatile *pu16, uint32_t u16)
…
  * @param pi16 Pointer to the value.
  * @param i16 Number to subtract.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int16_t) ASMAtomicSubS16(int16_t volatile *pi16, int16_t i16)
…
  * @param pu32 Pointer to the value.
  * @param u32 Number to subtract.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(uint32_t) ASMAtomicSubU32(uint32_t volatile *pu32, uint32_t u32)
…
  * @param pi32 Pointer to the value.
  * @param i32 Number to subtract.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int32_t) ASMAtomicSubS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the value.
  * @param u64 Number to subtract.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(uint64_t) ASMAtomicSubU64(uint64_t volatile *pu64, uint64_t u64)
…
  * @param pi64 Pointer to the value.
  * @param i64 Number to subtract.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicSubS64(int64_t volatile *pi64, int64_t i64)
…
  * @param pcb Pointer to the size_t value.
  * @param cb Number to subtract.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(size_t) ASMAtomicSubZ(size_t volatile *pcb, size_t cb)
…
  * @param uNew The value to subtract to *pu.
  * @param puOld Where to store the old value.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #define ASMAtomicSubSize(pu, uNew, puOld) \
…
  * @param pu16 Pointer to the value to increment.
  * @remarks Not implemented. Just to make 16-bit code happy.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLASM(uint16_t) ASMAtomicIncU16(uint16_t volatile *pu16);
…
  * @returns The new value.
  * @param pu32 Pointer to the value to increment.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @returns The new value.
  * @param pi32 Pointer to the value to increment.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int32_t) ASMAtomicIncS32(int32_t volatile *pi32)
…
  * @returns The new value.
  * @param pu64 Pointer to the value to increment.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @returns The new value.
  * @param pi64 Pointer to the value to increment.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicIncS64(int64_t volatile *pi64)
…
  * @returns The new value.
  * @param pcb Pointer to the value to increment.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int64_t) ASMAtomicIncZ(size_t volatile *pcb)
…
  * @param pu16 Pointer to the value to decrement.
  * @remarks Not implemented. Just to make 16-bit code happy.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLASM(uint32_t) ASMAtomicDecU16(uint16_t volatile *pu16);
…
  * @returns The new value.
  * @param pu32 Pointer to the value to decrement.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @returns The new value.
  * @param pi32 Pointer to the value to decrement.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int32_t) ASMAtomicDecS32(int32_t volatile *pi32)
…
  * @returns The new value.
  * @param pu64 Pointer to the value to decrement.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @returns The new value.
  * @param pi64 Pointer to the value to decrement.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(int64_t) ASMAtomicDecS64(int64_t volatile *pi64)
…
  * @returns The new value.
  * @param pcb Pointer to the value to decrement.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 DECLINLINE(int64_t) ASMAtomicDecZ(size_t volatile *pcb)
…
  * @param pu32 Pointer to the pointer variable to OR u32 with.
  * @param u32 The value to OR *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi32 Pointer to the pointer variable to OR u32 with.
  * @param i32 The value to OR *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 DECLINLINE(void) ASMAtomicOrS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the pointer variable to OR u64 with.
  * @param u64 The value to OR *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi64 Pointer to the pointer variable to OR u64 with.
  * @param i64 The value to OR *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(void) ASMAtomicOrS64(int64_t volatile *pi64, int64_t i64)
…
  * @param pu32 Pointer to the pointer variable to AND u32 with.
  * @param u32 The value to AND *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi32 Pointer to the pointer variable to AND i32 with.
  * @param i32 The value to AND *pi32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 DECLINLINE(void) ASMAtomicAndS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the pointer variable to AND u64 with.
  * @param u64 The value to AND *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  * @param pi64 Pointer to the pointer variable to AND i64 with.
  * @param i64 The value to AND *pi64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(void) ASMAtomicAndS64(int64_t volatile *pi64, int64_t i64)
…
  * @param pu32 Pointer to the pointer variable to OR u32 with.
  * @param u32 The value to OR *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  * @param pi32 Pointer to the pointer variable to OR u32 with.
  * @param i32 The value to OR *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 DECLINLINE(void) ASMAtomicUoOrS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the pointer variable to OR u64 with.
  * @param u64 The value to OR *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  * @param pi64 Pointer to the pointer variable to OR u64 with.
  * @param i64 The value to OR *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(void) ASMAtomicUoOrS64(int64_t volatile *pi64, int64_t i64)
…
  * @param pu32 Pointer to the pointer variable to AND u32 with.
  * @param u32 The value to AND *pu32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  * @param pi32 Pointer to the pointer variable to AND i32 with.
  * @param i32 The value to AND *pi32 with.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 DECLINLINE(void) ASMAtomicUoAndS32(int32_t volatile *pi32, int32_t i32)
…
  * @param pu64 Pointer to the pointer variable to AND u64 with.
  * @param u64 The value to AND *pu64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  * @param pi64 Pointer to the pointer variable to AND i64 with.
  * @param i64 The value to AND *pi64 with.
+ *
+ * @remarks x86: Requires a Pentium or later.
  */
 DECLINLINE(void) ASMAtomicUoAndS64(int64_t volatile *pi64, int64_t i64)
…
  * @returns the new value.
  * @param pu32 Pointer to the variable to increment.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  * @returns the new value.
  * @param pu32 Pointer to the variable to decrement.
+ *
+ * @remarks x86: Requires a 486 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  *          the memory access isn't atomic!
  * @param iBit The bit to set.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  *          the memory access isn't atomic!
  * @param iBit The bit to toggle set.
+ *
  * @remarks No memory barrier, take care on smp.
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  *          the memory access isn't atomic!
  * @param iBit The bit to test and set.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  *          the memory access isn't atomic!
  * @param iBit The bit to set.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  *
  * @remarks No memory barrier, take care on smp.
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
…
  *          the memory access isn't atomic!
  * @param iBit The bit to test and toggle.
+ *
+ * @remarks x86: Requires a 386 or later.
  */
 #if RT_INLINE_ASM_EXTERNAL
…
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
-DECLASM(unsigned) ASMBitFirstSetU16(uint32_t u16);
-#else
-DECLINLINE(unsigned) ASMBitFirstSetU16(uint32_t u16)
+DECLASM(unsigned) ASMBitFirstSetU16(uint16_t u16);
+#else
+DECLINLINE(unsigned) ASMBitFirstSetU16(uint16_t u16)
 {
     return ASMBitFirstSetU32((uint32_t)u16);
…
  */
 #if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
-DECLASM(unsigned) ASMBitLastSetU16(uint32_t u16);
-#else
-DECLINLINE(unsigned) ASMBitLastSetU16(uint32_t u16)
+DECLASM(unsigned) ASMBitLastSetU16(uint16_t u16);
+#else
+DECLINLINE(unsigned) ASMBitLastSetU16(uint16_t u16)
 {
     return ASMBitLastSetU32((uint32_t)u16);
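Most of the asm.h changes are documentation: the new remarks record the CPU floor for each operation (cmpxchg needs a 486, cmpxchg8b a Pentium, locked 32-bit or/and a 386), which matters now that the headers can target real 16-bit CPUs. The only code changes correct ASMBitFirstSetU16/ASMBitLastSetU16 to take a uint16_t instead of a uint32_t. As a usage illustration of the compare-exchange family documented above (a sketch, not from the changeset):

    #include <iprt/asm.h>

    /* Atomically set bit 0 of *pu32Flags with a cmpxchg retry loop.
       ASMAtomicCmpXchgExU32 returns true on success and always stores
       the value it found, so each failed attempt re-reads for free. */
    static void SetFlag01(uint32_t volatile *pu32Flags)
    {
        uint32_t u32Old = ASMAtomicReadU32(pu32Flags);
        while (!ASMAtomicCmpXchgExU32(pu32Flags, u32Old | 1, u32Old, &u32Old))
        { /* u32Old was refreshed; retry */ }
    }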
trunk/include/iprt/mangling.h
r59459 → r59480:

 # define ASMBitFirstSet                 RT_MANGLER(ASMBitFirstSet)
 # define ASMBitFirstSet_EndProc         RT_MANGLER(ASMBitFirstSet_EndProc)
+# define ASMBitFirstSetU16              RT_MANGLER(ASMBitFirstSetU16)
+# define ASMBitFirstSetU16_EndProc      RT_MANGLER(ASMBitFirstSetU16_EndProc)
 # define ASMBitFirstSetU32              RT_MANGLER(ASMBitFirstSetU32)
 # define ASMBitFirstSetU32_EndProc      RT_MANGLER(ASMBitFirstSetU32_EndProc)
+# define ASMBitFirstSetU64              RT_MANGLER(ASMBitFirstSetU64)
+# define ASMBitFirstSetU64_EndProc      RT_MANGLER(ASMBitFirstSetU64_EndProc)
+# define ASMBitLastSetU16               RT_MANGLER(ASMBitLastSetU16)
+# define ASMBitLastSetU16_EndProc       RT_MANGLER(ASMBitLastSetU16_EndProc)
 # define ASMBitLastSetU32               RT_MANGLER(ASMBitLastSetU32)
 # define ASMBitLastSetU32_EndProc       RT_MANGLER(ASMBitLastSetU32_EndProc)
+# define ASMBitLastSetU64               RT_MANGLER(ASMBitLastSetU64)
+# define ASMBitLastSetU64_EndProc       RT_MANGLER(ASMBitLastSetU64_EndProc)
 # define ASMBitNextClear                RT_MANGLER(ASMBitNextClear)
 # define ASMBitNextClear_EndProc        RT_MANGLER(ASMBitNextClear_EndProc)
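The mangling.h additions follow mechanically from the first file: ASMBitFirstSetU16/U64 and ASMBitLastSetU16/U64 can now come from external assembly files on pre-386 targets, so their symbols (and the matching _EndProc markers) must be routed through RT_MANGLER like every other externally implemented IPRT function. Schematically, with a made-up prefix standing in for whatever the build actually configures:

    /* Illustrative only: RT_MANGLER prepends a build-defined prefix. */
    #define RT_MANGLER(name)    MyPrefix_ ## name
    #define ASMBitFirstSetU16   RT_MANGLER(ASMBitFirstSetU16)
    /* Callers still write ASMBitFirstSetU16(...); the linker resolves
       MyPrefix_ASMBitFirstSetU16 from the external assembly object. */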