VirtualBox

Changeset 59480 in vbox for trunk/include


Ignore:
Timestamp:
Jan 26, 2016 3:00:43 PM (9 years ago)
Author:
vboxsync
Message:

asm.h,asm-watcom-x86-16.h,mangling.h: Documented CPU requirements, ported much of asm-watcom-x86-16.h to 8086.

Location:
trunk/include/iprt
Files:
3 edited

Legend:

Unmodified
Added
Removed
  • trunk/include/iprt/asm-watcom-x86-16.h

    r58792 r59480  
    8383#undef      ASMAtomicXchgU32
    8484#pragma aux ASMAtomicXchgU32 = \
     85    ".386" \
    8586    "shl  ecx, 16" \
    8687    "mov  cx, ax" \
     
    150151
    151152#undef      ASMSerializeInstruction
    152 #pragma aux ASMSerializeInstruction = \
     153#if 1
     154# pragma aux ASMSerializeInstruction = \
     155    "pushf" \
     156    "push cs" \
     157    "call foo" /* 'push offset done' doesn't work */ \
     158    "jmp  done" \
     159    "foo:" \
     160    "iret" /* serializing */ \
     161    "done:" \
     162    parm [] \
     163    modify exact [ax];
     164#else
     165# pragma aux ASMSerializeInstruction = \
    153166    ".586" \
    154167    "xor eax, eax" \
     
    156169    parm [] \
    157170    modify exact [ax bx cx dx];
     171#endif
    158172
    159173#undef      ASMAtomicReadU64
     
    261275#undef      ASMAtomicOrU32
    262276#pragma aux ASMAtomicOrU32 = \
     277    ".386" \
    263278    "shl edx, 16" \
    264279    "mov dx, ax" \
     
    271286#undef      ASMAtomicAndU32
    272287#pragma aux ASMAtomicAndU32 = \
     288    ".386" \
    273289    "shl edx, 16" \
    274290    "mov dx, ax" \
     
    281297#undef      ASMAtomicUoOrU32
    282298#pragma aux ASMAtomicUoOrU32 = \
     299    ".386" \
    283300    "shl edx, 16" \
    284301    "mov dx, ax" \
     
    291308#undef      ASMAtomicUoAndU32
    292309#pragma aux ASMAtomicUoAndU32 = \
     310    ".386" \
    293311    "shl edx, 16" \
    294312    "mov dx, ax" \
     
    324342
    325343#undef      ASMMemZeroPage
    326 #pragma aux ASMMemZeroPage = \
     344#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     345# pragma aux ASMMemZeroPage = \
     346    "mov cx, 2048" \
     347    "xor ax, ax" \
     348    "rep stosw"  \
     349    parm [es di] \
     350    modify exact [ax cx di];
     351#else
     352# pragma aux ASMMemZeroPage = \
    327353    "mov ecx, 1024" \
    328354    "xor eax, eax" \
     
    330356    parm [es di] \
    331357    modify exact [ax cx di];
     358#endif
    332359
    333360#undef      ASMMemZero32
    334 #pragma aux ASMMemZero32 = \
     361#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     362# pragma aux ASMMemZero32 = \
     363    "xor ax, ax" \
     364    "mov dx, cx" \
     365    "rep stosw" \
     366    "mov cx, dx" \
     367    "rep stosw" \
     368    parm [es di] [cx] \
     369    modify exact [ax dx cx di];
     370#else
     371# pragma aux ASMMemZero32 = \
    335372    "and ecx, 0ffffh" /* probably not necessary, lazy bird should check... */ \
    336373    "shr ecx, 2" \
     
    339376    parm [es di] [cx] \
    340377    modify exact [ax cx di];
     378#endif
    341379
    342380#undef      ASMMemFill32
    343 #pragma aux ASMMemFill32 = \
     381#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     382# pragma aux ASMMemFill32 = \
     383    "   test    cx, cx" \
     384    "   jz      done" \
     385    "again:" \
     386    "   stosw" \
     387    "   xchg    ax, dx" \
     388    "   stosw" \
     389    "   xchg    ax, dx" \
     390    "   dec     cx" \
     391    "   jnz     again" \
     392    "done:" \
     393    parm [es di] [cx] [ax dx]\
     394    modify exact [cx di];
     395#else
     396# pragma aux ASMMemFill32 = \
    344397    "and ecx, 0ffffh" /* probably not necessary, lazy bird should check... */ \
    345398    "shr ecx, 2" \
     
    350403    parm [es di] [cx] [ax dx]\
    351404    modify exact [ax cx di];
     405#endif
    352406
    353407#undef      ASMProbeReadByte
     
    359413
    360414#undef      ASMBitSet
    361 #pragma aux ASMBitSet = \
     415#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     416# pragma aux ASMBitSet = \
     417    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     418    "   mov     cl, 5" \
     419    "   shl     ch, cl" \
     420    "   add     bh, ch" /* Adjust the pointer. */ \
     421    "   mov     cl, al" \
     422    "   shr     ax, 1"  /* convert to byte offset */ \
     423    "   shr     ax, 1" \
     424    "   shr     ax, 1" \
     425    "   add     bx, ax" /* adjust pointer again */\
     426    "   and     cl, 7" \
     427    "   mov     al, 1" \
     428    "   shl     al, cl" /* al=bitmask */ \
     429    "   or      es:[bx], al" \
     430    parm [es bx] [ax cx] \
     431    modify exact [ax bx cx];
     432#else
     433# pragma aux ASMBitSet = \
    362434    "shl edx, 16" \
    363435    "mov dx, ax" \
     
    365437    parm [es bx] [ax dx] \
    366438    modify exact [dx];
     439#endif
    367440
    368441#undef      ASMAtomicBitSet
    369442#pragma aux ASMAtomicBitSet = \
     443    ".386" \
    370444    "shl edx, 16" \
    371445    "mov dx, ax" \
     
    375449
    376450#undef      ASMBitClear
    377 #pragma aux ASMBitClear = \
     451#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     452# pragma aux ASMBitClear = \
     453    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     454    "   mov     cl, 5" \
     455    "   shl     ch, cl" \
     456    "   add     bh, ch" /* Adjust the pointer. */ \
     457    "   mov     cl, al" \
     458    "   shr     ax, 1"  /* convert to byte offset */ \
     459    "   shr     ax, 1" \
     460    "   shr     ax, 1" \
     461    "   add     bx, ax" /* adjust pointer again */\
     462    "   and     cl, 7" \
     463    "   mov     al, 1" \
     464    "   shl     al, cl" \
     465    "   not     al" /* al=bitmask */ \
     466    "   and     es:[bx], al" \
     467    parm [es bx] [ax cx] \
     468    modify exact [ax bx cx];
     469#else
     470# pragma aux ASMBitClear = \
    378471    "shl edx, 16" \
    379472    "mov dx, ax" \
     
    381474    parm [es bx] [ax dx] \
    382475    modify exact [dx];
     476#endif
    383477
    384478#undef      ASMAtomicBitClear
    385479#pragma aux ASMAtomicBitClear = \
     480    ".386" \
    386481    "shl edx, 16" \
    387482    "mov dx, ax" \
     
    391486
    392487#undef      ASMBitToggle
    393 #pragma aux ASMBitToggle = \
     488#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     489# pragma aux ASMBitToggle = \
     490    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     491    "   mov     cl, 5" \
     492    "   shl     ch, cl" \
     493    "   add     bh, ch" /* Adjust the pointer. */ \
     494    "   mov     cl, al" \
     495    "   shr     ax, 1"  /* convert to byte offset */ \
     496    "   shr     ax, 1" \
     497    "   shr     ax, 1" \
     498    "   add     bx, ax" /* adjust pointer again */\
     499    "   and     cl, 7" \
     500    "   mov     al, 1" \
     501    "   shl     al, cl" /* al=bitmask */ \
     502    "   xor     es:[bx], al" \
     503    parm [es bx] [ax cx] \
     504    modify exact [ax bx cx];
     505#else
     506# pragma aux ASMBitToggle = \
    394507    "shl edx, 16" \
    395508    "mov dx, ax" \
     
    397510    parm [es bx] [ax dx] \
    398511    modify exact [dx];
     512#endif
    399513
    400514#undef      ASMAtomicBitToggle
    401515#pragma aux ASMAtomicBitToggle = \
     516    ".386" \
    402517    "shl edx, 16" \
    403518    "mov dx, ax" \
     
    407522
    408523#undef      ASMBitTestAndSet
    409 #pragma aux ASMBitTestAndSet = \
     524#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     525# pragma aux ASMBitTestAndSet = \
     526    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     527    "   mov     cl, 5" \
     528    "   shl     ch, cl" \
     529    "   add     bh, ch" /* Adjust the pointer. */ \
     530    "   mov     cl, al" \
     531    "   shr     ax, 1"  /* convert to byte offset */ \
     532    "   shr     ax, 1" \
     533    "   shr     ax, 1" \
     534    "   add     bx, ax" /* adjust pointer again */\
     535    "   and     cl, 7" /* cl=byte shift count */ \
     536    "   mov     ah, 1" \
     537    "   shl     ah, cl" /* ah=bitmask */ \
     538    "   mov     al, es:[bx]" \
     539    "   or      ah, al" \
     540    "   mov     es:[bx], ah" \
     541    "   shr     al, cl" \
     542    "   and     al, 1" \
     543    parm [es bx] [ax cx] \
     544    value [al] \
     545    modify exact [ax bx cx];
     546#else
     547# pragma aux ASMBitTestAndSet = \
    410548    "shl edx, 16" \
    411549    "mov dx, ax" \
     
    415553    value [al] \
    416554    modify exact [ax dx];
     555#endif
    417556
    418557#undef      ASMAtomicBitTestAndSet
    419558#pragma aux ASMAtomicBitTestAndSet = \
     559    ".386" \
    420560    "shl edx, 16" \
    421561    "mov dx, ax" \
     
    427567
    428568#undef      ASMBitTestAndClear
    429 #pragma aux ASMBitTestAndClear = \
     569#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     570# pragma aux ASMBitTestAndClear = \
     571    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     572    "   mov     cl, 5" \
     573    "   shl     ch, cl" \
     574    "   add     bh, ch" /* Adjust the pointer. */ \
     575    "   mov     cl, al" \
     576    "   shr     ax, 1"  /* convert to byte offset */ \
     577    "   shr     ax, 1" \
     578    "   shr     ax, 1" \
     579    "   add     bx, ax" /* adjust pointer again */\
     580    "   and     cl, 7" /* cl=byte shift count */ \
     581    "   mov     ah, 1" \
     582    "   shl     ah, cl" \
     583    "   not     ah" /* ah=bitmask */ \
     584    "   mov     al, es:[bx]" \
     585    "   and     ah, al" \
     586    "   mov     es:[bx], ah" \
     587    "   shr     al, cl" \
     588    "   and     al, 1" \
     589    parm [es bx] [ax cx] \
     590    value [al] \
     591    modify exact [ax bx cx];
     592#else
     593# pragma aux ASMBitTestAndClear = \
    430594    "shl edx, 16" \
    431595    "mov dx, ax" \
     
    435599    value [al] \
    436600    modify exact [ax dx];
     601#endif
    437602
    438603#undef      ASMAtomicBitTestAndClear
    439604#pragma aux ASMAtomicBitTestAndClear = \
     605    ".386" \
    440606    "shl edx, 16" \
    441607    "mov dx, ax" \
     
    447613
    448614#undef      ASMBitTestAndToggle
    449 #pragma aux ASMBitTestAndToggle = \
     615#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     616# pragma aux ASMBitTestAndToggle = \
     617    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     618    "   mov     cl, 5" \
     619    "   shl     ch, cl" \
     620    "   add     bh, ch" /* Adjust the pointer. */ \
     621    "   mov     cl, al" \
     622    "   shr     ax, 1"  /* convert to byte offset */ \
     623    "   shr     ax, 1" \
     624    "   shr     ax, 1" \
     625    "   add     bx, ax" /* adjust pointer again */\
     626    "   and     cl, 7" /* cl=byte shift count */ \
     627    "   mov     ah, 1" \
     628    "   shl     ah, cl" /* ah=bitmask */ \
     629    "   mov     al, es:[bx]" \
     630    "   xor     ah, al" \
     631    "   mov     es:[bx], ah" \
     632    "   shr     al, cl" \
     633    "   and     al, 1" \
     634    parm [es bx] [ax cx] \
     635    value [al] \
     636    modify exact [ax bx cx];
     637#else
     638# pragma aux ASMBitTestAndToggle = \
    450639    "shl edx, 16" \
    451640    "mov dx, ax" \
     
    455644    value [al] \
    456645    modify exact [ax dx];
     646#endif
    457647
    458648#undef      ASMAtomicBitTestAndToggle
    459649#pragma aux ASMAtomicBitTestAndToggle = \
     650    ".386" \
    460651    "shl edx, 16" \
    461652    "mov dx, ax" \
     
    467658
    468659#undef      ASMBitTest
    469 #pragma aux ASMBitTest = \
     660#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     661# pragma aux ASMBitTest = \
     662    "   mov     ch, cl" /* Only the three lowest bits are relevant due to 64KB segments */ \
     663    "   mov     cl, 5" \
     664    "   shl     ch, cl" \
     665    "   add     bh, ch" /* Adjust the pointer. */ \
     666    "   mov     cl, al" \
     667    "   shr     ax, 1"  /* convert to byte offset */ \
     668    "   shr     ax, 1" \
     669    "   shr     ax, 1" \
     670    "   add     bx, ax" /* adjust pointer again */\
     671    "   and     cl, 7" \
     672    "   mov     al, es:[bx]" \
     673    "   shr     al, cl" \
     674    "   and     al, 1" \
     675    parm [es bx] [ax cx] \
     676    value [al] \
     677    modify exact [ax bx cx];
     678#else
     679# pragma aux ASMBitTest = \
    470680    "shl edx, 16" \
    471681    "mov dx, ax" \
     
    475685    value [al] \
    476686    modify exact [ax dx] nomemory;
    477 
    478 #if 0
    479 /** @todo this is way too much inline assembly, better off in an external function. */
    480 #undef      ASMBitFirstClear
    481 #pragma aux ASMBitFirstClear = \
    482     "mov bx, di" /* save start of bitmap for later */ \
    483     "shl ecx, 16" \
    484     "mov cx, ax" /* ecx = cBits */ \
    485     "add ecx, 31" \
    486     "shr ecx, 5" /* cDWord = RT_ALIGN_32(cBits, 32) / 32; */  \
    487     "mov eax, 0ffffffffh" \
    488     "mov edx, eax" /* default return value */ \
    489     "repe scasd" \
    490     "je done" \
    491     "sub di, 4" /* rewind di */ \
    492     "xor eax, es:[di]" /* load inverted bits */ \
    493     "sub di, bx" /* calc byte offset */ \
    494     "movzx edi, di" \
    495     "shl edi, 3" /* convert byte to bit offset */ \
    496     "bsf edx, eax" \
    497     "add edx, edi" \
    498     "done:" \
    499     "mov eax, edx" \
    500     "shr edx, 16" \
    501     parm [es di] [ax cx] \
    502     value [ax dx] \
    503     modify exact [ax bx cx dx di];
    504 
    505 /* ASMBitNextClear: Too much work, do when needed. */
    506 
    507 /** @todo this is way too much inline assembly, better off in an external function. */
    508 #undef      ASMBitFirstSet
    509 #pragma aux ASMBitFirstSet = \
    510     "mov bx, di" /* save start of bitmap for later */ \
    511     "shl ecx, 16" \
    512     "mov cx, ax" /* ecx = cBits */ \
    513     "add ecx, 31" \
    514     "shr ecx, 5" /* cDWord = RT_ALIGN_32(cBits, 32) / 32; */  \
    515     "xor eax, eax" \
    516     "mov edx, 0ffffffffh" /* default return value */ \
    517     "repe scasd" \
    518     "je done" \
    519     "sub di, 4" /* rewind di */ \
    520     "mov eax, es:[di]" /* reload previous dword */ \
    521     "sub di, bx" /* calc byte offset */ \
    522     "movzx edi, di" \
    523     "shl edi, 3" /* convert byte to bit offset */ \
    524     "bsf edx, eax" /* find first set bit in dword */ \
    525     "add edx, edi" /* calc final bit number */ \
    526     "done:" \
    527     "mov eax, edx" \
    528     "shr edx, 16" \
    529     parm [es di] [ax cx] \
    530     value [ax dx] \
    531     modify exact [ax bx cx dx di];
    532 
    533 /* ASMBitNextSet: Too much work, do when needed. */
    534 #else
     687#endif
     688
    535689/* ASMBitFirstClear: External file.  */
    536690/* ASMBitNextClear:  External file.  */
    537691/* ASMBitFirstSet:   External file.  */
    538692/* ASMBitNextSet:    External file.  */
    539 #endif
    540 
    541 #undef      ASMBitFirstSetU32
    542 #pragma aux ASMBitFirstSetU32 = \
     693
     694#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     695/* ASMBitFirstSetU32: External file. */
     696#else
     697# undef      ASMBitFirstSetU32
     698# pragma aux ASMBitFirstSetU32 = \
    543699    "shl edx, 16" \
    544700    "mov dx, ax" \
     
    553709    value [ax] \
    554710    modify exact [ax dx] nomemory;
    555 
    556 #undef      ASMBitFirstSetU64
    557 #pragma aux ASMBitFirstSetU64 = \
     711#endif
     712
     713#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     714/* ASMBitFirstSetU64: External file. */
     715#else
     716# undef      ASMBitFirstSetU64
     717# pragma aux ASMBitFirstSetU64 = \
     718    ".386" \
    558719    "shl ecx, 16" \
    559720    "mov cx, dx" \
     
    578739    value [ax] \
    579740    modify exact [ax cx] nomemory;
    580 
    581 #undef      ASMBitFirstSetU16
    582 #pragma aux ASMBitFirstSetU16 = \
     741#endif
     742
     743#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     744/* ASMBitFirstSetU16: External file. */
     745#else
     746# undef      ASMBitFirstSetU16
     747# pragma aux ASMBitFirstSetU16 = \
    583748    "bsf ax, ax" \
    584749    "jz  not_found" \
     
    591756    value [ax] \
    592757    modify exact [ax] nomemory;
    593 
    594 #undef      ASMBitLastSetU32
    595 #pragma aux ASMBitLastSetU32 = \
     758#endif
     759
     760#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     761/* ASMBitLastSetU32: External file. */
     762#else
     763# undef      ASMBitLastSetU32
     764# pragma aux ASMBitLastSetU32 = \
    596765    "shl edx, 16" \
    597766    "mov dx, ax" \
     
    606775    value [ax] \
    607776    modify exact [ax dx] nomemory;
    608 
    609 #undef      ASMBitLastSetU64
    610 #pragma aux ASMBitLastSetU64 = \
     777#endif
     778
     779#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     780/* ASMBitLastSetU64: External file. */
     781#else
     782# undef      ASMBitLastSetU64
     783# pragma aux ASMBitLastSetU64 = \
     784    ".386" \
    611785    "shl ecx, 16" \
    612786    "mov cx, dx" \
     
    631805    value [ax] \
    632806    modify exact [ax cx] nomemory;
    633 
    634 #undef      ASMBitLastSetU16
    635 #pragma aux ASMBitLastSetU16 = \
     807#endif
     808
     809#if defined(__SW_0) || defined(__SW_1) || defined(__SW_2)
     810/* ASMBitLastSetU16: External file. */
     811#else
     812# undef      ASMBitLastSetU16
     813# pragma aux ASMBitLastSetU16 = \
    636814    "bsr ax, ax" \
    637815    "jz  not_found" \
     
    644822    value [ax] \
    645823    modify exact [ax] nomemory;
     824#endif
    646825
    647826#undef      ASMByteSwapU16
    648827#pragma aux ASMByteSwapU16 = \
    649     "ror ax, 8" \
     828    "xchg al, ah" \
    650829    parm [ax] nomemory \
    651830    value [ax] \
     
    654833#undef      ASMByteSwapU32
    655834#pragma aux ASMByteSwapU32 = \
    656     "xchg ax, dx" \
     835    "xchg dh, al" \
     836    "xchg dl, ah" \
    657837    parm [ax dx] nomemory \
    658838    value [ax dx] \
     
    661841#undef      ASMRotateLeftU32
    662842#pragma aux ASMRotateLeftU32 = \
     843    ".386" \
    663844    "shl    edx, 16" \
    664845    "mov    dx, ax" \
     
    672853#undef      ASMRotateRightU32
    673854#pragma aux ASMRotateRightU32 = \
     855    ".386" \
    674856    "shl    edx, 16" \
    675857    "mov    dx, ax" \
  • trunk/include/iprt/asm.h

    r58791 r59480  
    372372 * @param   pu32    Pointer to the 32-bit variable to update.
    373373 * @param   u32     The 32-bit value to assign to *pu32.
     374 *
     375 * @remarks Does not work on 286 and earlier.
    374376 */
    375377#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    428430 * @param   pu64    Pointer to the 64-bit variable to update.
    429431 * @param   u64     The 64-bit value to assign to *pu64.
     432 *
     433 * @remarks Works on 32-bit x86 CPUs starting with Pentium.
    430434 */
    431435#if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
     
    685689 * @param   u8New       The new value to assigned to *pu8.
    686690 * @param   u8Old       The old value to *pu8 compare with.
     691 *
     692 * @remarks x86: Requires a 486 or later.
    687693 */
    688694#if RT_INLINE_ASM_EXTERNAL || !RT_INLINE_ASM_GNU_STYLE
     
    714720 * @param   i8New       The new value to assigned to *pi8.
    715721 * @param   i8Old       The old value to *pi8 compare with.
     722 *
     723 * @remarks x86: Requires a 486 or later.
    716724 */
    717725DECLINLINE(bool) ASMAtomicCmpXchgS8(volatile int8_t *pi8, const int8_t i8New, const int8_t i8Old)
     
    730738 * @param   fNew        The new value to assigned to *pf.
    731739 * @param   fOld        The old value to *pf compare with.
     740 *
     741 * @remarks x86: Requires a 486 or later.
    732742 */
    733743DECLINLINE(bool) ASMAtomicCmpXchgBool(volatile bool *pf, const bool fNew, const bool fOld)
     
    746756 * @param   u32New      The new value to assigned to *pu32.
    747757 * @param   u32Old      The old value to *pu32 compare with.
     758 *
     759 * @remarks x86: Requires a 486 or later.
    748760 */
    749761#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    802814 * @param   i32New      The new value to assigned to *pi32.
    803815 * @param   i32Old      The old value to *pi32 compare with.
     816 *
     817 * @remarks x86: Requires a 486 or later.
    804818 */
    805819DECLINLINE(bool) ASMAtomicCmpXchgS32(volatile int32_t *pi32, const int32_t i32New, const int32_t i32Old)
     
    818832 * @param   u64New  The 64-bit value to assign to *pu64.
    819833 * @param   u64Old  The value to compare with.
     834 *
     835 * @remarks x86: Requires a Pentium or later.
    820836 */
    821837#if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
     
    917933 * @param   i64     The 64-bit value to assign to *pu64.
    918934 * @param   i64Old  The value to compare with.
     935 *
     936 * @remarks x86: Requires a Pentium or later.
    919937 */
    920938DECLINLINE(bool) ASMAtomicCmpXchgS64(volatile int64_t *pi64, const int64_t i64, const int64_t i64Old)
     
    933951 * @param   pvNew       The new value to assigned to *ppv.
    934952 * @param   pvOld       The old value to *ppv compare with.
     953 *
     954 * @remarks x86: Requires a 486 or later.
    935955 */
    936956DECLINLINE(bool) ASMAtomicCmpXchgPtrVoid(void * volatile *ppv, const void *pvNew, const void *pvOld)
     
    957977 *
    958978 * @remarks This is relatively type safe on GCC platforms.
     979 * @remarks x86: Requires a 486 or later.
    959980 */
    960981#ifdef __GNUC__
     
    9841005 *
    9851006 * @remarks This doesn't currently work for all handles (like RTFILE).
     1007 * @remarks x86: Requires a 486 or later.
    9861008 */
    9871009#if HC_ARCH_BITS == 32 || (ARCH_BITS == 16 && RT_FAR_DATA)
     
    10101032 * @param   uOld        The old value to *pu compare with.
    10111033 * @param   fRc         Where to store the result.
     1034 *
     1035 * @remarks x86: Requires a 486 or later.
    10121036 */
    10131037#define ASMAtomicCmpXchgSize(pu, uNew, uOld, fRc) \
     
    10361060 * @param   u32Old      The old value to *pu32 compare with.
    10371061 * @param   pu32Old     Pointer store the old value at.
     1062 *
     1063 * @remarks x86: Requires a 486 or later.
    10381064 */
    10391065#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    10981124 * @param   i32Old      The old value to *pi32 compare with.
    10991125 * @param   pi32Old     Pointer store the old value at.
     1126 *
     1127 * @remarks x86: Requires a 486 or later.
    11001128 */
    11011129DECLINLINE(bool) ASMAtomicCmpXchgExS32(volatile int32_t *pi32, const int32_t i32New, const int32_t i32Old, int32_t *pi32Old)
     
    11161144 * @param   u64Old  The value to compare with.
    11171145 * @param   pu64Old     Pointer store the old value at.
     1146 *
     1147 * @remarks x86: Requires a Pentium or later.
    11181148 */
    11191149#if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) \
     
    12171247 * @param   i64Old  The value to compare with.
    12181248 * @param   pi64Old Pointer store the old value at.
     1249 *
     1250 * @remarks x86: Requires a Pentium or later.
    12191251 */
    12201252DECLINLINE(bool) ASMAtomicCmpXchgExS64(volatile int64_t *pi64, const int64_t i64, const int64_t i64Old, int64_t *pi64Old)
     
    12621294 * @param   fRc         Where to store the result.
    12631295 * @param   puOldVal    Pointer to where to store the old value.
     1296 *
     1297 * @remarks x86: Requires a 486 or later.
    12641298 */
    12651299#define ASMAtomicCmpXchgExSize(pu, uNew, uOld, fRc, puOldVal) \
     
    12891323 * @param   pvOld       The old value to *ppv compare with.
    12901324 * @param   ppvOld      Pointer store the old value at.
     1325 *
     1326 * @remarks x86: Requires a 486 or later.
    12911327 */
    12921328DECLINLINE(bool) ASMAtomicCmpXchgExPtrVoid(void * volatile *ppv, const void *pvNew, const void *pvOld, void **ppvOld)
     
    13151351 *
    13161352 * @remarks This is relatively type safe on GCC platforms.
     1353 * @remarks x86: Requires a 486 or later.
    13171354 */
    13181355#ifdef __GNUC__
     
    15761613 * @param   pu64    Pointer to the 64-bit variable to read.
    15771614 *                  The memory pointed to must be writable.
    1578  * @remark  This will fault if the memory is read-only!
     1615 *
     1616 * @remarks This may fault if the memory is read-only!
     1617 * @remarks x86: Requires a Pentium or later.
    15791618 */
    15801619#if (RT_INLINE_ASM_EXTERNAL && !defined(RT_ARCH_AMD64)) \
     
    16551694 * @param   pu64    Pointer to the 64-bit variable to read.
    16561695 *                  The memory pointed to must be writable.
    1657  * @remark  This will fault if the memory is read-only!
     1696 *
     1697 * @remarks This may fault if the memory is read-only!
     1698 * @remarks x86: Requires a Pentium or later.
    16581699 */
    16591700#if !defined(RT_ARCH_AMD64) \
     
    17361777 * @param   pi64    Pointer to the 64-bit variable to read.
    17371778 *                  The memory pointed to must be writable.
    1738  * @remark  This will fault if the memory is read-only!
     1779 *
     1780 * @remarks This may fault if the memory is read-only!
     1781 * @remarks x86: Requires a Pentium or later.
    17391782 */
    17401783DECLINLINE(int64_t) ASMAtomicReadS64(volatile int64_t *pi64)
     
    17501793 * @param   pi64    Pointer to the 64-bit variable to read.
    17511794 *                  The memory pointed to must be writable.
    1752  * @remark  This will fault if the memory is read-only!
     1795 *
     1796 * @remarks This will fault if the memory is read-only!
     1797 * @remarks x86: Requires a Pentium or later.
    17531798 */
    17541799DECLINLINE(int64_t) ASMAtomicUoReadS64(volatile int64_t *pi64)
     
    24682513 * @param   pu16        Pointer to the value.
    24692514 * @param   u16         Number to add.
     2515 *
    24702516 * @remarks Currently not implemented, just to make 16-bit code happy.
     2517 * @remarks x86: Requires a 486 or later.
    24712518 */
    24722519DECLASM(uint16_t) ASMAtomicAddU16(uint16_t volatile *pu16, uint32_t u16);
     
    24792526 * @param   pu32        Pointer to the value.
    24802527 * @param   u32         Number to add.
     2528 *
     2529 * @remarks x86: Requires a 486 or later.
    24812530 */
    24822531#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    25222571 * @param   pi32        Pointer to the value.
    25232572 * @param   i32         Number to add.
     2573 *
     2574 * @remarks x86: Requires a 486 or later.
    25242575 */
    25252576DECLINLINE(int32_t) ASMAtomicAddS32(int32_t volatile *pi32, int32_t i32)
     
    25352586 * @param   pu64        Pointer to the value.
    25362587 * @param   u64         Number to add.
     2588 *
     2589 * @remarks x86: Requires a Pentium or later.
    25372590 */
    25382591#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    25762629 * @param   pi64        Pointer to the value.
    25772630 * @param   i64         Number to add.
     2631 *
     2632 * @remarks x86: Requires a Pentium or later.
    25782633 */
    25792634DECLINLINE(int64_t) ASMAtomicAddS64(int64_t volatile *pi64, int64_t i64)
     
    26322687 * @param   pu16        Pointer to the value.
    26332688 * @param   u16         Number to subtract.
     2689 *
     2690 * @remarks x86: Requires a 486 or later.
    26342691 */
    26352692DECLINLINE(uint16_t) ASMAtomicSubU16(uint16_t volatile *pu16, uint32_t u16)
     
    26452702 * @param   pi16        Pointer to the value.
    26462703 * @param   i16         Number to subtract.
     2704 *
     2705 * @remarks x86: Requires a 486 or later.
    26472706 */
    26482707DECLINLINE(int16_t) ASMAtomicSubS16(int16_t volatile *pi16, int16_t i16)
     
    26582717 * @param   pu32        Pointer to the value.
    26592718 * @param   u32         Number to subtract.
     2719 *
     2720 * @remarks x86: Requires a 486 or later.
    26602721 */
    26612722DECLINLINE(uint32_t) ASMAtomicSubU32(uint32_t volatile *pu32, uint32_t u32)
     
    26712732 * @param   pi32        Pointer to the value.
    26722733 * @param   i32         Number to subtract.
     2734 *
     2735 * @remarks x86: Requires a 486 or later.
    26732736 */
    26742737DECLINLINE(int32_t) ASMAtomicSubS32(int32_t volatile *pi32, int32_t i32)
     
    26842747 * @param   pu64        Pointer to the value.
    26852748 * @param   u64         Number to subtract.
     2749 *
     2750 * @remarks x86: Requires a Pentium or later.
    26862751 */
    26872752DECLINLINE(uint64_t) ASMAtomicSubU64(uint64_t volatile *pu64, uint64_t u64)
     
    26972762 * @param   pi64        Pointer to the value.
    26982763 * @param   i64         Number to subtract.
     2764 *
     2765 * @remarks x86: Requires a Pentium or later.
    26992766 */
    27002767DECLINLINE(int64_t) ASMAtomicSubS64(int64_t volatile *pi64, int64_t i64)
     
    27102777 * @param   pcb         Pointer to the size_t value.
    27112778 * @param   cb          Number to subtract.
     2779 *
     2780 * @remarks x86: Requires a 486 or later.
    27122781 */
    27132782DECLINLINE(size_t) ASMAtomicSubZ(size_t volatile *pcb, size_t cb)
     
    27332802 * @param   uNew    The value to subtract to *pu.
    27342803 * @param   puOld   Where to store the old value.
     2804 *
     2805 * @remarks x86: Requires a 486 or later.
    27352806 */
    27362807#define ASMAtomicSubSize(pu, uNew, puOld) \
     
    27512822 * @param   pu16        Pointer to the value to increment.
    27522823 * @remarks Not implemented. Just to make 16-bit code happy.
     2824 *
     2825 * @remarks x86: Requires a 486 or later.
    27532826 */
    27542827DECLASM(uint16_t) ASMAtomicIncU16(uint16_t volatile *pu16);
     
    27602833 * @returns The new value.
    27612834 * @param   pu32        Pointer to the value to increment.
     2835 *
     2836 * @remarks x86: Requires a 486 or later.
    27622837 */
    27632838#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    28032878 * @returns The new value.
    28042879 * @param   pi32        Pointer to the value to increment.
     2880 *
     2881 * @remarks x86: Requires a 486 or later.
    28052882 */
    28062883DECLINLINE(int32_t) ASMAtomicIncS32(int32_t volatile *pi32)
     
    28152892 * @returns The new value.
    28162893 * @param   pu64        Pointer to the value to increment.
     2894 *
     2895 * @remarks x86: Requires a Pentium or later.
    28172896 */
    28182897#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    28472926 * @returns The new value.
    28482927 * @param   pi64        Pointer to the value to increment.
     2928 *
     2929 * @remarks x86: Requires a Pentium or later.
    28492930 */
    28502931DECLINLINE(int64_t) ASMAtomicIncS64(int64_t volatile *pi64)
     
    28592940 * @returns The new value.
    28602941 * @param   pcb         Pointer to the value to increment.
     2942 *
     2943 * @remarks x86: Requires a 486 or later.
    28612944 */
    28622945DECLINLINE(int64_t) ASMAtomicIncZ(size_t volatile *pcb)
     
    28812964 * @param   pu16        Pointer to the value to decrement.
    28822965 * @remarks Not implemented. Just to make 16-bit code happy.
     2966 *
     2967 * @remarks x86: Requires a 486 or later.
    28832968 */
    28842969DECLASM(uint32_t) ASMAtomicDecU16(uint16_t volatile *pu16);
     
    28902975 * @returns The new value.
    28912976 * @param   pu32        Pointer to the value to decrement.
     2977 *
     2978 * @remarks x86: Requires a 486 or later.
    28922979 */
    28932980#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    29333020 * @returns The new value.
    29343021 * @param   pi32        Pointer to the value to decrement.
     3022 *
     3023 * @remarks x86: Requires a 486 or later.
    29353024 */
    29363025DECLINLINE(int32_t) ASMAtomicDecS32(int32_t volatile *pi32)
     
    29453034 * @returns The new value.
    29463035 * @param   pu64        Pointer to the value to decrement.
     3036 *
     3037 * @remarks x86: Requires a Pentium or later.
    29473038 */
    29483039#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    29763067 * @returns The new value.
    29773068 * @param   pi64        Pointer to the value to decrement.
     3069 *
     3070 * @remarks x86: Requires a Pentium or later.
    29783071 */
    29793072DECLINLINE(int64_t) ASMAtomicDecS64(int64_t volatile *pi64)
     
    29883081 * @returns The new value.
    29893082 * @param   pcb         Pointer to the value to decrement.
     3083 *
     3084 * @remarks x86: Requires a 486 or later.
    29903085 */
    29913086DECLINLINE(int64_t) ASMAtomicDecZ(size_t volatile *pcb)
     
    30083103 * @param   pu32   Pointer to the pointer variable to OR u32 with.
    30093104 * @param   u32    The value to OR *pu32 with.
     3105 *
     3106 * @remarks x86: Requires a 386 or later.
    30103107 */
    30113108#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    30443141 * @param   pi32   Pointer to the pointer variable to OR u32 with.
    30453142 * @param   i32    The value to OR *pu32 with.
     3143 *
     3144 * @remarks x86: Requires a 386 or later.
    30463145 */
    30473146DECLINLINE(void) ASMAtomicOrS32(int32_t volatile *pi32, int32_t i32)
     
    30563155 * @param   pu64   Pointer to the pointer variable to OR u64 with.
    30573156 * @param   u64    The value to OR *pu64 with.
     3157 *
     3158 * @remarks x86: Requires a Pentium or later.
    30583159 */
    30593160#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    30893190 * @param   pi64   Pointer to the pointer variable to OR u64 with.
    30903191 * @param   i64    The value to OR *pu64 with.
     3192 *
     3193 * @remarks x86: Requires a Pentium or later.
    30913194 */
    30923195DECLINLINE(void) ASMAtomicOrS64(int64_t volatile *pi64, int64_t i64)
     
    31013204 * @param   pu32   Pointer to the pointer variable to AND u32 with.
    31023205 * @param   u32    The value to AND *pu32 with.
     3206 *
     3207 * @remarks x86: Requires a 386 or later.
    31033208 */
    31043209#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    31373242 * @param   pi32   Pointer to the pointer variable to AND i32 with.
    31383243 * @param   i32    The value to AND *pi32 with.
     3244 *
     3245 * @remarks x86: Requires a 386 or later.
    31393246 */
    31403247DECLINLINE(void) ASMAtomicAndS32(int32_t volatile *pi32, int32_t i32)
     
    31493256 * @param   pu64   Pointer to the pointer variable to AND u64 with.
    31503257 * @param   u64    The value to AND *pu64 with.
     3258 *
     3259 * @remarks x86: Requires a Pentium or later.
    31513260 */
    31523261#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    31823291 * @param   pi64   Pointer to the pointer variable to AND i64 with.
    31833292 * @param   i64    The value to AND *pi64 with.
     3293 *
     3294 * @remarks x86: Requires a Pentium or later.
    31843295 */
    31853296DECLINLINE(void) ASMAtomicAndS64(int64_t volatile *pi64, int64_t i64)
     
    31943305 * @param   pu32   Pointer to the pointer variable to OR u32 with.
    31953306 * @param   u32    The value to OR *pu32 with.
     3307 *
     3308 * @remarks x86: Requires a 386 or later.
    31963309 */
    31973310#if RT_INLINE_ASM_EXTERNAL
     
    32273340 * @param   pi32   Pointer to the pointer variable to OR u32 with.
    32283341 * @param   i32    The value to OR *pu32 with.
     3342 *
     3343 * @remarks x86: Requires a 386 or later.
    32293344 */
    32303345DECLINLINE(void) ASMAtomicUoOrS32(int32_t volatile *pi32, int32_t i32)
     
    32393354 * @param   pu64   Pointer to the pointer variable to OR u64 with.
    32403355 * @param   u64    The value to OR *pu64 with.
     3356 *
     3357 * @remarks x86: Requires a Pentium or later.
    32413358 */
    32423359#if RT_INLINE_ASM_EXTERNAL
     
    32693386 * @param   pi64   Pointer to the pointer variable to OR u64 with.
    32703387 * @param   i64    The value to OR *pu64 with.
     3388 *
     3389 * @remarks x86: Requires a Pentium or later.
    32713390 */
    32723391DECLINLINE(void) ASMAtomicUoOrS64(int64_t volatile *pi64, int64_t i64)
     
    32813400 * @param   pu32   Pointer to the pointer variable to AND u32 with.
    32823401 * @param   u32    The value to AND *pu32 with.
     3402 *
     3403 * @remarks x86: Requires a 386 or later.
    32833404 */
    32843405#if RT_INLINE_ASM_EXTERNAL
     
    33143435 * @param   pi32   Pointer to the pointer variable to AND i32 with.
    33153436 * @param   i32    The value to AND *pi32 with.
     3437 *
     3438 * @remarks x86: Requires a 386 or later.
    33163439 */
    33173440DECLINLINE(void) ASMAtomicUoAndS32(int32_t volatile *pi32, int32_t i32)
     
    33263449 * @param   pu64   Pointer to the pointer variable to AND u64 with.
    33273450 * @param   u64    The value to AND *pu64 with.
     3451 *
     3452 * @remarks x86: Requires a Pentium or later.
    33283453 */
    33293454#if RT_INLINE_ASM_EXTERNAL
     
    33563481 * @param   pi64   Pointer to the pointer variable to AND i64 with.
    33573482 * @param   i64    The value to AND *pi64 with.
     3483 *
     3484 * @remarks x86: Requires a Pentium or later.
    33583485 */
    33593486DECLINLINE(void) ASMAtomicUoAndS64(int64_t volatile *pi64, int64_t i64)
     
    33683495 * @returns the new value.
    33693496 * @param   pu32   Pointer to the variable to increment.
     3497 *
     3498 * @remarks x86: Requires a 486 or later.
    33703499 */
    33713500#if RT_INLINE_ASM_EXTERNAL
     
    34073536 * @returns the new value.
    34083537 * @param   pu32   Pointer to the variable to decrement.
     3538 *
     3539 * @remarks x86: Requires a 486 or later.
    34093540 */
    34103541#if RT_INLINE_ASM_EXTERNAL
     
    38533984 *                      the memory access isn't atomic!
    38543985 * @param   iBit        The bit to set.
     3986 *
     3987 * @remarks x86: Requires a 386 or later.
    38553988 */
    38563989#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    39344067 *                      the memory access isn't atomic!
    39354068 * @param   iBit        The bit to toggle.
     4069 *
    39364070 * @remarks No memory barrier, take care on smp.
     4071 * @remarks x86: Requires a 386 or later.
    39374072 */
    39384073#if RT_INLINE_ASM_EXTERNAL
     
    40134148 *                      the memory access isn't atomic!
    40144149 * @param   iBit        The bit to test and set.
     4150 *
     4151 * @remarks x86: Requires a 386 or later.
    40154152 */
    40164153#if RT_INLINE_ASM_EXTERNAL
     
    41054242 *                      the memory access isn't atomic!
    41064243 * @param   iBit        The bit to set.
     4244 *
     4245 * @remarks x86: Requires a 386 or later.
    41074246 */
    41084247#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    42084347 *
    42094348 * @remarks No memory barrier, take care on smp.
     4349 * @remarks x86: Requires a 386 or later.
    42104350 */
    42114351#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
     
    43104450 *                      the memory access isn't atomic!
    43114451 * @param   iBit        The bit to test and toggle.
     4452 *
     4453 * @remarks x86: Requires a 386 or later.
    43124454 */
    43134455#if RT_INLINE_ASM_EXTERNAL
     
    49215063 */
    49225064#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
    4923 DECLASM(unsigned) ASMBitFirstSetU16(uint32_t u16);
    4924 #else
    4925 DECLINLINE(unsigned) ASMBitFirstSetU16(uint32_t u16)
     5065DECLASM(unsigned) ASMBitFirstSetU16(uint16_t u16);
     5066#else
     5067DECLINLINE(unsigned) ASMBitFirstSetU16(uint16_t u16)
    49265068{
    49275069    return ASMBitFirstSetU32((uint32_t)u16);
     
    50595201 */
    50605202#if RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN
    5061 DECLASM(unsigned) ASMBitLastSetU16(uint32_t u16);
    5062 #else
    5063 DECLINLINE(unsigned) ASMBitLastSetU16(uint32_t u16)
     5203DECLASM(unsigned) ASMBitLastSetU16(uint16_t u16);
     5204#else
     5205DECLINLINE(unsigned) ASMBitLastSetU16(uint16_t u16)
    50645206{
    50655207    return ASMBitLastSetU32((uint32_t)u16);
  • trunk/include/iprt/mangling.h

    r59459 r59480  
    133133# define ASMBitFirstSet                                 RT_MANGLER(ASMBitFirstSet)
    134134# define ASMBitFirstSet_EndProc                         RT_MANGLER(ASMBitFirstSet_EndProc)
     135# define ASMBitFirstSetU16                              RT_MANGLER(ASMBitFirstSetU16)
     136# define ASMBitFirstSetU16_EndProc                      RT_MANGLER(ASMBitFirstSetU16_EndProc)
    135137# define ASMBitFirstSetU32                              RT_MANGLER(ASMBitFirstSetU32)
    136138# define ASMBitFirstSetU32_EndProc                      RT_MANGLER(ASMBitFirstSetU32_EndProc)
     139# define ASMBitFirstSetU64                              RT_MANGLER(ASMBitFirstSetU64)
     140# define ASMBitFirstSetU64_EndProc                      RT_MANGLER(ASMBitFirstSetU64_EndProc)
     141# define ASMBitLastSetU16                               RT_MANGLER(ASMBitLastSetU16)
     142# define ASMBitLastSetU16_EndProc                       RT_MANGLER(ASMBitLastSetU16_EndProc)
    137143# define ASMBitLastSetU32                               RT_MANGLER(ASMBitLastSetU32)
    138144# define ASMBitLastSetU32_EndProc                       RT_MANGLER(ASMBitLastSetU32_EndProc)
     145# define ASMBitLastSetU64                               RT_MANGLER(ASMBitLastSetU64)
     146# define ASMBitLastSetU64_EndProc                       RT_MANGLER(ASMBitLastSetU64_EndProc)
    139147# define ASMBitNextClear                                RT_MANGLER(ASMBitNextClear)
    140148# define ASMBitNextClear_EndProc                        RT_MANGLER(ASMBitNextClear_EndProc)
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette