VirtualBox

Changeset 21535 in vbox for trunk/include


Ignore:
Timestamp:
Jul 13, 2009 2:44:54 PM (16 years ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
50008
Message:

iprt/asm.h: Added ASMIntAreEnabled and ASMNopPause. Fixed a bunch of ANSI C warnings.

File:
1 edited

Legend:

Unmodified
Added
Removed
  • trunk/include/iprt/asm.h

    r21451 r21535  
    15011501
    15021502/**
     1503 * Are interrupts enabled?
     1504 *
     1505 * @returns true / false.
     1506 */
     1507#if RT_INLINE_ASM_EXTERNAL
     1508DECLASM(RTCCUINTREG) ASMIntAreEnabled(void);
     1509#else
     1510DECLINLINE(RTCCUINTREG) ASMIntAreEnabled(void)
     1511{
     1512    RTCCUINTREG uFlags = ASMGetFlags();
     1513    return uFlags & 0x200 /* X86_EFL_IF */ ? true : false;
     1514}
     1515#endif
     1516
     1517
     1518/**
    15031519 * Halts the CPU until interrupted.
    15041520 */
     
    15131529    __asm {
    15141530        hlt
     1531    }
     1532# endif
     1533}
     1534#endif
     1535
     1536
     1537/**
     1538 * The PAUSE variant of NOP for helping hyperthreaded CPUs detecting spin locks.
     1539 */
     1540#if RT_INLINE_ASM_EXTERNAL
     1541DECLASM(void) ASMNopPause(void);
     1542#else
     1543DECLINLINE(void) ASMNopPause(void)
     1544{
     1545# if RT_INLINE_ASM_GNU_STYLE
     1546    __asm__ __volatile__(".byte 0xf3,0x90\n\t");
     1547# else
     1548    __asm {
     1549        _emit 0f3h
     1550        _emit 090h
    15151551    }
    15161552# endif
     
    29142950# define ASMAtomicXchgHandle(ph, hNew, phRes) \
    29152951   do { \
    2916        *(uint32_t *)(phRes) = ASMAtomicXchgU32((uint32_t volatile *)(ph), (const uint32_t)(hNew)); \
    29172952       AssertCompile(sizeof(*(ph))    == sizeof(uint32_t)); \
    29182953       AssertCompile(sizeof(*(phRes)) == sizeof(uint32_t)); \
     2954       *(uint32_t *)(phRes) = ASMAtomicXchgU32((uint32_t volatile *)(ph), (const uint32_t)(hNew)); \
    29192955   } while (0)
    29202956#elif HC_ARCH_BITS == 64
    29212957# define ASMAtomicXchgHandle(ph, hNew, phRes) \
    29222958   do { \
    2923        *(uint64_t *)(phRes) = ASMAtomicXchgU64((uint64_t volatile *)(ph), (const uint64_t)(hNew)); \
    29242959       AssertCompile(sizeof(*(ph))    == sizeof(uint64_t)); \
    29252960       AssertCompile(sizeof(*(phRes)) == sizeof(uint64_t)); \
     2961       *(uint64_t *)(phRes) = ASMAtomicXchgU64((uint64_t volatile *)(ph), (const uint64_t)(hNew)); \
    29262962   } while (0)
    29272963#else
     
    31913227# define ASMAtomicCmpXchgHandle(ph, hNew, hOld, fRc) \
    31923228   do { \
     3229       AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    31933230       (fRc) = ASMAtomicCmpXchgU32((uint32_t volatile *)(ph), (const uint32_t)(hNew), (const uint32_t)(hOld)); \
    3194        AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    31953231   } while (0)
    31963232#elif HC_ARCH_BITS == 64
    31973233# define ASMAtomicCmpXchgHandle(ph, hNew, hOld, fRc) \
    31983234   do { \
     3235       AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    31993236       (fRc) = ASMAtomicCmpXchgU64((uint64_t volatile *)(ph), (const uint64_t)(hNew), (const uint64_t)(hOld)); \
    3200        AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    32013237   } while (0)
    32023238#else
     
    34393475# define ASMAtomicCmpXchgExHandle(ph, hNew, hOld, fRc, phOldVal) \
    34403476    do { \
    3441         (fRc) = ASMAtomicCmpXchgExU32((volatile uint32_t *)(pu), (uint32_t)(uNew), (uint32_t)(uOld), (uint32_t *)(puOldVal)); \
    34423477        AssertCompile(sizeof(*ph)       == sizeof(uint32_t)); \
    34433478        AssertCompile(sizeof(*phOldVal) == sizeof(uint32_t)); \
     3479        (fRc) = ASMAtomicCmpXchgExU32((volatile uint32_t *)(pu), (uint32_t)(uNew), (uint32_t)(uOld), (uint32_t *)(puOldVal)); \
    34443480    } while (0)
    34453481#elif HC_ARCH_BITS == 64
    34463482# define ASMAtomicCmpXchgExHandle(ph, hNew, hOld, fRc, phOldVal) \
    34473483    do { \
    3448         (fRc) = ASMAtomicCmpXchgExU64((volatile uint64_t *)(pu), (uint64_t)(uNew), (uint64_t)(uOld), (uint64_t *)(puOldVal)); \
    34493484        AssertCompile(sizeof(*(ph))       == sizeof(uint64_t)); \
    34503485        AssertCompile(sizeof(*(phOldVal)) == sizeof(uint64_t)); \
     3486        (fRc) = ASMAtomicCmpXchgExU64((volatile uint64_t *)(pu), (uint64_t)(uNew), (uint64_t)(uOld), (uint64_t *)(puOldVal)); \
    34513487    } while (0)
    34523488#else
     
    43484384# define ASMAtomicReadHandle(ph, phRes) \
    43494385    do { \
    4350         *(uint32_t *)(phRes) = ASMAtomicReadU32((uint32_t volatile *)(ph)); \
    43514386        AssertCompile(sizeof(*(ph))    == sizeof(uint32_t)); \
    43524387        AssertCompile(sizeof(*(phRes)) == sizeof(uint32_t)); \
     4388        *(uint32_t *)(phRes) = ASMAtomicReadU32((uint32_t volatile *)(ph)); \
    43534389    } while (0)
    43544390#elif HC_ARCH_BITS == 64
    43554391# define ASMAtomicReadHandle(ph, phRes) \
    43564392    do { \
    4357         *(uint64_t *)(phRes) = ASMAtomicReadU64((uint64_t volatile *)(ph)); \
    43584393        AssertCompile(sizeof(*(ph))    == sizeof(uint64_t)); \
    43594394        AssertCompile(sizeof(*(phRes)) == sizeof(uint64_t)); \
     4395        *(uint64_t *)(phRes) = ASMAtomicReadU64((uint64_t volatile *)(ph)); \
    43604396    } while (0)
    43614397#else
     
    43754411# define ASMAtomicUoReadHandle(ph, phRes) \
    43764412    do { \
    4377         *(uint32_t *)(phRes) = ASMAtomicUoReadU32((uint32_t volatile *)(ph)); \
    43784413        AssertCompile(sizeof(*(ph))    == sizeof(uint32_t)); \
    43794414        AssertCompile(sizeof(*(phRes)) == sizeof(uint32_t)); \
     4415        *(uint32_t *)(phRes) = ASMAtomicUoReadU32((uint32_t volatile *)(ph)); \
    43804416    } while (0)
    43814417#elif HC_ARCH_BITS == 64
    43824418# define ASMAtomicUoReadHandle(ph, phRes) \
    43834419    do { \
    4384         *(uint64_t *)(phRes) = ASMAtomicUoReadU64((uint64_t volatile *)(ph)); \
    43854420        AssertCompile(sizeof(*(ph))    == sizeof(uint64_t)); \
    43864421        AssertCompile(sizeof(*(phRes)) == sizeof(uint64_t)); \
     4422        *(uint64_t *)(phRes) = ASMAtomicUoReadU64((uint64_t volatile *)(ph)); \
    43874423    } while (0)
    43884424#else
     
    47084744# define ASMAtomicWriteHandle(ph, hNew) \
    47094745    do { \
     4746        AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    47104747        ASMAtomicWriteU32((uint32_t volatile *)(ph), (const uint32_t)(hNew)); \
    4711         AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    47124748    } while (0)
    47134749#elif HC_ARCH_BITS == 64
    47144750# define ASMAtomicWriteHandle(ph, hNew) \
    47154751    do { \
     4752        AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    47164753        ASMAtomicWriteU64((uint64_t volatile *)(ph), (const uint64_t)(hNew)); \
    4717         AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    47184754    } while (0)
    47194755#else
     
    47334769# define ASMAtomicUoWriteHandle(ph, hNew) \
    47344770    do { \
     4771        AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    47354772        ASMAtomicUoWriteU32((uint32_t volatile *)(ph), (const uint32_t)hNew); \
    4736         AssertCompile(sizeof(*(ph)) == sizeof(uint32_t)); \
    47374773    } while (0)
    47384774#elif HC_ARCH_BITS == 64
    47394775# define ASMAtomicUoWriteHandle(ph, hNew) \
    47404776    do { \
     4777        AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    47414778        ASMAtomicUoWriteU64((uint64_t volatile *)(ph), (const uint64_t)hNew); \
    4742         AssertCompile(sizeof(*(ph)) == sizeof(uint64_t)); \
    47434779    } while (0)
    47444780#else
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette