VirtualBox changeset r93752

Timestamp: Feb 15, 2022 2:01:53 PM
Author: vboxsync
Message: IPRT/asm.h,tstRTInlineAsm: Added 8-bit and 16-bit extended cmpxchg functions (needed for IEM). bugref:9898
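For context: the "extended" compare-exchange variants differ from the plain ASMAtomicCmpXchg* ones in that they also hand back the value actually found at the target address through an extra output pointer, which is what the u8Old/u16Old checks in the new test workers below verify. A rough portable sketch of that contract, using the GCC/Clang __atomic builtins rather than the real inline-assembly/intrinsic implementation in include/iprt/asm.h (the function name here is made up for illustration):

#include <stdbool.h>
#include <stdint.h>

/* Illustrative only -- not the IPRT implementation. Compare *pu8 with u8Old;
   if they match, store u8New. Either way, return the value that was actually
   in *pu8 via *pu8ActualOld and report whether the exchange happened. */
static bool MySketchCmpXchgExU8(uint8_t volatile *pu8, uint8_t u8New, uint8_t u8Old, uint8_t *pu8ActualOld)
{
    uint8_t u8Expected = u8Old;
    bool fXchg = __atomic_compare_exchange_n(pu8, &u8Expected, u8New, false /*fWeak*/,
                                             __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    *pu8ActualOld = u8Expected; /* on failure: the value seen; on success: equals u8Old */
    return fXchg;
}

With that contract, the second CHECK_OP_AND_VAL_EX2 line in the U8 worker expects the exchange to succeed (the worker just stored 0xff and passes 0xff as the expected value), leave 0 in *pu8, and report 0xff as the old value.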

File (1 edited):

  • trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp

r93515 → r93752

+DECLINLINE(void) tstASMAtomicCmpXchgExU8Worker(uint8_t volatile *pu8)
+{
+    *pu8          = UINT8_C(0xff);
+    uint8_t u8Old = UINT8_C(0x11);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, 0, &u8Old), false, UINT8_C(0xff), UINT8_C(0xff));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, UINT8_C(0xff), &u8Old), true,  0, UINT8_C(0xff));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, 0, UINT8_C(0xff), &u8Old), false, 0, UINT8_C(0x00));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xfd), 0, &u8Old), true,  UINT8_C(0xfd), 0);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xfd), 0, &u8Old), false, UINT8_C(0xfd), UINT8_C(0xfd));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu8, u8Old, ASMAtomicCmpXchgExU8(pu8, UINT8_C(0xe0), UINT8_C(0xfd), &u8Old), true,  UINT8_C(0xe0), UINT8_C(0xfd));
+
+    int8_t volatile *pi8   = (int8_t volatile *)pu8;
+    int8_t           i8Old = 0;
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 32, 32, &i8Old), false, -32, -32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 32, -32, &i8Old), true, 32, -32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MIN, 32, &i8Old), true, INT8_MIN, 32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MIN, 32, &i8Old), false, INT8_MIN, INT8_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MAX, INT8_MAX, &i8Old), false, INT8_MIN, INT8_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, INT8_MAX, INT8_MIN, &i8Old), true, INT8_MAX, INT8_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi8, i8Old, ASMAtomicCmpXchgExS8(pi8, 42, INT8_MAX, &i8Old), true, 42, INT8_MAX);
+}
+
+
+DECLINLINE(void) tstASMAtomicCmpXchgExU16Worker(uint16_t volatile *pu16)
+{
+    *pu16           = UINT16_C(0xffff);
+    uint16_t u16Old = UINT16_C(0x5111);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, 0, &u16Old), false, UINT16_C(0xffff), UINT16_C(0xffff));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, UINT16_C(0xffff), &u16Old), true,  0, UINT16_C(0xffff));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, 0, UINT16_C(0xffff), &u16Old), false, 0, UINT16_C(0x0000));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0x8efd), 0, &u16Old), true,  UINT16_C(0x8efd), 0);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0x8efd), 0, &u16Old), false, UINT16_C(0x8efd), UINT16_C(0x8efd));
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu16, u16Old, ASMAtomicCmpXchgExU16(pu16, UINT16_C(0xffe0), UINT16_C(0x8efd), &u16Old), true,  UINT16_C(0xffe0), UINT16_C(0x8efd));
+
+    int16_t volatile *pi16   = (int16_t volatile *)pu16;
+    int16_t           i16Old = 0;
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 32, 32, &i16Old), false, -32, -32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 32, -32, &i16Old), true, 32, -32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MIN, 32, &i16Old), true, INT16_MIN, 32);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MIN, 32, &i16Old), false, INT16_MIN, INT16_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MAX, INT16_MAX, &i16Old), false, INT16_MIN, INT16_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, INT16_MAX, INT16_MIN, &i16Old), true, INT16_MAX, INT16_MIN);
+    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi16, i16Old, ASMAtomicCmpXchgExS16(pi16, 42, INT16_MAX, &i16Old), true, 42, INT16_MAX);
+}
+
+
 DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
 {
…
 static void tstASMAtomicCmpXchgEx(void)
 {
+    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU8, uint8_t);
+    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU16, uint16_t);
     DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
     DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
…
     static uint64_t volatile s_u64;
     static int64_t  volatile s_i64;
+    static uint8_t  s_u8Old;
+    static int8_t   s_i8Old;
+    static uint16_t s_u16Old;
+    static int16_t  s_i16Old;
+    static uint32_t s_u32Old;
+    static int32_t  s_i32Old;
+    static uint64_t s_u64Old;
+    static int64_t  s_i64Old;
     unsigned i;
     const unsigned cRounds = _16M;       /* Must be multiple of 8 */
…
     BENCH(ASMAtomicXchgU64(&s_u64, 0),           "ASMAtomicXchgU64");
     BENCH(ASMAtomicXchgS64(&s_i64, 0),           "ASMAtomicXchgS64");
+    BENCH(ASMAtomicCmpXchgU8(&s_u8, 0, 0),       "ASMAtomicCmpXchgU8");
+    BENCH(ASMAtomicCmpXchgS8(&s_i8, 0, 0),       "ASMAtomicCmpXchgS8");
+    //BENCH(ASMAtomicCmpXchgU16(&s_u16, 0, 0),     "ASMAtomicCmpXchgU16");
+    //BENCH(ASMAtomicCmpXchgS16(&s_i16, 0, 0),     "ASMAtomicCmpXchgS16");
     BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0),     "ASMAtomicCmpXchgU32");
     BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0),     "ASMAtomicCmpXchgS32");
     BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0),     "ASMAtomicCmpXchgU64");
     BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0),     "ASMAtomicCmpXchgS64");
+    BENCH(ASMAtomicCmpXchgU8(&s_u8, 0, 1),       "ASMAtomicCmpXchgU8/neg");
+    BENCH(ASMAtomicCmpXchgS8(&s_i8, 0, 1),       "ASMAtomicCmpXchgS8/neg");
+    //BENCH(ASMAtomicCmpXchgU16(&s_u16, 0, 1),     "ASMAtomicCmpXchgU16/neg");
+    //BENCH(ASMAtomicCmpXchgS16(&s_s16, 0, 1),     "ASMAtomicCmpXchgS16/neg");
     BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1),     "ASMAtomicCmpXchgU32/neg");
     BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1),     "ASMAtomicCmpXchgS32/neg");
     BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1),     "ASMAtomicCmpXchgU64/neg");
     BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1),     "ASMAtomicCmpXchgS64/neg");
+    BENCH(ASMAtomicCmpXchgExU8(&s_u8, 0, 0, &s_u8Old),    "ASMAtomicCmpXchgExU8");
+    BENCH(ASMAtomicCmpXchgExS8(&s_i8, 0, 0, &s_i8Old),    "ASMAtomicCmpXchgExS8");
+    BENCH(ASMAtomicCmpXchgExU16(&s_u16, 0, 0, &s_u16Old), "ASMAtomicCmpXchgExU16");
+    BENCH(ASMAtomicCmpXchgExS16(&s_i16, 0, 0, &s_i16Old), "ASMAtomicCmpXchgExS16");
+    BENCH(ASMAtomicCmpXchgExU32(&s_u32, 0, 0, &s_u32Old), "ASMAtomicCmpXchgExU32");
+    BENCH(ASMAtomicCmpXchgExS32(&s_i32, 0, 0, &s_i32Old), "ASMAtomicCmpXchgExS32");
+    BENCH(ASMAtomicCmpXchgExU64(&s_u64, 0, 0, &s_u64Old), "ASMAtomicCmpXchgExU64");
+    BENCH(ASMAtomicCmpXchgExS64(&s_i64, 0, 0, &s_i64Old), "ASMAtomicCmpXchgExS64");
+    BENCH(ASMAtomicCmpXchgExU8(&s_u8, 0, 1, &s_u8Old),    "ASMAtomicCmpXchgExU8/neg");
+    BENCH(ASMAtomicCmpXchgExS8(&s_i8, 0, 1, &s_i8Old),    "ASMAtomicCmpXchgExS8/neg");
+    BENCH(ASMAtomicCmpXchgExU16(&s_u16, 0, 1, &s_u16Old), "ASMAtomicCmpXchgExU16/neg");
+    BENCH(ASMAtomicCmpXchgExS16(&s_i16, 0, 1, &s_i16Old), "ASMAtomicCmpXchgExS16/neg");
+    BENCH(ASMAtomicCmpXchgExU32(&s_u32, 0, 1, &s_u32Old), "ASMAtomicCmpXchgExU32/neg");
+    BENCH(ASMAtomicCmpXchgExS32(&s_i32, 0, 1, &s_i32Old), "ASMAtomicCmpXchgExS32/neg");
+    BENCH(ASMAtomicCmpXchgExU64(&s_u64, 0, 1, &s_u64Old), "ASMAtomicCmpXchgExU64/neg");
+    BENCH(ASMAtomicCmpXchgExS64(&s_i64, 0, 1, &s_i64Old), "ASMAtomicCmpXchgExS64/neg");
     BENCH(ASMAtomicIncU32(&s_u32),               "ASMAtomicIncU32");
     BENCH(ASMAtomicIncS32(&s_i32),               "ASMAtomicIncS32");
-    BENCH(ASMAtomicDecU32(&s_u32),               "ASMAtomicDecU32");
+    BENCH(ASMAtomicDecU32(&s_u32),               "ASMAtomicDecU32");
     BENCH(ASMAtomicDecS32(&s_i32),               "ASMAtomicDecS32");
     BENCH(ASMAtomicAddU32(&s_u32, 5),            "ASMAtomicAddU32");
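The BENCH lines added to the benchmark section time both the matching case (the expected value equals the current contents, so the compare-exchange keeps succeeding) and a "/neg" case where the expected value deliberately mismatches, e.g. ASMAtomicCmpXchgU8(&s_u8, 0, 1) with s_u8 holding 0, so every iteration takes the failure path. The BENCH macro itself is defined earlier in tstRTInlineAsm.cpp and is not part of this hunk; a minimal sketch of that kind of per-call timing loop, assuming IPRT's RTTimeNanoTS/RTPrintf and the illustrative name MY_BENCH (cRounds is the iteration count from the surrounding context, _16M in the testcase):

#include <iprt/time.h>     /* RTTimeNanoTS */
#include <iprt/stream.h>   /* RTPrintf */

/* Illustrative harness only -- not the real BENCH macro, which does its own
   reporting. Runs a_Expr cRounds times and prints the average cost per call. */
#define MY_BENCH(a_Expr, a_pszName) \
    do { \
        uint64_t const u64Start   = RTTimeNanoTS(); \
        for (unsigned iLoop = 0; iLoop < cRounds; iLoop++) \
            a_Expr; \
        uint64_t const cNsElapsed = RTTimeNanoTS() - u64Start; \
        RTPrintf("%-40s %u ps/call\n", a_pszName, (unsigned)(cNsElapsed * 1000 / cRounds)); \
    } while (0)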