
Changeset 104294 in vbox


Timestamp:  Apr 11, 2024 12:28:26 PM
Author:     vboxsync
Message:    VMM/IEM: Implement native emitters for psllw, pslld, psllq, bugref:10652
Location:   trunk
Files:      3 edited

Legend: unmodified context lines are prefixed with a space, added lines with "+", removed lines with "-".
  • trunk/include/iprt/armv8.h (r104293 → r104294)

     typedef enum ARMV8INSTRUSHIFTSZ
     {
    -    kArmv8InstrShiftSz_U8  = 16,  /**< Byte. */
    -    kArmv8InstrShiftSz_U16 = 32,  /**< Halfword. */
    -    kArmv8InstrShiftSz_U32 = 64,  /**< 32-bit. */
    -    kArmv8InstrShiftSz_U64 = 128  /**< 64-bit. */
    +    kArmv8InstrShiftSz_U8  =  8,  /**< Byte. */
    +    kArmv8InstrShiftSz_U16 = 16,  /**< Halfword. */
    +    kArmv8InstrShiftSz_U32 = 32,  /**< 32-bit. */
    +    kArmv8InstrShiftSz_U64 = 64   /**< 64-bit. */
     } ARMV8INSTRUSHIFTSZ;

    …

                    || (enmSz == kArmv8InstrShiftSz_U64 && cShift <= 64)));

    -    return UINT32_C(0xf000400)
    +    return UINT32_C(0x0f000400)
              | ((uint32_t)f128Bit << 30)
              | ((uint32_t)fUnsigned << 29)
    -         | (((uint32_t)enmSz - cShift) << 16)
    +         | ((((uint32_t)enmSz << 1) - cShift) << 16)
              | ((uint32_t)fRound << 13)
              | ((uint32_t)fAccum << 12)
    …
              | iVecRegDst;
     }
    +
    +
    +/**
    + * A64: Encodes SHL (vector, register).
    + *
    + * @returns The encoded instruction.
    + * @param   iVecRegDst  The vector register to put the result into.
    + * @param   iVecRegSrc  The vector source register.
    + * @param   cShift      Number of bits to shift.
    + * @param   enmSz       Element size.
    + * @param   f128Bit     Flag whether this operates on the full 128-bit (true, default) of the vector register
    + *                      or just the low 64-bit (false).
    + */
    +DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShlImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
    +                                                     bool f128Bit = true)
    +{
    +    Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
    +    Assert(   (enmSz == kArmv8InstrShiftSz_U8 &&  cShift < 8)
    +           || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
    +           || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32)
    +           || (enmSz == kArmv8InstrShiftSz_U64 && cShift < 64));
    +
    +    return UINT32_C(0x0f005400)
    +         | ((uint32_t)f128Bit << 30)
    +         | (((uint32_t)enmSz | cShift) << 16)
    +         | (iVecRegSrc << 5)
    +         | iVecRegDst;
    +}
     /** @} */

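With the corrected enum values (8/16/32/64, i.e. the element size in bits), the two encoders follow the A64 Advanced SIMD shift-by-immediate field layout: a right shift stores 2*esize - shift in immh:immb (hence the new enmSz << 1 term), while SHL stores esize + shift, which is why enmSz | cShift works as long as cShift < esize. A small stand-alone sketch, illustrative only and not VBox code, reproducing those field values for a 16-bit element shifted by 3:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t const esize  = 16; /* kArmv8InstrShiftSz_U16 after this change */
        uint32_t const cShift = 3;

        /* SHL Vd.8H, Vn.8H, #3: immh:immb = esize + shift = 19 (0b0010011). */
        uint32_t const immShl = esize | cShift;          /* == esize + cShift while cShift < esize */

        /* USHR/SSHR Vd.8H, Vn.8H, #3: immh:immb = 2*esize - shift = 29 (0b0011101). */
        uint32_t const immShr = (esize << 1) - cShift;

        printf("SHL  immh:immb = 0x%02x\n", (unsigned)immShl);  /* 0x13 */
        printf("USHR immh:immb = 0x%02x\n", (unsigned)immShr);  /* 0x1d */
        return 0;
    }
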
  • trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h (r104292 → r104294)

     {
     //    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    -    SSE2_SHIFT_BODY_Imm(psllw, bRm, 0);
    +    SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
     }

    …

     {
     //    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    -    SSE2_SHIFT_BODY_Imm(pslld, bRm, 0);
    +    SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
     }

    …

     {
     //    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    -    SSE2_SHIFT_BODY_Imm(psllq, bRm, 0);
    +    SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
     }

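The third SSE2_SHIFT_BODY_Imm argument changes from 0 to an architecture mask, advertising that a native recompiler emitter now exists on both AMD64 and ARM64 hosts. A hypothetical sketch of how such an RT_ARCH_VAL_* mask can be tested against the host (the helper names are illustrative and assume the constants are distinct bits, as the OR above suggests; this is not the real SSE2_SHIFT_BODY_Imm logic):

    #include <iprt/cdefs.h>

    /* Hypothetical helper, not VBox code: map the build target to its RT_ARCH_VAL_* bit. */
    #if defined(RT_ARCH_AMD64)
    # define MY_HOST_ARCH_VAL   RT_ARCH_VAL_AMD64
    #elif defined(RT_ARCH_ARM64)
    # define MY_HOST_ARCH_VAL   RT_ARCH_VAL_ARM64
    #else
    # define MY_HOST_ARCH_VAL   0
    #endif

    /* True when the emitter mask passed to the body macro covers this host,
     * i.e. a native emitter is available; otherwise the C fallback is used. */
    #define MY_HAVE_NATIVE_EMITTER(a_fArchMask) (((a_fArchMask) & MY_HOST_ARCH_VAL) != 0)
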
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h (r104292 → r104294)

                 uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
                                                                                       kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
    -            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5); \
    +            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); \
                 pCodeBuf[off++] = X86_OP_PRF_SIZE_OP; \
                 if (idxSimdRegDst >= 8) \

    …

     IEMNATIVE_NATIVE_EMIT_SHIFT_RIGHT_IMM_U128(psrlq, 64, kArmv8InstrShiftSz_U64, 0x73);

    +
    +/**
    + * Common emitter for the shift left with immediate instructions.
    + */
    +#ifdef RT_ARCH_AMD64
    +# define IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(a_Instr, a_cShiftMax, a_ArmElemSz, a_bOpcX86) \
    +    DECL_INLINE_THROW(uint32_t) \
    +    RT_CONCAT3(iemNativeEmit_,a_Instr,_ri_u128)(PIEMRECOMPILERSTATE pReNative, uint32_t off, \
    +                                                uint8_t const idxSimdGstRegDst, uint8_t const bImm) \
    +    { \
    +        if (bImm) \
    +        { \
    +            uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
    +                                                                                  kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
    +            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); \
    +            pCodeBuf[off++] = X86_OP_PRF_SIZE_OP; \
    +            if (idxSimdRegDst >= 8) \
    +                pCodeBuf[off++] = X86_OP_REX_B; \
    +            pCodeBuf[off++] = 0x0f; \
    +            pCodeBuf[off++] = (a_bOpcX86); \
    +            pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 6, idxSimdRegDst & 7); \
    +            pCodeBuf[off++] = bImm; \
    +            iemNativeSimdRegFreeTmp(pReNative, idxSimdRegDst); \
    +            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); \
    +        } \
    +        /* Immediate 0 is a nop. */ \
    +        return off; \
    +    }
    +#elif defined(RT_ARCH_ARM64)
    +# define IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(a_Instr, a_cShiftMax, a_ArmElemSz, a_bOpcX86) \
    +    DECL_INLINE_THROW(uint32_t) \
    +    RT_CONCAT3(iemNativeEmit_,a_Instr,_ri_u128)(PIEMRECOMPILERSTATE pReNative, uint32_t off, \
    +                                                uint8_t const idxSimdGstRegDst, uint8_t const bImm) \
    +    { \
    +        if (bImm) /* bImm == 0 is a nop */ \
    +        { \
    +            uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
    +                                                                                  kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
    +            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); \
    +            if (bImm < (a_cShiftMax)) \
    +                pCodeBuf[off++] = Armv8A64MkVecInstrShlImm(idxSimdRegDst, idxSimdRegDst, bImm, (a_ArmElemSz)); \
    +            else /* Everything >= a_cShiftMax sets the register to zero. */ \
    +                pCodeBuf[off++] = Armv8A64MkVecInstrEor(idxSimdRegDst, idxSimdRegDst, idxSimdRegDst); \
    +            iemNativeSimdRegFreeTmp(pReNative, idxSimdRegDst); \
    +            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); \
    +        } \
    +        return off; \
    +    }
    +#else
    +# error "Port me"
    +#endif
    +
    +IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(psllw, 16, kArmv8InstrShiftSz_U16, 0x71);
    +IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(pslld, 32, kArmv8InstrShiftSz_U32, 0x72);
    +IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(psllq, 64, kArmv8InstrShiftSz_U64, 0x73);
    +
     #endif

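On the AMD64 side the new shift-left emitter writes at most six bytes (operand-size prefix, optional REX.B, 0x0f, the opcode, ModRM, and the immediate), which is also why the right-shift emitter's iemNativeInstrBufEnsure request above grows from 5 to 6. On the ARM64 side, A64 SHL (immediate) can only encode shift counts 0..esize-1, whereas the x86 PSLL* instructions take the whole immediate and clear the destination once the count reaches the element width, so counts >= a_cShiftMax are mapped to EOR Vd, Vd, Vd. A per-lane scalar model of the x86 semantics the emitter relies on (illustrative only, not VBox code):

    #include <stdint.h>

    /* Per-lane model of PSLLW xmm, imm8: the full 8-bit count applies
     * (no masking), and any count >= 16 clears the 16-bit lane, e.g.
     * psllwLane(0x8001, 1) == 0x0002 and psllwLane(0x1234, 16) == 0. */
    static uint16_t psllwLane(uint16_t uLane, uint8_t bImm)
    {
        return bImm < 16 ? (uint16_t)(uLane << bImm) : 0;
    }
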