Changeset 104294 in vbox
- Timestamp: Apr 11, 2024 12:28:26 PM
- Location: trunk
- Files: 3 edited
trunk/include/iprt/armv8.h
r104293 → r104294:

 typedef enum ARMV8INSTRUSHIFTSZ
 {
-    kArmv8InstrShiftSz_U8  = 16,  /**< Byte. */
-    kArmv8InstrShiftSz_U16 = 32,  /**< Halfword. */
-    kArmv8InstrShiftSz_U32 = 64,  /**< 32-bit. */
-    kArmv8InstrShiftSz_U64 = 128  /**< 64-bit. */
+    kArmv8InstrShiftSz_U8  = 8,   /**< Byte. */
+    kArmv8InstrShiftSz_U16 = 16,  /**< Halfword. */
+    kArmv8InstrShiftSz_U32 = 32,  /**< 32-bit. */
+    kArmv8InstrShiftSz_U64 = 64   /**< 64-bit. */
 } ARMV8INSTRUSHIFTSZ;
…
            || (enmSz == kArmv8InstrShiftSz_U64 && cShift <= 64)));

-    return UINT32_C(0xf000400)
+    return UINT32_C(0x0f000400)
          | ((uint32_t)f128Bit << 30)
          | ((uint32_t)fUnsigned << 29)
-         | (((uint32_t)enmSz - cShift) << 16)
+         | ((((uint32_t)enmSz << 1) - cShift) << 16)
          | ((uint32_t)fRound << 13)
          | ((uint32_t)fAccum << 12)
          | (iVecRegSrc << 5)
          | iVecRegDst;
 }
+
+
+/**
+ * A64: Encodes SHL (vector, register).
+ *
+ * @returns The encoded instruction.
+ * @param   iVecRegDst  The vector register to put the result into.
+ * @param   iVecRegSrc  The vector source register.
+ * @param   cShift      Number of bits to shift.
+ * @param   enmSz       Element size.
+ * @param   f128Bit     Flag whether this operates on the full 128-bit (true, default) of the vector register
+ *                      or just the low 64-bit (false).
+ */
+DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShlImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
+                                                     bool f128Bit = true)
+{
+    Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
+    Assert(   (enmSz == kArmv8InstrShiftSz_U8  && cShift < 8)
+           || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
+           || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32)
+           || (enmSz == kArmv8InstrShiftSz_U64 && cShift < 64));
+
+    return UINT32_C(0x0f005400)
+         | ((uint32_t)f128Bit << 30)
+         | (((uint32_t)enmSz | cShift) << 16)
+         | (iVecRegSrc << 5)
+         | iVecRegDst;
+}
 /** @} */
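The two changes above go together: the ARMV8INSTRUSHIFTSZ values now hold the element width in bits rather than twice the width, so the right-shift encoder has to form its immh:immb field as 2*esize - shift via (enmSz << 1) - cShift, while the new SHL encoder can form esize + shift as enmSz | cShift (OR is equivalent to addition here because cShift is asserted to be below the element width). The standalone C sketch below illustrates both encodings under the standard A64 SIMD shift-by-immediate layout; the helper names are invented for this example, and the right-shift mirror omits the rounding/accumulate flags of the real IPRT helper.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Element sizes as of r104294: the enum value is the element width in bits. */
enum ElemSz { ElemSz_U8 = 8, ElemSz_U16 = 16, ElemSz_U32 = 32, ElemSz_U64 = 64 };

/* Hypothetical mirror of Armv8A64MkVecInstrShlImm (SHL Vd.<T>, Vn.<T>, #shift).
   immh:immb (bits 22:16) = esize + shift; since shift < esize and esize is a
   power of two, OR-ing them is the same as adding them. */
static uint32_t MkShlImm(uint32_t iVd, uint32_t iVn, uint8_t cShift, enum ElemSz enmSz, int f128Bit)
{
    assert(cShift < (uint8_t)enmSz);
    return UINT32_C(0x0f005400)
         | ((uint32_t)(f128Bit != 0) << 30)
         | (((uint32_t)enmSz | cShift) << 16)
         | (iVn << 5)
         | iVd;
}

/* Hypothetical, simplified mirror of the right-shift encoder after the fix:
   immh:immb = 2*esize - shift, hence the (enmSz << 1) in the changeset. */
static uint32_t MkShrImm(uint32_t iVd, uint32_t iVn, uint8_t cShift, enum ElemSz enmSz,
                         int fUnsigned, int f128Bit)
{
    assert(cShift >= 1 && cShift <= (uint8_t)enmSz);
    return UINT32_C(0x0f000400)
         | ((uint32_t)(f128Bit != 0) << 30)
         | ((uint32_t)(fUnsigned != 0) << 29)
         | ((((uint32_t)enmSz << 1) - cShift) << 16)
         | (iVn << 5)
         | iVd;
}

int main(void)
{
    /* shl v0.8h, v0.8h, #3 and ushr v0.8h, v0.8h, #3 */
    printf("SHL  = %08x\n", (unsigned)MkShlImm(0, 0, 3, ElemSz_U16, 1));
    printf("USHR = %08x\n", (unsigned)MkShrImm(0, 0, 3, ElemSz_U16, 1, 1));
    return 0;
}

Running this should print 0x4f135400 for the SHL and 0x6f1d0400 for the USHR, which is what a disassembler should accept for shl/ushr on eight halfword lanes by 3.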
trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h
r104292 → r104294:

 {
     // IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
-    SSE2_SHIFT_BODY_Imm(psllw, bRm, 0);
+    SSE2_SHIFT_BODY_Imm(psllw, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
 {
     // IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
-    SSE2_SHIFT_BODY_Imm(pslld, bRm, 0);
+    SSE2_SHIFT_BODY_Imm(pslld, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
…
 {
     // IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
-    SSE2_SHIFT_BODY_Imm(psllq, bRm, 0);
+    SSE2_SHIFT_BODY_Imm(psllq, bRm, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }
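The only change in this file is the third argument of SSE2_SHIFT_BODY_Imm, which now passes RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64 instead of 0 for the PSLLW, PSLLD and PSLLQ immediate forms. That argument appears to be a host-architecture mask advertising that a native recompiler emitter exists for the instruction, matching the emitters added in IEMAllN8veEmit-x86.h below. A minimal sketch of that kind of gate, assuming this reading of the mask; every name in it is invented and it is not the VirtualBox macro:

#include <stdio.h>

/* Made-up stand-ins for IPRT's RT_ARCH_VAL_* values. */
#define MY_ARCH_VAL_AMD64   1
#define MY_ARCH_VAL_ARM64   2

#if defined(__x86_64__) || defined(_M_X64)
# define MY_ARCH_VAL_CURRENT    MY_ARCH_VAL_AMD64
#elif defined(__aarch64__) || defined(_M_ARM64)
# define MY_ARCH_VAL_CURRENT    MY_ARCH_VAL_ARM64
#else
# define MY_ARCH_VAL_CURRENT    0
#endif

static void EmitNative(void)   { puts("native SIMD emitter"); }
static void EmitFallback(void) { puts("generic fallback helper"); }

/* The mask says on which hosts a native emitter exists; a mask of 0 always
 * takes the fallback path, as the old third argument of 0 presumably did. */
#define SHIFT_BODY_IMM(a_fNativeArchs) \
    do { \
        if ((a_fNativeArchs) & MY_ARCH_VAL_CURRENT) \
            EmitNative(); \
        else \
            EmitFallback(); \
    } while (0)

int main(void)
{
    SHIFT_BODY_IMM(0);                                      /* old behaviour */
    SHIFT_BODY_IMM(MY_ARCH_VAL_AMD64 | MY_ARCH_VAL_ARM64);  /* new behaviour */
    return 0;
}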
trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h
r104292 → r104294:

     uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
                                                                           kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
-    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5); \
+    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); \
     pCodeBuf[off++] = X86_OP_PRF_SIZE_OP; \
     if (idxSimdRegDst >= 8) \
…
 IEMNATIVE_NATIVE_EMIT_SHIFT_RIGHT_IMM_U128(psrlq, 64, kArmv8InstrShiftSz_U64, 0x73);

+
+/**
+ * Common emitter for the shift left with immediate instructions.
+ */
+#ifdef RT_ARCH_AMD64
+# define IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(a_Instr, a_cShiftMax, a_ArmElemSz, a_bOpcX86) \
+    DECL_INLINE_THROW(uint32_t) \
+    RT_CONCAT3(iemNativeEmit_,a_Instr,_ri_u128)(PIEMRECOMPILERSTATE pReNative, uint32_t off, \
+                                                uint8_t const idxSimdGstRegDst, uint8_t const bImm) \
+    { \
+        if (bImm) \
+        { \
+            uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
+                                                                                  kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
+            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); \
+            pCodeBuf[off++] = X86_OP_PRF_SIZE_OP; \
+            if (idxSimdRegDst >= 8) \
+                pCodeBuf[off++] = X86_OP_REX_B; \
+            pCodeBuf[off++] = 0x0f; \
+            pCodeBuf[off++] = (a_bOpcX86); \
+            pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 6, idxSimdRegDst & 7); \
+            pCodeBuf[off++] = bImm; \
+            iemNativeSimdRegFreeTmp(pReNative, idxSimdRegDst); \
+            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); \
+        } \
+        /* Immediate 0 is a nop. */ \
+        return off; \
+    }
+#elif defined(RT_ARCH_ARM64)
+# define IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(a_Instr, a_cShiftMax, a_ArmElemSz, a_bOpcX86) \
+    DECL_INLINE_THROW(uint32_t) \
+    RT_CONCAT3(iemNativeEmit_,a_Instr,_ri_u128)(PIEMRECOMPILERSTATE pReNative, uint32_t off, \
+                                                uint8_t const idxSimdGstRegDst, uint8_t const bImm) \
+    { \
+        if (bImm) /* bImm == 0 is a nop */ \
+        { \
+            uint8_t const idxSimdRegDst = iemNativeSimdRegAllocTmpForGuestSimdReg(pReNative, &off, IEMNATIVEGSTSIMDREG_SIMD(idxSimdGstRegDst), \
+                                                                                  kIemNativeGstSimdRegLdStSz_Low128, kIemNativeGstRegUse_ForUpdate); \
+            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); \
+            if (bImm < (a_cShiftMax)) \
+                pCodeBuf[off++] = Armv8A64MkVecInstrShlImm(idxSimdRegDst, idxSimdRegDst, bImm, (a_ArmElemSz)); \
+            else /* Everything >= a_cShiftMax sets the register to zero. */ \
+                pCodeBuf[off++] = Armv8A64MkVecInstrEor(idxSimdRegDst, idxSimdRegDst, idxSimdRegDst); \
+            iemNativeSimdRegFreeTmp(pReNative, idxSimdRegDst); \
+            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); \
+        } \
+        return off; \
+    }
+#else
+# error "Port me"
+#endif
+
+IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(psllw, 16, kArmv8InstrShiftSz_U16, 0x71);
+IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(pslld, 32, kArmv8InstrShiftSz_U32, 0x72);
+IEMNATIVE_NATIVE_EMIT_SHIFT_LEFT_IMM_U128(psllq, 64, kArmv8InstrShiftSz_U64, 0x73);
+
 #endif
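The ARM64 branch of the new emitter leans on x86 semantics: PSLLW, PSLLD and PSLLQ zero every element once the immediate reaches the element width, and since the A64 SHL immediate form only encodes shifts smaller than the element size, those counts are emitted as an EOR of the register with itself instead. A scalar reference of that rule for one 16-bit lane, illustrative only and not VirtualBox code:

#include <stdint.h>
#include <stdio.h>

/* Reference semantics of PSLLW xmm, imm8 on one 16-bit lane: counts of 16 or
   more zero the lane rather than being taken modulo the lane width. */
static uint16_t psllw_lane(uint16_t uLane, uint8_t bImm)
{
    return bImm < 16 ? (uint16_t)(uLane << bImm) : 0;
}

int main(void)
{
    printf("%04x\n", psllw_lane(0x8001, 1));   /* 0002: top bit shifted out */
    printf("%04x\n", psllw_lane(0x8001, 15));  /* 8000 */
    printf("%04x\n", psllw_lane(0x8001, 16));  /* 0000: count >= width zeroes the lane */
    printf("%04x\n", psllw_lane(0x8001, 200)); /* 0000 */
    return 0;
}

Handling the out-of-range case with EOR also keeps the emitted code to a single instruction, matching the one-entry buffer reserved by iemNativeInstrBufEnsure(pReNative, off, 1).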