Timestamp: Jun 22, 2022 10:14:17 PM
Location: trunk/src/VBox/VMM
Files: 6 edited

Legend: unchanged lines are shown unprefixed, added lines are prefixed with "+", removed lines with "-"; "…" marks elided unchanged code between hunks.
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (diff r95345 → r95347)

 ENDPROC iemAImpl_rorx_u64
 %endif ; RT_ARCH_AMD64
+
+
+;
+; MULX
+;
+BEGINPROC_FASTCALL iemAImpl_mulx_u32, 16
+        PROLOGUE_4_ARGS
+%ifdef ASM_CALL64_GCC
+        ; A2_32 is EDX - perfect
+        mulx    T0_32, T1_32, A3_32
+        mov     [A1], T1_32 ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], T0_32
+%else
+        ; A1 is xDX - must switch A1 and A2, so EDX=uSrc1
+        xchg    A1, A2
+        mulx    T0_32, T1_32, A3_32
+        mov     [A2], T1_32 ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], T0_32
+%endif
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_mulx_u32
+
+
+BEGINPROC_FASTCALL iemAImpl_mulx_u32_fallback, 16
+        PROLOGUE_4_ARGS
+%ifdef ASM_CALL64_GCC
+        ; A2_32 is EDX, T0_32 is EAX
+        mov     eax, A3_32
+        mul     A2_32
+        mov     [A1], eax ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], edx
+%else
+        ; A1 is xDX, T0_32 is EAX - must switch A1 and A2, so EDX=uSrc1
+        xchg    A1, A2
+        mov     eax, A3_32
+        mul     A2_32
+        mov     [A2], eax ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], edx
+%endif
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_mulx_u32_fallback
+
+%ifdef RT_ARCH_AMD64
+BEGINPROC_FASTCALL iemAImpl_mulx_u64, 16
+        PROLOGUE_4_ARGS
+%ifdef ASM_CALL64_GCC
+        ; A2 is RDX - perfect
+        mulx    T0, T1, A3
+        mov     [A1], T1 ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], T0
+%else
+        ; A1 is xDX - must switch A1 and A2, so RDX=uSrc1
+        xchg    A1, A2
+        mulx    T0, T1, A3
+        mov     [A2], T1 ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], T0
+%endif
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_mulx_u64
+
+
+BEGINPROC_FASTCALL iemAImpl_mulx_u64_fallback, 16
+        PROLOGUE_4_ARGS
+%ifdef ASM_CALL64_GCC
+        ; A2 is RDX, T0 is RAX
+        mov     rax, A3
+        mul     A2
+        mov     [A1], rax ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], rdx
+%else
+        ; A1 is xDX, T0 is RAX - must switch A1 and A2, so RDX=uSrc1
+        xchg    A1, A2
+        mov     rax, A3
+        mul     A2
+        mov     [A2], rax ; Low value first, as we should return the high part if same destination registers.
+        mov     [A0], rdx
+%endif
+        EPILOGUE_4_ARGS
+ENDPROC iemAImpl_mulx_u64_fallback
+
+%endif
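As a semantic reference for the assembly above, a minimal C model of the 32-bit variant (an illustrative sketch, not part of the changeset): MULX performs an unsigned widening multiply of the implicit EDX/RDX source against the second source operand and writes both halves without touching EFLAGS. Storing the low half first is what lets the high half win when both destinations are the same register.

    #include <stdint.h>

    /* Sketch of the iemAImpl_mulx_u32 contract: hi:lo = uSrc1 (EDX) * uSrc2.
       The low half is stored first so the high half survives when puDstHi
       and puDstLo alias, matching the "Low value first" comments above. */
    static void mulxU32Model(uint32_t *puDstHi, uint32_t *puDstLo,
                             uint32_t uSrc1, uint32_t uSrc2)
    {
        uint64_t const uProduct = (uint64_t)uSrc1 * uSrc2;
        *puDstLo = (uint32_t)uProduct;
        *puDstHi = (uint32_t)(uProduct >> 32);
    }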
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (diff r95345 → r95347)

 # endif /* !DOXYGEN_RUNNING */

+/*
+ * MULX
+ */
+# define EMIT_MULX(a_cBitsWidth, a_cBitsWidth2x, a_uType, a_fnMul, a_Suffix) \
+IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_mulx_u,a_cBitsWidth,a_Suffix), \
+    (a_uType *puDst1, a_uType *puDst2, a_uType uSrc1, a_uType uSrc2)) \
+{ \
+    RTUINT ## a_cBitsWidth2x ## U Result; \
+    a_fnMul(Result, uSrc1, uSrc2, a_cBitsWidth2x); \
+    *puDst2 = Result.s.Lo; /* Lower part first, as we should return the high part when puDst2 == puDst1. */ \
+    *puDst1 = Result.s.Hi; \
+} \
+
+# ifndef DOXYGEN_RUNNING /* this totally confuses doxygen for some reason */
+EMIT_MULX(64, 128, uint64_t, MULDIV_MUL_U128, RT_NOTHING)
+EMIT_MULX(64, 128, uint64_t, MULDIV_MUL_U128, _fallback)
+#  if !defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
+EMIT_MULX(32, 64, uint32_t, MULDIV_MUL, RT_NOTHING)
+EMIT_MULX(32, 64, uint32_t, MULDIV_MUL, _fallback)
+#  endif /* !defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY) */
+# endif /* !DOXYGEN_RUNNING */
+
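Expanding the macro by hand shows the shape of the generated functions; this is roughly what EMIT_MULX(64, 128, uint64_t, MULDIV_MUL_U128, RT_NOTHING) produces (a sketch for illustration only; MULDIV_MUL_U128 is defined earlier in IEMAllAImplC.cpp and RTUINT128U is the IPRT 128-bit union):

    /* Approximate hand-expansion of the 64-bit EMIT_MULX instantiation,
       not a second definition of the function. */
    IEM_DECL_IMPL_DEF(void, iemAImpl_mulx_u64,
                      (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2))
    {
        RTUINT128U Result;
        MULDIV_MUL_U128(Result, uSrc1, uSrc2, 128);
        *puDst2 = Result.s.Lo; /* low part first: high part wins if puDst2 == puDst1 */
        *puDst1 = Result.s.Hi;
    }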
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (diff r95341 → r95347)

     'AL':       [],
     'rAX':      [],
+    'rDX':      [],
     'rSI':      [],
     'rDI':      [],
…
     'AL':       ( 'IDX_ParseFixedReg',  'AL',   'al',   'REG_AL',  '', ),
     'rAX':      ( 'IDX_ParseFixedReg',  'rAX',  '%eAX', 'REG_EAX', '', ),
+    'rDX':      ( 'IDX_ParseFixedReg',  'rDX',  '%eDX', 'REG_EDX', '', ),
     'CS':       ( 'IDX_ParseFixedReg',  'CS',   'cs',   'REG_CS',  '', ), # 8086: push CS
     'DS':       ( 'IDX_ParseFixedReg',  'DS',   'ds',   'REG_DS',  '', ),
…
         # Check the parameter locations for the encoding.
         if g_kdIemForms[sForm][1] is not None:
-            if len(g_kdIemForms[sForm][1]) != len(oInstr.aoOperands):
+            if len(g_kdIemForms[sForm][1]) > len(oInstr.aoOperands):
                 self.error('%s: The a_Form=%s has a different operand count: %s (form) vs %s'
                            % (sMacro, sForm, len(g_kdIemForms[sForm][1]), len(oInstr.aoOperands) ));
…
                     self.error('%s: current instruction @op%u and a_Form type does not match: %s/%s vs %s'
                                % (sMacro, iOperand + 1, oInstr.aoOperands[iOperand].sType, sOpFormMatch, sForm, ));
+            if len(g_kdIemForms[sForm][1]) < len(oInstr.aoOperands):
+                for iOperand in range(len(g_kdIemForms[sForm][1]), len(oInstr.aoOperands)):
+                    if oInstr.aoOperands[iOperand].sType != 'FIXED' \
+                       and g_kdOpTypes[oInstr.aoOperands[iOperand].sType][0] != 'IDX_ParseFixedReg':
+                        self.error('%s: Expected FIXED type operand #%u following operands given by a_Form=%s: %s (%s)'
+                                   % (sMacro, iOperand, sForm, oInstr.aoOperands[iOperand].sType,
+                                      oInstr.aoOperands[iOperand].sWhere));

         # Check @opcodesub
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h (diff r95345 → r95347)

 /*  Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
 /*  Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */
-/*  Opcode VEX.F2.0F38 0xf6 - invalid (vex only). */
-FNIEMOP_STUB(iemOp_mulx_By_Gy_rDX_Ey);
+
+
+/** Opcode VEX.F2.0F38 0xf6 (vex only) */
+FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
+{
+    IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
+    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi2)
+        return iemOp_InvalidNeedRM(pVCpu);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_VEX_DECODING_L0();
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_BEGIN(4, 0);
+            IEM_MC_ARG(uint64_t *,  pDst1, 0);
+            IEM_MC_ARG(uint64_t *,  pDst2, 1);
+            IEM_MC_ARG(uint64_t,    uSrc1, 2);
+            IEM_MC_ARG(uint64_t,    uSrc2, 3);
+            IEM_MC_REF_GREG_U64(pDst1,   IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_REF_GREG_U64(pDst2,   IEM_GET_EFFECTIVE_VVVV(pVCpu));
+            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
+            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
+                                     pDst1, pDst2, uSrc1, uSrc2);
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(4, 0);
+            IEM_MC_ARG(uint32_t *,  pDst1, 0);
+            IEM_MC_ARG(uint32_t *,  pDst2, 1);
+            IEM_MC_ARG(uint32_t,    uSrc1, 2);
+            IEM_MC_ARG(uint32_t,    uSrc2, 3);
+            IEM_MC_REF_GREG_U32(pDst1,   IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_REF_GREG_U32(pDst2,   IEM_GET_EFFECTIVE_VVVV(pVCpu));
+            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
+            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
+                                     pDst1, pDst2, uSrc1, uSrc2);
+            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
+            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
+        {
+            IEM_MC_BEGIN(4, 1);
+            IEM_MC_ARG(uint64_t *,  pDst1, 0);
+            IEM_MC_ARG(uint64_t *,  pDst2, 1);
+            IEM_MC_ARG(uint64_t,    uSrc1, 2);
+            IEM_MC_ARG(uint64_t,    uSrc2, 3);
+            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_L0();
+            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
+            IEM_MC_REF_GREG_U64(pDst2,   IEM_GET_EFFECTIVE_VVVV(pVCpu));
+            IEM_MC_REF_GREG_U64(pDst1,   IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
+                                     pDst1, pDst2, uSrc1, uSrc2);
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(4, 1);
+            IEM_MC_ARG(uint32_t *,  pDst1, 0);
+            IEM_MC_ARG(uint32_t *,  pDst2, 1);
+            IEM_MC_ARG(uint32_t,    uSrc1, 2);
+            IEM_MC_ARG(uint32_t,    uSrc2, 3);
+            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_L0();
+            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
+            IEM_MC_REF_GREG_U32(pDst2,   IEM_GET_EFFECTIVE_VVVV(pVCpu));
+            IEM_MC_REF_GREG_U32(pDst1,   IEM_GET_MODRM_REG(pVCpu, bRm));
+            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
+                                     pDst1, pDst2, uSrc1, uSrc2);
+            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
+            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
+            IEM_MC_ADVANCE_RIP();
+            IEM_MC_END();
+        }
+    }
+    return VINF_SUCCESS;
+}
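For reference, the encoding decoded above is what the BMI2 _mulx_u64 intrinsic from <immintrin.h> compiles to, with rDX as the implicit multiplicand. A small guest-side usage sketch (assumes a compiler with BMI2 enabled, e.g. gcc or clang with -mbmi2):

    #include <stdint.h>
    #include <immintrin.h>

    /* Emits MULX (VEX.F2.0F38 0xf6): the low half of a*b is the intrinsic's
       return value, the high half is written through the pointer. */
    static uint64_t mulHighU64(uint64_t a, uint64_t b)
    {
        unsigned long long uHi;
        (void)_mulx_u64(a, b, &uHi);
        return uHi;
    }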
trunk/src/VBox/VMM/include/IEMInternal.h (diff r95345 → r95347)

 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_rorx_u64;
 /** @} */
+
+/** @name MULX 32-bit and 64-bit.
+ * @{ */
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU32, (uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2));
+typedef FNIEMAIMPLMULXVEXU32 *PFNIEMAIMPLMULXVEXU32;
+FNIEMAIMPLMULXVEXU32 iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback;
+
+typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU64, (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2));
+typedef FNIEMAIMPLMULXVEXU64 *PFNIEMAIMPLMULXVEXU64;
+FNIEMAIMPLMULXVEXU64 iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback;
+/** @} */
+

 /** @name Exchange memory with register operations.
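A minimal sketch of dispatching through the new typedefs (a hypothetical helper, not part of the changeset; the decoder actually selects via IEM_SELECT_HOST_OR_FALLBACK, and fHostHasBmi2 below stands in for the real host-feature check):

    #include <stdbool.h>
    #include <stdint.h>

    /* Pick the native MULX implementation when the host has BMI2, else the
       portable C fallback; both match the FNIEMAIMPLMULXVEXU64 signature
       declared above. */
    static void doMulxU64(bool fHostHasBmi2, uint64_t uSrc1, uint64_t uSrc2,
                          uint64_t *puHi, uint64_t *puLo)
    {
        PFNIEMAIMPLMULXVEXU64 const pfn = fHostHasBmi2
                                        ? iemAImpl_mulx_u64
                                        : iemAImpl_mulx_u64_fallback;
        pfn(puHi, puLo, uSrc1, uSrc2); /* writes the low half first, then the high */
    }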
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp (diff r95308 → r95347)

 #define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) do { } while (0)
 #define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) do { } while (0)
-#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) do { } while (0)
+#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) do { } while (0)
 #define IEMOP_BITCH_ABOUT_STUB() do { } while (0)
 #define FNIEMOP_STUB(a_Name) \