Changeset 95403 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
Timestamp: Jun 27, 2022 11:38:38 PM
svn:sync-xref-src-repo-rev: 152000
File: 1 edited
Legend: unchanged context lines are unprefixed, added lines are prefixed with '+', removed lines with '-'; '…' marks elided unchanged code.
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
r95360 → r95403

  * @{
  */

+
+/**
+ * Common worker for MMX instructions on the form:
+ *      pxxx    mm1, mm2/mem64
+ */
+FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
+        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(2, 0);
+        IEM_MC_ARG(uint64_t *, pDst, 0);
+        IEM_MC_ARG(uint64_t const *, pSrc, 1);
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
+        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+        IEM_MC_FPU_TO_MMX_MODE();
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(2, 2);
+        IEM_MC_ARG(uint64_t *, pDst, 0);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_FPU_USAGE();
+        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
+        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
+        IEM_MC_FPU_TO_MMX_MODE();
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/**
+ * Common worker for SSE2 instructions on the forms:
+ *      pxxx    xmm1, xmm2/mem128
+ *
+ * Proper alignment of the 128-bit operand is enforced.
+ * Exceptions type 4. SSE2 cpuid checks.
+ */
+FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(2, 0);
+        IEM_MC_ARG(PRTUINT128U, pDst, 0);
+        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
+        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(2, 2);
+        IEM_MC_ARG(PRTUINT128U, pDst, 0);
+        IEM_MC_LOCAL(RTUINT128U, uSrc);
+        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+        IEM_MC_PREPARE_SSE_USAGE();
+        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+

 /** Opcode 0x0f 0x00 /0. */
…
 /* Opcode 0xf2 0x0f 0x56 - invalid */

+
 /** Opcode 0x0f 0x57 - xorps Vps, Wps */
-FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
+FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
+{
+    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
+}
+
+
 /** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
-FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
+FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
+{
+    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
+    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
+}
+
+
 /* Opcode 0xf3 0x0f 0x57 - invalid */
 /* Opcode 0xf2 0x0f 0x57 - invalid */
…


-/**
- * Common worker for MMX instructions on the form:
- *      pxxx    mm1, mm2/mem64
- */
-FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-    {
-        /*
-         * Register, register.
-         */
-        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
-        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_BEGIN(2, 0);
-        IEM_MC_ARG(uint64_t *, pDst, 0);
-        IEM_MC_ARG(uint64_t const *, pSrc, 1);
-        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-        IEM_MC_PREPARE_FPU_USAGE();
-        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-        IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
-        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    else
-    {
-        /*
-         * Register, memory.
-         */
-        IEM_MC_BEGIN(2, 2);
-        IEM_MC_ARG(uint64_t *, pDst, 0);
-        IEM_MC_LOCAL(uint64_t, uSrc);
-        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
-        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
-        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-        IEM_MC_PREPARE_FPU_USAGE();
-        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
-        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
-}
-
-
-/**
- * Common worker for SSE2 instructions on the forms:
- *      pxxx    xmm1, xmm2/mem128
- *
- * Proper alignment of the 128-bit operand is enforced.
- * Exceptions type 4. SSE2 cpuid checks.
- */
-FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-    {
-        /*
-         * Register, register.
-         */
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_BEGIN(2, 0);
-        IEM_MC_ARG(PRTUINT128U, pDst, 0);
-        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
-        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    else
-    {
-        /*
-         * Register, memory.
-         */
-        IEM_MC_BEGIN(2, 2);
-        IEM_MC_ARG(PRTUINT128U, pDst, 0);
-        IEM_MC_LOCAL(RTUINT128U, uSrc);
-        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
-        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-        IEM_MC_PREPARE_SSE_USAGE();
-        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
-
-        IEM_MC_ADVANCE_RIP();
-        IEM_MC_END();
-    }
-    return VINF_SUCCESS;
-}
-
-
 /** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
 FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
…
 FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
 {
-    IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
+    IEMOP_MNEMONIC(pcmpeqb_Vx_Wx, "pcmpeqb");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
 }
…
 FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
 {
-    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
+    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "pcmpeqd");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
 }
…
         return iemOp_InvalidNeedRM(pVCpu);
 #ifndef TST_IEM_CHECK_MC
-# if defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)
+# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
     static const IEMOPBINSIZES s_Native =
     { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
…
 }

+
 /** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
 FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
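Worth noting about the xorps/xorpd hunks: both new decoders dispatch to the pxor worker via g_iemAImpl_pxor. That reuse is sound because XORPS, XORPD and PXOR all compute the same bitwise 128-bit XOR; they differ only in the operand type they nominally carry (and, on real hardware, the execution domain). A minimal host-side C sketch, not part of the changeset, demonstrating the equivalence with SSE2 intrinsics:

#include <emmintrin.h> /* SSE2 intrinsics */
#include <stdio.h>
#include <string.h>

int main(void)
{
    __m128i a = _mm_set_epi32(0x12345678, 0x0badf00d, 0x0f0f0f0f, 0x55aa55aa);
    __m128i b = _mm_set_epi32(0x7fffffff, 0x00000000, 0x33333333, 0x0000ffff);

    __m128i rPxor  = _mm_xor_si128(a, b);                                   /* pxor  xmm, xmm */
    __m128  rXorps = _mm_xor_ps(_mm_castsi128_ps(a), _mm_castsi128_ps(b));  /* xorps xmm, xmm */
    __m128d rXorpd = _mm_xor_pd(_mm_castsi128_pd(a), _mm_castsi128_pd(b));  /* xorpd xmm, xmm */

    /* All three leave the identical 128-bit pattern in the destination. */
    printf("pxor == xorps: %d\n", memcmp(&rPxor, &rXorps, 16) == 0);
    printf("pxor == xorpd: %d\n", memcmp(&rPxor, &rXorpd, 16) == 0);
    return 0;
}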
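Both common workers open with the same decode step: fetch the ModR/M byte, test whether mod == 3 (register operand) or not (memory operand), then extract the reg and rm fields for register indexing. A standalone sketch of that split, using hand-written masks that mirror the architectural ModR/M layout (mod = bits 7:6, reg = bits 5:3, rm = bits 2:0) rather than VirtualBox's X86_MODRM_* constants:

#include <stdint.h>
#include <stdio.h>

/* Architectural ModR/M layout: mod = bits 7:6, reg = bits 5:3, rm = bits 2:0. */
#define MODRM_MOD(b) (((unsigned)(b) >> 6) & 0x3u)
#define MODRM_REG(b) (((unsigned)(b) >> 3) & 0x7u)
#define MODRM_RM(b)  ((unsigned)(b) & 0x7u)

int main(void)
{
    uint8_t bRm = 0xc8; /* mod=3, reg=1, rm=0: e.g. "pxor mm1, mm0" */
    if (MODRM_MOD(bRm) == 3)
        printf("register form: reg=%u rm=%u\n", MODRM_REG(bRm), MODRM_RM(bRm));
    else
        printf("memory form: reg=%u, rm=%u selects an addressing mode\n",
               MODRM_REG(bRm), MODRM_RM(bRm));
    return 0;
}

In the diff above, the SSE2 worker additionally ORs in pVCpu->iem.s.uRexReg / uRexB so REX prefixes can select XMM8-XMM15, while the MMX worker uses only the 3-bit fields; the @todo testcase notes question exactly how REX interacts with MMX register indexing.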
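One more detail from the popcnt hunk: the native helper table is now also gated on !defined(IEM_WITHOUT_ASSEMBLY), so the assembly-backed iemAImpl_popcnt_u16/u32/u64 helpers are only referenced when the build actually ships them. As a rough illustration of what a pure-C substitute for such a helper can look like (a sketch for illustration only, not VirtualBox's actual fallback code), here is the classic branch-free bit-twiddling population count:

#include <stdint.h>
#include <stdio.h>

/* Branch-free 32-bit population count (Hacker's Delight style). */
static uint32_t PopCountU32(uint32_t u)
{
    u = u - ((u >> 1) & 0x55555555u);                 /* 2-bit partial sums */
    u = (u & 0x33333333u) + ((u >> 2) & 0x33333333u); /* 4-bit partial sums */
    u = (u + (u >> 4)) & 0x0f0f0f0fu;                 /* 8-bit partial sums */
    return (u * 0x01010101u) >> 24;                   /* add the four bytes */
}

int main(void)
{
    printf("popcount(0x0000f0f0) = %u\n", PopCountU32(0x0000f0f0u)); /* 8  */
    printf("popcount(0xffffffff) = %u\n", PopCountU32(0xffffffffu)); /* 32 */
    return 0;
}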