VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 55282

Last change on this file since 55282 was 55229, checked in by vboxsync, 10 years ago

CPUM,IEM: Expose GuestFeatures and HostFeatures (exploded CPUID), making IEM use it. Early XSAVE/AVX guest support preps.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 590.0 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 55229 2015-04-14 06:35:43Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2013 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and handles both operand forms: register
 * destination (mod == 3) and memory destination (all other encodings).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The LOCK prefix is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked variant (CMP, TEST) only read the
           destination, so map it read-only for them. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t,    u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Select the locked implementation when a LOCK prefix was decoded. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Dispatches on the effective operand size (16/32/64-bit) and on whether the
 * ModR/M byte denotes a register or a memory destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The LOCK prefix is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST does not write its destination (EFLAGS only), so the
                   implicit high-dword clearing of 32-bit GPR writes must be
                   skipped for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 doubles as the "has locked variants" indicator; CMP and
           TEST have none and only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * The source is the ModR/M r/m operand (register or memory); the destination
 * is the ModR/M reg operand.  The memory form is a plain read, so no LOCK
 * handling is needed here.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * The source is the ModR/M r/m operand (register or memory); the destination
 * is the ModR/M reg operand.  The memory form only reads, so no LOCK handling
 * is needed here.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes clear the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes clear the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate byte, then applies the operation to AL in place.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate width follows the effective operand size; in 64-bit mode the
 * 32-bit immediate is sign-extended to 64 bits (standard Iz semantics).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns VINF_SUCCESS (decode errors are raised via the IEM_MC_* macros).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST only updates EFLAGS; skip the high-dword clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6 — invalid in this emulation; raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector to a register (operand
 *  size honored) or to a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: store LDTR using the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
594
595
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  (operand size honored) or to a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: store TR using the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
651
652
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT selector from a 16-bit register
 *  or memory operand; the heavy lifting is deferred to iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
682
683
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit register
 *  or memory operand; the heavy lifting is deferred to iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
713
714
715/** Opcode 0x0f 0x00 /3. */
716FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
717{
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
723 IEM_MC_BEGIN(2, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
727 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
728 IEM_MC_END();
729 }
730 else
731 {
732 IEM_MC_BEGIN(2, 1);
733 IEM_MC_ARG(uint16_t, u16Sel, 0);
734 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
737 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
738 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
739 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
740 IEM_MC_END();
741 }
742 return VINF_SUCCESS;
743}
744
745
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
752
753
754/** Opcode 0x0f 0x00 /5. */
755FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
756{
757 IEMOP_MNEMONIC("verr Ew");
758 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
759}
760
761
762/** Opcode 0x0f 0x00. */
763FNIEMOP_DEF(iemOp_Grp6)
764{
765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
766 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
767 {
768 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
769 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
770 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
771 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
772 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
773 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
774 case 6: return IEMOP_RAISE_INVALID_OPCODE();
775 case 7: return IEMOP_RAISE_INVALID_OPCODE();
776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
777 }
778
779}
780
781
/** Opcode 0x0f 0x01 /0 (memory form) - SGDT: store the GDT register; the
 *  actual store is done by iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
798
799
/** Opcode 0x0f 0x01 /0, mod=3 rm=1 (0xc1) - VMCALL. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
806
807
/** Opcode 0x0f 0x01 /0, mod=3 rm=2 (0xc2) - VMLAUNCH. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0, mod=3 rm=3 (0xc3) - VMRESUME. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0, mod=3 rm=4 (0xc4) - VMXOFF. Not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /1 (memory form) - SIDT: store the IDT register; the
 *  actual store is done by iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
848
849
/** Opcode 0x0f 0x01 /1, mod=3 rm=0 - MONITOR; deferred to iemCImpl_monitor
 *  with the effective segment of the address in rAX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
857
858
/** Opcode 0x0f 0x01 /1, mod=3 rm=1 - MWAIT; deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
866
867
/** Opcode 0x0f 0x01 /2 (memory form) - LGDT: load the GDT register; the
 *  actual load is done by iemCImpl_lgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
884
885
/** Opcode 0x0f 0x01 /2, mod=3 rm=0 (0xd0) - XGETBV. Not implemented yet;
 *  asserts in debug builds and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
892
893
/** Opcode 0x0f 0x01 /2, mod=3 rm=1 (0xd1) - XSETBV. Not implemented yet;
 *  asserts in debug builds and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
900
901
902/** Opcode 0x0f 0x01 /3. */
903FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
904{
905 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
906 ? IEMMODE_64BIT
907 : pIemCpu->enmEffOpSize;
908 IEM_MC_BEGIN(3, 1);
909 IEM_MC_ARG(uint8_t, iEffSeg, 0);
910 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
911 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
914 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
915 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
916 IEM_MC_END();
917 return VINF_SUCCESS;
918}
919
920
/* AMD SVM members of group 7 (0x0f 0x01, mod=3, reg=3): all unimplemented
   stubs that raise #UD via the FNIEMOP_UD_STUB macro. */

/** Opcode 0x0f 0x01 0xd8 - VMRUN. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9 - VMMCALL. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda - VMLOAD. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb - VMSAVE. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc - STGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd - CLGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde - SKINIT. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf - INVLPGA. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
944
/** Opcode 0x0f 0x01 /4 - SMSW: store the machine status word (CR0 low bits;
 *  full CR0 for wider register operands). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination honors the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
998
999
/** Opcode 0x0f 0x01 /6 - LMSW: load the machine status word into CR0 via
 *  iemCImpl_lmsw. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used.
       NOTE(review): the Intel SDM documents LMSW as loading the low FOUR CR0
       bits (PE/MP/EM/TS) - confirm against iemCImpl_lmsw whether "3-bits" is
       accurate here. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1027
1028
/** Opcode 0x0f 0x01 /7 (memory form only; the dispatcher routes mod=3 to
 *  swapgs/rdtscp, so bRm always encodes a memory operand here). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* The actual TLB invalidation is done by the C implementation. */
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1041
1042
/** Opcode 0x0f 0x01 /7, register form with R/M=0 (see iemOp_Grp7). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_ONLY_64BIT();     /* swapgs is #UD outside long mode. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1051
1052
/** Opcode 0x0f 0x01 /7, register form with R/M=1 (see iemOp_Grp7).
 *  Stub: not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1060
1061
/** Opcode 0x0f 0x01 (group 7).
 *
 * Dispatches on the ModR/M reg field; for several encodings the register
 * form (mod=3) selects a different instruction via the R/M field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) / VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) / monitor+mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) / xgetbv+xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) / AMD SVM instructions (reg, all 8 R/M values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Reserved. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) / swapgs+rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1138
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew forms.
 *
 * Loads a 16-bit selector from register or memory and calls the shared C
 * implementation; @a fIsLar selects LAR vs LSL behavior there.  Note that
 * the 32-bit and 64-bit operand sizes share the 64-bit destination worker.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* LAR/LSL are #UD in real and V86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register form: the selector comes from a 16-bit GPR.
         */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit shares the 64-bit worker (destination referenced as u64). */
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory form: the selector is a 16-bit read regardless of operand size.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1240
1241
1242
/** Opcode 0x0f 0x02 - lar Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1249
1250
/** Opcode 0x0f 0x03 - lsl Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1257
1258
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1266
1267
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1275
1276
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1284
1285
/** Opcode 0x0f 0x08 - invd.  Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
1288
1289
/** Opcode 0x0f 0x09.  Implemented as a privileged NOP: only the CPL check is
 *  performed, no caches are actually written back or invalidated. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1301
1302
/** Opcode 0x0f 0x0b - ud2.  Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_ud2);
1305
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group (GrpP). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid - prefetches only take memory operands. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - verify against AMD GrpP docs. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the address (may fault) but perform no actual prefetch. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1345
1346
/** Opcode 0x0f 0x0e - femms.  Stub: not implemented yet. */
FNIEMOP_STUB(iemOp_femms);


/*
 * AMD 3DNow! instruction stubs.  These share the 0x0f 0x0f escape and are
 * selected by a trailing opcode byte; iemOp_3Dnow does the dispatching.
 * None are implemented yet.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1422
1423
/** Opcode 0x0f 0x0f - 3DNow! escape.  Reads the trailing opcode byte and
 *  dispatches to the individual 3DNow! workers; #UD if the guest CPU does
 *  not report 3DNow! support. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1465
1466
/* SSE/SSE2 move stubs for opcodes 0x0f 0x10..0x17 - not implemented yet. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1483
1484
/** Opcode 0x0f 0x18 - prefetch group 16 (prefetchNTA/T0/T1/T2).
 *  The address is decoded but no actual prefetching is done; the register
 *  form is #UD. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1516
1517
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP.  The memory form still decodes
 *  the effective address (so segment/address faults behave normally). */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1540
1541
/** Opcode 0x0f 0x20 - mov Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1572
1573
/** Opcode 0x0f 0x21 - mov Rd,Dd (read debug register).
 *  REX.R would select DR8..DR15, which do not exist, hence #UD. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1586
1587
/** Opcode 0x0f 0x22 - mov Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1618
1619
/** Opcode 0x0f 0x23 - mov Dd,Rd (write debug register).
 *  REX.R would select DR8..DR15, which do not exist, hence #UD. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1632
1633
/** Opcode 0x0f 0x24 - mov Rd,Td (test registers; gone since the 486). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1641
1642
/** Opcode 0x0f 0x26 - mov Td,Rd (test registers; gone since the 486). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1650
1651
/* SSE/SSE2 stubs for opcodes 0x0f 0x28..0x2f - not implemented yet. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1668
1669
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1677
1678
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1686
1687
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1695
1696
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1711
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Emits decode + microcode for all three effective operand sizes, register
 * and memory source forms.  Note that for 32-bit operand size the
 * destination's high dword is cleared even when the condition is false
 * (IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE branch), and the memory source is
 * always read regardless of the condition.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1812
1813
1814
/** Opcode 0x0f 0x40 - cmovo Gv,Ev: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb Gv,Ev: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae Gv,Ev: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1943
/* SSE/SSE2 arithmetic stubs for opcodes 0x0f 0x50..0x5f - not implemented yet. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1976
1977
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for MMX.
 *
 * Exceptions type 4.
 *
 * @param pImpl Assembly implementation table; pfnU64 may be NULL when the
 *              MMX form does not exist (raises \#UD then).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 0x66 prefix selects the SSE form, no prefix the MMX form; REPZ/REPNZ are invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit load with 128-bit alignment requirement. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2084
2085
/** Opcode 0x0f 0x60 - punpcklbw Pq,Qd / (66) punpcklbw Vdq,Wdq. Interleave low bytes. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61 - punpcklwd Pq,Qd / (66) punpcklwd Vdq,Wdq. Interleave low words. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62 - punpckldq Pq,Qd / (66) punpckldq Vdq,Wdq. Interleave low dwords. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}


/** Opcode 0x0f 0x63. (Stubbed.) */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. (Stubbed.) */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. (Stubbed.) */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. (Stubbed.) */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. (Stubbed.) */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2120
2121
2122/**
2123 * Common worker for SSE2 and MMX instructions on the forms:
2124 * pxxxx xmm1, xmm2/mem128
2125 * pxxxx mm1, mm2/mem64
2126 *
2127 * The 2nd operand is the second half of a register, which in the memory case
2128 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2129 * where it may read the full 128 bits or only the upper 64 bits.
2130 *
2131 * Exceptions type 4.
2132 */
2133FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2134{
2135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2136 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2137 {
2138 case IEM_OP_PRF_SIZE_OP: /* SSE */
2139 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2140 {
2141 /*
2142 * Register, register.
2143 */
2144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2145 IEM_MC_BEGIN(2, 0);
2146 IEM_MC_ARG(uint128_t *, pDst, 0);
2147 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2148 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2149 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2150 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2151 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2152 IEM_MC_ADVANCE_RIP();
2153 IEM_MC_END();
2154 }
2155 else
2156 {
2157 /*
2158 * Register, memory.
2159 */
2160 IEM_MC_BEGIN(2, 2);
2161 IEM_MC_ARG(uint128_t *, pDst, 0);
2162 IEM_MC_LOCAL(uint128_t, uSrc);
2163 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2165
2166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2168 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2169 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2170
2171 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2172 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2173
2174 IEM_MC_ADVANCE_RIP();
2175 IEM_MC_END();
2176 }
2177 return VINF_SUCCESS;
2178
2179 case 0: /* MMX */
2180 if (!pImpl->pfnU64)
2181 return IEMOP_RAISE_INVALID_OPCODE();
2182 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2183 {
2184 /*
2185 * Register, register.
2186 */
2187 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2188 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2190 IEM_MC_BEGIN(2, 0);
2191 IEM_MC_ARG(uint64_t *, pDst, 0);
2192 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2193 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2194 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2195 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2196 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2197 IEM_MC_ADVANCE_RIP();
2198 IEM_MC_END();
2199 }
2200 else
2201 {
2202 /*
2203 * Register, memory.
2204 */
2205 IEM_MC_BEGIN(2, 2);
2206 IEM_MC_ARG(uint64_t *, pDst, 0);
2207 IEM_MC_LOCAL(uint64_t, uSrc);
2208 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2210
2211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2213 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2214 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2215
2216 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2217 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2218
2219 IEM_MC_ADVANCE_RIP();
2220 IEM_MC_END();
2221 }
2222 return VINF_SUCCESS;
2223
2224 default:
2225 return IEMOP_RAISE_INVALID_OPCODE();
2226 }
2227}
2228
2229
/** Opcode 0x0f 0x68 - punpckhbw Pq,Qq / (66) punpckhbw Vdq,Wdq. Interleave high bytes. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69 - punpckhwd Pq,Qd / (66) punpckhwd Vdq,Wdq. Interleave high words. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a - punpckhdq Pq,Qd / (66) punpckhdq Vdq,Wdq. Interleave high dwords. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. (Stubbed.) */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c - punpcklqdq Vdq,Wdq (SSE2 only, low qwords). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d - punpckhqdq Vdq,Wdq (SSE2 only, high qwords). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2271
2272
2273/** Opcode 0x0f 0x6e. */
2274FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2275{
2276 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2277 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2278 {
2279 case IEM_OP_PRF_SIZE_OP: /* SSE */
2280 IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
2281 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2282 {
2283 /* XMM, greg*/
2284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2285 IEM_MC_BEGIN(0, 1);
2286 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2287 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2288 {
2289 IEM_MC_LOCAL(uint64_t, u64Tmp);
2290 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2291 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2292 }
2293 else
2294 {
2295 IEM_MC_LOCAL(uint32_t, u32Tmp);
2296 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2297 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2298 }
2299 IEM_MC_ADVANCE_RIP();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 /* XMM, [mem] */
2305 IEM_MC_BEGIN(0, 2);
2306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2307 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2310 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2311 {
2312 IEM_MC_LOCAL(uint64_t, u64Tmp);
2313 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2314 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
2315 }
2316 else
2317 {
2318 IEM_MC_LOCAL(uint32_t, u32Tmp);
2319 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2320 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
2321 }
2322 IEM_MC_ADVANCE_RIP();
2323 IEM_MC_END();
2324 }
2325 return VINF_SUCCESS;
2326
2327 case 0: /* MMX */
2328 IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
2329 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2330 {
2331 /* MMX, greg */
2332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2333 IEM_MC_BEGIN(0, 1);
2334 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2335 IEM_MC_LOCAL(uint64_t, u64Tmp);
2336 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2337 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2338 else
2339 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2340 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2341 IEM_MC_ADVANCE_RIP();
2342 IEM_MC_END();
2343 }
2344 else
2345 {
2346 /* MMX, [mem] */
2347 IEM_MC_BEGIN(0, 2);
2348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2349 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2352 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2353 {
2354 IEM_MC_LOCAL(uint64_t, u64Tmp);
2355 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2356 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2357 }
2358 else
2359 {
2360 IEM_MC_LOCAL(uint32_t, u32Tmp);
2361 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
2362 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2363 }
2364 IEM_MC_ADVANCE_RIP();
2365 IEM_MC_END();
2366 }
2367 return VINF_SUCCESS;
2368
2369 default:
2370 return IEMOP_RAISE_INVALID_OPCODE();
2371 }
2372}
2373
2374
/**
 * Opcode 0x0f 0x6f - movq Pq,Qq (MMX), (66) movdqa Vdq,Wdq and (F3) movdqu
 * Vdq,Wdq.  Full register/memory loads; the 66 form enforces 16-byte
 * alignment, the F3 form does not.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - both SSE forms share the code below, differing only in fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2468
2469
2470/** Opcode 0x0f 0x70. The immediate here is evil! */
2471FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2472{
2473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2474 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2475 {
2476 case IEM_OP_PRF_SIZE_OP: /* SSE */
2477 case IEM_OP_PRF_REPNZ: /* SSE */
2478 case IEM_OP_PRF_REPZ: /* SSE */
2479 {
2480 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2481 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2482 {
2483 case IEM_OP_PRF_SIZE_OP:
2484 IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
2485 pfnAImpl = iemAImpl_pshufd;
2486 break;
2487 case IEM_OP_PRF_REPNZ:
2488 IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
2489 pfnAImpl = iemAImpl_pshuflw;
2490 break;
2491 case IEM_OP_PRF_REPZ:
2492 IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
2493 pfnAImpl = iemAImpl_pshufhw;
2494 break;
2495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2496 }
2497 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2498 {
2499 /*
2500 * Register, register.
2501 */
2502 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504
2505 IEM_MC_BEGIN(3, 0);
2506 IEM_MC_ARG(uint128_t *, pDst, 0);
2507 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2508 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2509 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2510 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2511 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2512 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2513 IEM_MC_ADVANCE_RIP();
2514 IEM_MC_END();
2515 }
2516 else
2517 {
2518 /*
2519 * Register, memory.
2520 */
2521 IEM_MC_BEGIN(3, 2);
2522 IEM_MC_ARG(uint128_t *, pDst, 0);
2523 IEM_MC_LOCAL(uint128_t, uSrc);
2524 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2526
2527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2528 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2529 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2531 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2532
2533 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2534 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2535 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2536
2537 IEM_MC_ADVANCE_RIP();
2538 IEM_MC_END();
2539 }
2540 return VINF_SUCCESS;
2541 }
2542
2543 case 0: /* MMX Extension */
2544 IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
2545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2546 {
2547 /*
2548 * Register, register.
2549 */
2550 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2552
2553 IEM_MC_BEGIN(3, 0);
2554 IEM_MC_ARG(uint64_t *, pDst, 0);
2555 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2556 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2557 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2558 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2559 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2560 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2561 IEM_MC_ADVANCE_RIP();
2562 IEM_MC_END();
2563 }
2564 else
2565 {
2566 /*
2567 * Register, memory.
2568 */
2569 IEM_MC_BEGIN(3, 2);
2570 IEM_MC_ARG(uint64_t *, pDst, 0);
2571 IEM_MC_LOCAL(uint64_t, uSrc);
2572 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2574
2575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2576 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2577 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2579 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2580
2581 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2582 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2583 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2584
2585 IEM_MC_ADVANCE_RIP();
2586 IEM_MC_END();
2587 }
2588 return VINF_SUCCESS;
2589
2590 default:
2591 return IEMOP_RAISE_INVALID_OPCODE();
2592 }
2593}
2594
2595
/** Opcode 0x0f 0x71 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);


/**
 * Opcode 0x0f 0x71 (Group 12) - word shifts by immediate.  Only the
 * register operand forms (mod=3) are valid; dispatches on /reg and the
 * operand-size prefix.
 */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2649
2650
/** Opcode 0x0f 0x72 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);


/**
 * Opcode 0x0f 0x72 (Group 13) - dword shifts by immediate.  Only the
 * register operand forms (mod=3) are valid; dispatches on /reg and the
 * operand-size prefix.
 */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2704
2705
/** Opcode 0x0f 0x73 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. (Stubbed.) */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT


/**
 * Opcode 0x0f 0x73 (Group 14) - qword/dqword shifts by immediate.  Only the
 * register operand forms (mod=3) are valid; /3 (psrldq) and /7 (pslldq)
 * additionally require the operand-size (0x66) prefix.
 */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE2 form only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE2 form only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2764
2765
2766/**
2767 * Common worker for SSE2 and MMX instructions on the forms:
2768 * pxxx mm1, mm2/mem64
2769 * pxxx xmm1, xmm2/mem128
2770 *
2771 * Proper alignment of the 128-bit operand is enforced.
2772 * Exceptions type 4. SSE2 and MMX cpuid checks.
2773 */
2774FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2778 {
2779 case IEM_OP_PRF_SIZE_OP: /* SSE */
2780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2781 {
2782 /*
2783 * Register, register.
2784 */
2785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2786 IEM_MC_BEGIN(2, 0);
2787 IEM_MC_ARG(uint128_t *, pDst, 0);
2788 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2789 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2790 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2791 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2792 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2793 IEM_MC_ADVANCE_RIP();
2794 IEM_MC_END();
2795 }
2796 else
2797 {
2798 /*
2799 * Register, memory.
2800 */
2801 IEM_MC_BEGIN(2, 2);
2802 IEM_MC_ARG(uint128_t *, pDst, 0);
2803 IEM_MC_LOCAL(uint128_t, uSrc);
2804 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2806
2807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2810 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2811
2812 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2813 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2814
2815 IEM_MC_ADVANCE_RIP();
2816 IEM_MC_END();
2817 }
2818 return VINF_SUCCESS;
2819
2820 case 0: /* MMX */
2821 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2822 {
2823 /*
2824 * Register, register.
2825 */
2826 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2827 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2829 IEM_MC_BEGIN(2, 0);
2830 IEM_MC_ARG(uint64_t *, pDst, 0);
2831 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2832 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2833 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2834 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2835 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2836 IEM_MC_ADVANCE_RIP();
2837 IEM_MC_END();
2838 }
2839 else
2840 {
2841 /*
2842 * Register, memory.
2843 */
2844 IEM_MC_BEGIN(2, 2);
2845 IEM_MC_ARG(uint64_t *, pDst, 0);
2846 IEM_MC_LOCAL(uint64_t, uSrc);
2847 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2849
2850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2852 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2853 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2854
2855 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2856 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2857
2858 IEM_MC_ADVANCE_RIP();
2859 IEM_MC_END();
2860 }
2861 return VINF_SUCCESS;
2862
2863 default:
2864 return IEMOP_RAISE_INVALID_OPCODE();
2865 }
2866}
2867
2868
/** Opcode 0x0f 0x74 - pcmpeqb Pq,Qq / (66) pcmpeqb Vdq,Wdq. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}


/** Opcode 0x0f 0x75 - pcmpeqw Pq,Qq / (66) pcmpeqw Vdq,Wdq. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}


/** Opcode 0x0f 0x76 - pcmpeqd Pq,Qq / (66) pcmpeqd Vdq,Wdq.
 * (sic: "pcmped" in the identifier should read "pcmpeqd"; kept to avoid
 * breaking the opcode-table reference.) */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2891
2892
/** Opcode 0x0f 0x77. (Stubbed.) */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. (Invalid-opcode stub.) */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. (Invalid-opcode stub.) */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. (Stubbed.) */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. (Stubbed.) */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2903
2904
2905/** Opcode 0x0f 0x7e. */
2906FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
2907{
2908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2909 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2910 {
2911 case IEM_OP_PRF_SIZE_OP: /* SSE */
2912 IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
2913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2914 {
2915 /* greg, XMM */
2916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2917 IEM_MC_BEGIN(0, 1);
2918 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2919 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2920 {
2921 IEM_MC_LOCAL(uint64_t, u64Tmp);
2922 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2923 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
2924 }
2925 else
2926 {
2927 IEM_MC_LOCAL(uint32_t, u32Tmp);
2928 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2929 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
2930 }
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 else
2935 {
2936 /* [mem], XMM */
2937 IEM_MC_BEGIN(0, 2);
2938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2939 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2942 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2943 {
2944 IEM_MC_LOCAL(uint64_t, u64Tmp);
2945 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2946 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
2947 }
2948 else
2949 {
2950 IEM_MC_LOCAL(uint32_t, u32Tmp);
2951 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2952 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
2953 }
2954 IEM_MC_ADVANCE_RIP();
2955 IEM_MC_END();
2956 }
2957 return VINF_SUCCESS;
2958
2959 case 0: /* MMX */
2960 IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
2961 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2962 {
2963 /* greg, MMX */
2964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2965 IEM_MC_BEGIN(0, 1);
2966 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2967 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2968 {
2969 IEM_MC_LOCAL(uint64_t, u64Tmp);
2970 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2971 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
2972 }
2973 else
2974 {
2975 IEM_MC_LOCAL(uint32_t, u32Tmp);
2976 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2977 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
2978 }
2979 IEM_MC_ADVANCE_RIP();
2980 IEM_MC_END();
2981 }
2982 else
2983 {
2984 /* [mem], MMX */
2985 IEM_MC_BEGIN(0, 2);
2986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2987 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2990 if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2991 {
2992 IEM_MC_LOCAL(uint64_t, u64Tmp);
2993 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2994 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
2995 }
2996 else
2997 {
2998 IEM_MC_LOCAL(uint32_t, u32Tmp);
2999 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3000 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
3001 }
3002 IEM_MC_ADVANCE_RIP();
3003 IEM_MC_END();
3004 }
3005 return VINF_SUCCESS;
3006
3007 default:
3008 return IEMOP_RAISE_INVALID_OPCODE();
3009 }
3010}
3011
3012
/** Opcode 0x0f 0x7f. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    /*
     * Store from MMX/XMM register: movq Qq,Pq (no prefix), movdqa Wdq,Vdq (0x66),
     * movdqu Wdq,Vdq (0xf3).  The prefix bits select the form below.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned SSE forms share the code below,
               differing only in the store macro selected via fAligned. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 * Note: GCPtrEffSrc is the *destination* of the store here; the
                 * name follows the effective-address helper convention.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* Any other prefix combination (e.g. 0xf2) is an invalid encoding. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3107
3108
3109
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* Near conditional jump: taken when the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (and 64-bit, see above) operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3143
3144
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* Near conditional jump: taken when OF is clear (test inverted: fall thru when set). */
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3178
3179
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* Near conditional jump: taken when the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3213
3214
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* Near conditional jump: taken when CF is clear (test inverted: fall thru when set). */
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3248
3249
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* Near conditional jump: taken when the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3283
3284
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* Near conditional jump: taken when ZF is clear (test inverted: fall thru when set). */
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3318
3319
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* Near conditional jump: taken when CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3353
3354
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* Near conditional jump: taken when both CF and ZF are clear (unsigned above). */
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3388
3389
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* Near conditional jump: taken when the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3423
3424
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* Near conditional jump: taken when SF is clear (test inverted: fall thru when set). */
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3458
3459
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* Near conditional jump: taken when the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3493
3494
3495/** Opcode 0x0f 0x8b. */
3496FNIEMOP_DEF(iemOp_jnp_Jv)
3497{
3498 IEMOP_MNEMONIC("jo Jv");
3499 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3500 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3501 {
3502 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3503 IEMOP_HLP_NO_LOCK_PREFIX();
3504
3505 IEM_MC_BEGIN(0, 0);
3506 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3507 IEM_MC_ADVANCE_RIP();
3508 } IEM_MC_ELSE() {
3509 IEM_MC_REL_JMP_S16(i16Imm);
3510 } IEM_MC_ENDIF();
3511 IEM_MC_END();
3512 }
3513 else
3514 {
3515 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3516 IEMOP_HLP_NO_LOCK_PREFIX();
3517
3518 IEM_MC_BEGIN(0, 0);
3519 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3520 IEM_MC_ADVANCE_RIP();
3521 } IEM_MC_ELSE() {
3522 IEM_MC_REL_JMP_S32(i32Imm);
3523 } IEM_MC_ENDIF();
3524 IEM_MC_END();
3525 }
3526 return VINF_SUCCESS;
3527}
3528
3529
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* Near conditional jump: taken when SF != OF (signed less-than). */
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3563
3564
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* Near conditional jump: taken when SF == OF (signed greater-or-equal; test inverted). */
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3598
3599
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* Near conditional jump: taken when ZF is set or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3633
3634
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* Near conditional jump: taken when ZF is clear and SF == OF (signed greater; test inverted). */
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* in 64-bit mode near jumps default to 64-bit operand size */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3668
3669
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* SETO: store 1 in the byte r/m operand when OF is set, 0 otherwise. */
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3708
3709
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* SETNO: store 1 in the byte r/m operand when OF is clear, 0 otherwise (values swapped vs. SETO). */
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3748
3749
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* SETC/SETB: store 1 in the byte r/m operand when CF is set, 0 otherwise. */
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3788
3789
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* SETNC/SETAE: store 1 in the byte r/m operand when CF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3828
3829
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* SETE/SETZ: store 1 in the byte r/m operand when ZF is set, 0 otherwise. */
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3868
3869
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* SETNE/SETNZ: store 1 in the byte r/m operand when ZF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3908
3909
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* SETBE/SETNA: store 1 in the byte r/m operand when CF or ZF is set (unsigned below-or-equal). */
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3948
3949
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* SETNBE/SETA: store 1 in the byte r/m operand when both CF and ZF are clear (unsigned above). */
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3988
3989
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* SETS: store 1 in the byte r/m operand when SF is set, 0 otherwise. */
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4028
4029
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* SETNS: store 1 in the byte r/m operand when SF is clear, 0 otherwise. */
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4068
4069
4070/** Opcode 0x0f 0x9a. */
4071FNIEMOP_DEF(iemOp_setp_Eb)
4072{
4073 IEMOP_MNEMONIC("setnp Eb");
4074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4075 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4076
4077 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4078 * any way. AMD says it's "unused", whatever that means. We're
4079 * ignoring for now. */
4080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4081 {
4082 /* register target */
4083 IEM_MC_BEGIN(0, 0);
4084 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4085 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4086 } IEM_MC_ELSE() {
4087 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4088 } IEM_MC_ENDIF();
4089 IEM_MC_ADVANCE_RIP();
4090 IEM_MC_END();
4091 }
4092 else
4093 {
4094 /* memory target */
4095 IEM_MC_BEGIN(0, 1);
4096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4098 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4099 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4100 } IEM_MC_ELSE() {
4101 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4102 } IEM_MC_ENDIF();
4103 IEM_MC_ADVANCE_RIP();
4104 IEM_MC_END();
4105 }
4106 return VINF_SUCCESS;
4107}
4108
4109
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP/SETPO Eb: store 1 in the byte destination if PF is clear, else 0. */
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - note the inverted 0/1 constants vs. setp. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4148
4149
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL/SETNGE Eb: store 1 if SF != OF (signed less-than), else 0. */
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4188
4189
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE Eb: store 1 if SF == OF (signed greater-or-equal), else 0. */
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - same test as setl with 0/1 swapped. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4228
4229
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG Eb: store 1 if ZF is set or SF != OF (signed less-or-equal), else 0. */
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4268
4269
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG Eb: store 1 if ZF is clear and SF == OF (signed greater-than), else 0. */
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - same test as setle with 0/1 swapped. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4308
4309
/**
 * Common 'push segment-register' helper.
 *
 * Fetches the selector value of @a iReg (zero extended for 32/64-bit operand
 * sizes) and pushes it on the stack.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Pushing ES/CS/SS/DS (the sub-FS registers) is invalid in 64-bit mode. */
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Special push variant: real CPUs may only write the low 16 bits. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4352
4353
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4361
4362
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - segment loads have side effects, so defer to a C implementation. */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4370
4371
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - fully handled by the C implementation. */
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4379
4380
4381/**
4382 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4383 * iemOp_bts_Ev_Gv.
4384 */
4385FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4386{
4387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4388 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4389
4390 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4391 {
4392 /* register destination. */
4393 IEMOP_HLP_NO_LOCK_PREFIX();
4394 switch (pIemCpu->enmEffOpSize)
4395 {
4396 case IEMMODE_16BIT:
4397 IEM_MC_BEGIN(3, 0);
4398 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4399 IEM_MC_ARG(uint16_t, u16Src, 1);
4400 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4401
4402 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4403 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4404 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4405 IEM_MC_REF_EFLAGS(pEFlags);
4406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4407
4408 IEM_MC_ADVANCE_RIP();
4409 IEM_MC_END();
4410 return VINF_SUCCESS;
4411
4412 case IEMMODE_32BIT:
4413 IEM_MC_BEGIN(3, 0);
4414 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4415 IEM_MC_ARG(uint32_t, u32Src, 1);
4416 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4417
4418 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4419 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4420 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4421 IEM_MC_REF_EFLAGS(pEFlags);
4422 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4423
4424 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4425 IEM_MC_ADVANCE_RIP();
4426 IEM_MC_END();
4427 return VINF_SUCCESS;
4428
4429 case IEMMODE_64BIT:
4430 IEM_MC_BEGIN(3, 0);
4431 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4432 IEM_MC_ARG(uint64_t, u64Src, 1);
4433 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4434
4435 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4436 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4437 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4438 IEM_MC_REF_EFLAGS(pEFlags);
4439 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4440
4441 IEM_MC_ADVANCE_RIP();
4442 IEM_MC_END();
4443 return VINF_SUCCESS;
4444
4445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4446 }
4447 }
4448 else
4449 {
4450 /* memory destination. */
4451
4452 uint32_t fAccess;
4453 if (pImpl->pfnLockedU16)
4454 fAccess = IEM_ACCESS_DATA_RW;
4455 else /* BT */
4456 {
4457 IEMOP_HLP_NO_LOCK_PREFIX();
4458 fAccess = IEM_ACCESS_DATA_R;
4459 }
4460
4461 NOREF(fAccess);
4462
4463 /** @todo test negative bit offsets! */
4464 switch (pIemCpu->enmEffOpSize)
4465 {
4466 case IEMMODE_16BIT:
4467 IEM_MC_BEGIN(3, 2);
4468 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4469 IEM_MC_ARG(uint16_t, u16Src, 1);
4470 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4472 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4473
4474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4475 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4476 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4477 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4478 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4479 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4480 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4481 IEM_MC_FETCH_EFLAGS(EFlags);
4482
4483 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4484 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4485 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4486 else
4487 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4488 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4489
4490 IEM_MC_COMMIT_EFLAGS(EFlags);
4491 IEM_MC_ADVANCE_RIP();
4492 IEM_MC_END();
4493 return VINF_SUCCESS;
4494
4495 case IEMMODE_32BIT:
4496 IEM_MC_BEGIN(3, 2);
4497 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4498 IEM_MC_ARG(uint32_t, u32Src, 1);
4499 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4501 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4502
4503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4504 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4505 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4506 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4507 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4508 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4509 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4510 IEM_MC_FETCH_EFLAGS(EFlags);
4511
4512 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4513 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4514 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4515 else
4516 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4517 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4518
4519 IEM_MC_COMMIT_EFLAGS(EFlags);
4520 IEM_MC_ADVANCE_RIP();
4521 IEM_MC_END();
4522 return VINF_SUCCESS;
4523
4524 case IEMMODE_64BIT:
4525 IEM_MC_BEGIN(3, 2);
4526 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4527 IEM_MC_ARG(uint64_t, u64Src, 1);
4528 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4530 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4531
4532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4533 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4534 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4535 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4536 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4537 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4538 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4539 IEM_MC_FETCH_EFLAGS(EFlags);
4540
4541 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4542 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4543 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4544 else
4545 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4546 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4547
4548 IEM_MC_COMMIT_EFLAGS(EFlags);
4549 IEM_MC_ADVANCE_RIP();
4550 IEM_MC_END();
4551 return VINF_SUCCESS;
4552
4553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4554 }
4555 }
4556}
4557
4558
4559/** Opcode 0x0f 0xa3. */
4560FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4561{
4562 IEMOP_MNEMONIC("bt Gv,Gv");
4563 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4564}
4565
4566
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double precision shift with an immediate byte shift count.  The count
 * immediate follows the ModR/M bytes, hence the cbImm=1 argument to the
 * effective address calculation in the memory path.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - the immediate can be fetched right away. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* cbImm=1: tell the address calc an immediate byte follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4711
4712
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double precision shift where the shift count comes from the CL register.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* No trailing immediate here (count is in CL), hence cbImm=0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4856
4857
4858
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - immediate-count double precision left shift. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4865
4866
/** Opcode 0x0f 0xa5. */  /* was mislabelled 0xa7; SHLD Ev,Gv,CL encodes as 0x0f 0xa5 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - CL-count double precision left shift. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4873
4874
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4882
4883
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - segment loads have side effects, so defer to a C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4891
4892
/** Opcode 0x0f 0xaa - RSM (resume from SMM); not implemented, stubbed. */
FNIEMOP_STUB(iemOp_rsm);
4895
4896
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - test and set; shares the common bit-op worker. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4903
4904
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - immediate-count double precision right shift. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4911
4912
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - CL-count double precision right shift. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4919
4920
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 - #UD unless the guest CPU advertises FXSR; the heavy
       lifting is done by the iemCImpl_fxsave C implementation. */
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4939
4940
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 - #UD unless the guest CPU advertises FXSR; defers to
       the iemCImpl_fxrstor C implementation. */
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4959
4960
/** Opcode 0x0f 0xae mem/2 - LDMXCSR; not implemented, stubbed. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - STMXCSR; not implemented, stubbed. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - XSAVE; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - XRSTOR; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - XSAVEOPT; raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - CLFLUSH; not implemented, stubbed. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
4978
4979
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE - #UD unless the guest advertises SSE2.  Uses the host's real
       lfence when available, otherwise a generic fallback fence. */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4997
4998
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE - #UD unless the guest advertises SSE2.  Uses the host's real
       mfence when available, otherwise a generic fallback fence. */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5016
5017
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE - #UD unless the guest advertises SSE2.  Uses the host's real
       sfence when available, otherwise a generic fallback fence.
       NOTE(review): SFENCE is architecturally SSE1 (fSse); gating on fSse2
       matches the sibling fences here but may be stricter than real CPUs. */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5035
5036
/** Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE; raises \#UD (not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE; raises \#UD (not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE; raises \#UD (not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE; raises \#UD (not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5048
5049
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  Memory forms are selected purely by the ModR/M
     * reg field; register forms (mod == 3) are additionally selected by the
     * mandatory prefix (none vs. F3), matching the SDM's group 15 table.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - dispatch on the mandatory prefix. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefix: fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5108
5109
/**
 * Opcode 0x0f 0xaf - two-operand IMUL (Gv = Gv * Ev).
 * SF/ZF/AF/PF are architecturally undefined, hence the verification
 * exemption; the shared rv,rm binary-operator helper does the decoding.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5117
5118
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 *
 * Compares AL with the destination byte; on equality stores the source
 * register into the destination, otherwise loads the destination into AL.
 * The AL comparand is passed by reference so the assembly helper can update
 * it; LOCK selects the atomic helper variant (memory form).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte R/W, work on a local AL copy and
           commit it (and EFLAGS) back after the operation. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5176
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 *
 * Word/dword/qword variant of CMPXCHG: compares AX/EAX/RAX with the
 * destination, exchanging with the source register on match.  The
 * accumulator is passed by reference so the helper can update it.  On
 * 32-bit hosts (RT_ARCH_X86) the 64-bit source operand is passed by
 * reference instead of by value, since the assembly helper there takes a
 * pointer - presumably a calling-convention limitation.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes in 64-bit mode zero the upper register halves. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map R/W, work on a local accumulator copy and
           commit memory, EFLAGS and the accumulator afterwards. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5369
5370
/**
 * Common worker for LSS/LFS/LGS: loads a far pointer (offset + selector)
 * from memory into iSegReg and the ModR/M reg-field general register.
 *
 * The offset is read at the effective address and the 16-bit selector
 * immediately after it (at +2/+4/+8 depending on operand size); the actual
 * segment/register loading is done by the iemCImpl_load_SReg_Greg C worker.
 * Memory-operand forms only - the caller must reject mod==3.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5432
5433
/**
 * Opcode 0x0f 0xb2 - LSS Gv,Mp.
 * Register operands are invalid for far-pointer loads (#UD); memory forms
 * defer to the common SS-loading worker.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5443
5444
/**
 * Opcode 0x0f 0xb3 - BTR Ev,Gv (test and reset bit).
 * Decoding is shared with the other bit-test instructions.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
5451
5452
/**
 * Opcode 0x0f 0xb4 - LFS Gv,Mp.
 * Register operands raise #UD; memory forms defer to the common FS-loading
 * worker.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5462
5463
/**
 * Opcode 0x0f 0xb5 - LGS Gv,Mp.
 * Register operands raise #UD; memory forms defer to the common GS-loading
 * worker.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5473
5474
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb.
 * Zero-extends a byte source (register or memory) into a 16/32/64-bit
 * destination register chosen by the effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5564
5565
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew.
 * Zero-extends a word source into a 32/64-bit register; the 16-bit operand
 * size (which would make it a plain word move) is folded into the 32-bit
 * path here, see the todo below.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5631
5632
/** Opcode 0x0f 0xb8 - POPCNT Gv,Ev (F3 prefix) / JMPE.  Not implemented
 *  yet; FNIEMOP_STUB presumably asserts/fails when hit. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5635
5636
/**
 * Opcode 0x0f 0xb9 - group 10 / UD1.
 * Architecturally reserved for raising #UD, which is exactly what we do.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5643
5644
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * Dispatches on the ModR/M reg field (0-3 are #UD) to the bit-test
 * implementations, with the bit offset taken from an immediate byte and
 * masked to the operand width (0x0f/0x1f/0x3f).  For memory operands the
 * immediate is fetched after the effective address calculation - presumably
 * why IEM_MC_CALC_RM_EFF_ADDR gets 1 as its last argument (one immediate
 * byte still to come).  BT has no locked variant and only reads memory.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; the modifying forms (with a locked helper) need R/W
           access and permit the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5806
5807
/**
 * Opcode 0x0f 0xbb - BTC Ev,Gv (test and complement bit).
 * Decoding is shared with the other bit-test instructions.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5814
5815
/**
 * Opcode 0x0f 0xbc - BSF Gv,Ev (bit scan forward).
 * Only ZF is defined; the rest are exempted from verification.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5823
5824
/**
 * Opcode 0x0f 0xbd - BSR Gv,Ev (bit scan reverse).
 * Only ZF is defined; the rest are exempted from verification.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5832
5833
/**
 * Opcode 0x0f 0xbe - MOVSX Gv,Eb.
 * Sign-extends a byte source (register or memory) into a 16/32/64-bit
 * destination register chosen by the effective operand size.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5923
5924
/**
 * Opcode 0x0f 0xbf - MOVSX Gv,Ew.
 * Sign-extends a word source into a 32/64-bit register; non-64-bit operand
 * sizes share the 32-bit path, see the todo below.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5990
5991
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: LOCK is not allowed. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy so the exchanged
           (old destination) value can be stored back to the GPR afterwards.
           A LOCK prefix selects the atomic worker. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* Note: redundant; falls through to the same return below. */
    }
    return VINF_SUCCESS;
}
6049
6050
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: LOCK is not allowed. One case per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes clear the high dword of both operands' registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As with the byte form: work on a local copy of the register operand,
           map the destination read/write, pick the locked worker when a LOCK
           prefix is present, then store the old destination value back to the
           GPR after committing memory and EFLAGS. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6202
/* Opcodes 0x0f 0xc2..0xc6 are declared as stubs (not yet implemented). */
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6217
6218
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* A LOCK prefix selects the atomic worker. */
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) store the value the worker left in the
       comparand locals back to EAX and EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6263
6264
/* The remaining group 9 encodings are declared as UD stubs (not yet implemented;
   presumably raise invalid opcode when hit -- see FNIEMOP_UD_STUB_1). */
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6282
6283
6284/** Opcode 0x0f 0xc7. */
6285FNIEMOP_DEF(iemOp_Grp9)
6286{
6287 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6289 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6290 {
6291 case 0: case 2: case 3: case 4: case 5:
6292 return IEMOP_RAISE_INVALID_OPCODE();
6293 case 1:
6294 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6295 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6296 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6297 return IEMOP_RAISE_INVALID_OPCODE();
6298 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6299 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6300 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6301 case 6:
6302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6303 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6304 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6305 {
6306 case 0:
6307 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6308 case IEM_OP_PRF_SIZE_OP:
6309 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6310 case IEM_OP_PRF_REPZ:
6311 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6312 default:
6313 return IEMOP_RAISE_INVALID_OPCODE();
6314 }
6315 case 7:
6316 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6317 {
6318 case 0:
6319 case IEM_OP_PRF_REPZ:
6320 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6321 default:
6322 return IEMOP_RAISE_INVALID_OPCODE();
6323 }
6324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6325 }
6326}
6327
6328
/**
 * Common 'bswap register' helper.
 *
 * Dispatches on the effective operand size and byte-swaps the given
 * general-purpose register in place.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap is handled via a 32-bit reference on purpose. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6368
6369
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6379
6380
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    /* REX.B selects r9; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6387
6388
6389/** Opcode 0x0f 0xca. */
6390FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6391{
6392 IEMOP_MNEMONIC("bswap rDX/r9");
6393 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6394}
6395
6396
6397/** Opcode 0x0f 0xcb. */
6398FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6399{
6400 IEMOP_MNEMONIC("bswap rBX/r9");
6401 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6402}
6403
6404
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    /* REX.B selects r12; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6411
6412
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    /* REX.B selects r13; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6419
6420
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    /* REX.B selects r14; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6427
6428
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    /* REX.B selects r15; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6435
6436
6437
/* Opcodes 0x0f 0xd0..0xd6 are declared as stubs (not yet implemented). */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6452
6453
6454/** Opcode 0x0f 0xd7. */
6455FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6456{
6457 /* Docs says register only. */
6458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6459 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6460 return IEMOP_RAISE_INVALID_OPCODE();
6461
6462 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6463 /** @todo testcase: Check that the instruction implicitly clears the high
6464 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6465 * and opcode modifications are made to work with the whole width (not
6466 * just 128). */
6467 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6468 {
6469 case IEM_OP_PRF_SIZE_OP: /* SSE */
6470 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6471 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6472 IEM_MC_BEGIN(2, 0);
6473 IEM_MC_ARG(uint64_t *, pDst, 0);
6474 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6475 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6476 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6477 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6478 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6479 IEM_MC_ADVANCE_RIP();
6480 IEM_MC_END();
6481 return VINF_SUCCESS;
6482
6483 case 0: /* MMX */
6484 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6485 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6486 IEM_MC_BEGIN(2, 0);
6487 IEM_MC_ARG(uint64_t *, pDst, 0);
6488 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6489 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6490 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6491 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6492 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 return VINF_SUCCESS;
6496
6497 default:
6498 return IEMOP_RAISE_INVALID_OPCODE();
6499 }
6500}
6501
6502
/* Opcodes 0x0f 0xd8..0xee are declared as stubs (not yet implemented). */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6549
6550
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Shared MMX/SSE2 full-width binary-op decoder with the PXOR worker table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6557
6558
/* Opcodes 0x0f 0xf0..0xfe are declared as stubs (not yet implemented). */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6589
6590
/**
 * Two-byte opcode (0x0f xx) dispatch table, indexed by the second opcode byte.
 * Note: the btc entry below was previously mislabeled as 0xbd; it sits at
 * index 0xbb (the entry order was always correct, only the comment was wrong).
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
6850
6851/** @} */
6852
6853
6854/** @name One byte opcodes.
6855 *
6856 * @{
6857 */
6858
/** Opcode 0x00.  ADD Eb,Gb - byte add, register/memory destination. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01.  ADD Ev,Gv - word/dword/qword add, register/memory destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02.  ADD Gb,Eb - byte add, register destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03.  ADD Gv,Ev - word/dword/qword add, register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04.  ADD AL,Ib - byte add with immediate, AL destination. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05.  ADD rAX,Iz - add with immediate, rAX destination. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
6905
6906
/** Opcode 0x06.  PUSH ES (invalid in 64-bit mode; handled by the common worker). */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
6913
6914
/** Opcode 0x07.  POP ES - invalid in 64-bit mode, no lock prefix allowed. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register loads have side effects (checks, hidden state), so defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
6923
6924
/** Opcode 0x08.  OR Eb,Gb.  AF is undefined after OR, so tell the verifier to ignore it. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
6932
6933
6934/** Opcode 0x09. */
6935FNIEMOP_DEF(iemOp_or_Ev_Gv)
6936{
6937 IEMOP_MNEMONIC("or Ev,Gv ");
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6939 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
6940}
6941
6942
/** Opcode 0x0a.  OR Gb,Eb.  AF undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b.  OR Gv,Ev.  AF undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c.  OR AL,Ib.  AF undefined after OR. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d.  OR rAX,Iz.  AF undefined after OR. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
6977
6978
/** Opcode 0x0e.  PUSH CS (invalid in 64-bit mode; handled by the common worker). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
6985
6986
/** Opcode 0x0f.  Two-byte escape: fetch the next opcode byte and dispatch via the two-byte map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
6993
/** Opcode 0x10.  ADC Eb,Gb - byte add-with-carry, register/memory destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11.  ADC Ev,Gv - add-with-carry, register/memory destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12.  ADC Gb,Eb - byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13.  ADC Gv,Ev - add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14.  ADC AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15.  ADC rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7040
7041
/** Opcode 0x16.  PUSH SS (invalid in 64-bit mode; handled by the common worker). */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
7048
7049
/** Opcode 0x17.  POP SS - invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to the C implementation like the other segment register pops. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7058
7059
/** Opcode 0x18.  SBB Eb,Gb - byte subtract-with-borrow, register/memory destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19.  SBB Ev,Gv - subtract-with-borrow, register/memory destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a.  SBB Gb,Eb - byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b.  SBB Gv,Ev - subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c.  SBB AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d.  SBB rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7106
7107
/** Opcode 0x1e.  PUSH DS (invalid in 64-bit mode; handled by the common worker). */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
7114
7115
/** Opcode 0x1f.  POP DS - invalid in 64-bit mode, no lock prefix allowed. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to the C implementation like the other segment register pops. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7124
7125
/** Opcode 0x20.  AND Eb,Gb.  AF is undefined after AND, so tell the verifier to ignore it. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21.  AND Ev,Gv.  AF undefined after AND. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22.  AND Gb,Eb.  AF undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23.  AND Gv,Ev.  AF undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24.  AND AL,Ib.  AF undefined after AND. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25.  AND rAX,Iz.  AF undefined after AND. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7178
7179
/** Opcode 0x26.  ES segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7190
7191
/** Opcode 0x27.  DAA - decimal adjust AL; invalid in 64-bit mode; OF undefined. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7201
7202
/** Opcode 0x28.  SUB Eb,Gb - byte subtract, register/memory destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29.  SUB Ev,Gv - subtract, register/memory destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a.  SUB Gb,Eb - byte subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b.  SUB Gv,Ev - subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c.  SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d.  SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7249
7250
/** Opcode 0x2e.  CS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7261
7262
/** Opcode 0x2f.  DAS - decimal adjust AL after subtraction; invalid in 64-bit mode; OF undefined. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7272
7273
/** Opcode 0x30.  XOR Eb,Gb.  AF is undefined after XOR, so tell the verifier to ignore it. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31.  XOR Ev,Gv.  AF undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32.  XOR Gb,Eb.  AF undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33.  XOR Gv,Ev.  AF undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34.  XOR AL,Ib.  AF undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35.  XOR rAX,Iz.  AF undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7326
7327
/** Opcode 0x36.  SS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7338
7339
/** Opcode 0x37.  AAA - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
7342
7343
/** Opcode 0x38.  CMP Eb,Gb - byte compare (destination not written). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39.  CMP Ev,Gv - compare (destination not written). */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a.  CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b.  CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c.  CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d.  CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7392
7393
/** Opcode 0x3e.  DS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7404
7405
/** Opcode 0x3f.  AAS - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
7408
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Table of size-specific worker functions for the operation.
 * @param   iReg    The general purpose register to modify (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* Should be unreachable - all IEMMODE values are handled above. */
}
7453
7454
/** Opcode 0x40.  REX prefix in 64-bit mode; INC eAX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41.  REX.B prefix in 64-bit mode; INC eCX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42.  REX.X prefix in 64-bit mode; INC eDX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43.  REX.BX prefix in 64-bit mode; INC eBX elsewhere. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44.  REX.R prefix in 64-bit mode; INC eSP elsewhere. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45.  REX.RB prefix in 64-bit mode; INC eBP elsewhere. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46.  REX.RX prefix in 64-bit mode; INC eSI elsewhere. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47.  REX.RBX prefix in 64-bit mode; INC eDI elsewhere. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7626
7627
/** Opcode 0x48.  REX.W prefix in 64-bit mode (recalculates effective operand size); DEC eAX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49.  REX.BW prefix in 64-bit mode; DEC eCX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a.  REX.XW prefix in 64-bit mode; DEC eDX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b.  REX.BXW prefix in 64-bit mode; DEC eBX elsewhere. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c.  REX.RW prefix in 64-bit mode; DEC eSP elsewhere. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d.  REX.RBW prefix in 64-bit mode; DEC eBP elsewhere. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e.  REX.RXW prefix in 64-bit mode; DEC eSI elsewhere. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f.  REX.RBXW prefix in 64-bit mode; DEC eDI elsewhere. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7806
7807
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general purpose register to push (X86_GREG_XXX);
 *                  REX.B extension is applied here in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* PUSH defaults to 64-bit operand size in long mode; 0x66 selects 16-bit (no 32-bit form). */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7853
7854
/** Opcode 0x50.  PUSH rAX (or r8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51.  PUSH rCX (or r9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52.  PUSH rDX (or r10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53.  PUSH rBX (or r11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54.  PUSH rSP (or r12 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55.  PUSH rBP (or r13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56.  PUSH rSI (or r14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57.  PUSH rDI (or r15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
7917
7918
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general purpose register to pop into (X86_GREG_XXX);
 *                  REX.B extension is applied here in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* POP defaults to 64-bit operand size in long mode; 0x66 selects 16-bit (no 32-bit form). */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* NOTE(review): the IEM_MC_LOCAL invocations below put the '*' on the name
       token rather than the type; presumably the macro pastes type and name into
       'uint16_t *pu16Dst' etc. - confirm against the IEM_MC_LOCAL definition. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7965
7966
/** Opcode 0x58.  POP rAX (or r8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59.  POP rCX (or r9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a.  POP rDX (or r10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b.  POP rBX (or r11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
7997
7998
/** Opcode 0x5c.  POP rSP - special-cased because the destination is the stack
 *  pointer itself: the value is popped into a local first and then stored. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this selects r12, which needs no SP special casing - use the common path. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8046
8047
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    /* Shared pop-to-general-register worker handles operand size and stack update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8054
8055
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    /* Shared pop-to-general-register worker handles operand size and stack update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8062
8063
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    /* Shared pop-to-general-register worker handles operand size and stack update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8070
8071
/** Opcode 0x60.
 * PUSHA/PUSHAD - push all general registers.  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT); deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside long mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8082
8083
/** Opcode 0x61.
 * POPA/POPAD - pop all general registers.  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT); deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside long mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8094
8095
/** Opcode 0x62.
 * Not implemented yet (stub raises an assertion/returns not-implemented).
 * The name suggests this byte doubles as BOUND Gv,Ma and the EVEX prefix
 * depending on mode -- NOTE(review): confirm when implementing. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8098
8099
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust RPL of the destination selector; invalid in real and
 * V8086 modes (IEMOP_HLP_NO_REAL_OR_V86_MODE).  The actual RPL/ZF logic
 * lives in the assembly/C worker iemAImpl_arpl. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX in the modes where ARPL is valid, so no uRexB/uRexReg here. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination read-write, run the worker, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8148
8149
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Sign-extend the 32-bit source register into the 64-bit destination. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8191
8192
/** Opcode 0x64.
 * FS segment-override prefix: record the prefix, make FS the effective
 * segment, then fetch and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8203
8204
/** Opcode 0x65.
 * GS segment-override prefix: record the prefix, make GS the effective
 * segment, then fetch and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8215
8216
/** Opcode 0x66.
 * Operand-size override prefix: set the prefix flag, recalculate the
 * effective operand size, then fetch and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8227
8228
/** Opcode 0x67.
 * Address-size override prefix: flip the effective address mode relative to
 * the default (16<->32 in legacy modes, 64->32 in long mode), then fetch and
 * dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8245
8246
/** Opcode 0x68.
 * PUSH Iz - push a word/dword immediate; in 64-bit mode the immediate is a
 * sign-extended dword and the default operand size is 64-bit. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8290
8291
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz - three-operand signed multiply: Gv = Ev * Iz.  The
 * multiplication itself (and CF/OF) is done by the iemAImpl_imul_two_*
 * workers operating on a local copy that is stored back to Gv afterwards.
 * SF/ZF/AF/PF are architecturally undefined after IMUL. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the cbImm argument (2) to the effective address
                   calc presumably accounts for the immediate that follows ModR/M
                   when computing RIP-relative addresses -- same pattern below. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is a sign-extended dword. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8450
8451
/** Opcode 0x6a.
 * PUSH Ib - push a sign-extended byte immediate using the effective operand
 * size (default 64-bit in long mode). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm sign-extends via the int8_t type */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8477
8478
/** Opcode 0x6b.
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * Ib.  Mirrors opcode 0x69 except for the immediate
 * size; SF/ZF/AF/PF are architecturally undefined afterwards. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* register operand; (int8_t) cast sign-extends the immediate. */
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp);

            IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* memory operand; cbImm=1 for the byte immediate after ModR/M. */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint16_t, u16Src, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
            IEM_MC_ASSIGN(u16Src, u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;

        case IEMMODE_32BIT:
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* register operand */
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* memory operand */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Src, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
            IEM_MC_ASSIGN(u32Src, u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;

        case IEMMODE_64BIT:
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /* register operand */
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* memory operand */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Src, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
            IEM_MC_ASSIGN(u64Src, u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8631
8632
/** Opcode 0x6c.
 * INS Yb,DX (byte string input) - deferred to a C implementation selected by
 * REP prefix presence and effective address mode. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8660
8661
/** Opcode 0x6d.
 * INS Yv,DX (word/dword string input) - deferred to a C implementation
 * selected by REP prefix, operand size and address mode.  The 64-bit
 * operand size shares the 32-bit workers. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8721
8722
/** Opcode 0x6e.
 * OUTS DX,Yb (byte string output) - deferred to a C implementation selected
 * by REP prefix and address mode; the effective segment is passed along
 * since OUTS reads from DS:/override:rSI. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8750
8751
/** Opcode 0x6f.
 * OUTS DX,Yv (word/dword string output) - deferred to a C implementation
 * selected by REP prefix, operand size and address mode.  The 64-bit
 * operand size shares the 32-bit workers; the effective segment is passed
 * along since OUTS honours segment overrides. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8811
8812
/** Opcode 0x70. JO rel8 - jump taken when OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8830
8831
/** Opcode 0x71. JNO rel8 - jump taken when OF is clear (branches inverted). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8849
/** Opcode 0x72. JC/JB/JNAE rel8 - jump taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8867
8868
/** Opcode 0x73. JNC/JNB/JAE rel8 - jump taken when CF is clear (branches inverted). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8886
8887
/** Opcode 0x74. JE/JZ rel8 - jump taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8905
8906
/** Opcode 0x75. JNE/JNZ rel8 - jump taken when ZF is clear (branches inverted). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8924
8925
/** Opcode 0x76. JBE/JNA rel8 - jump taken when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8943
8944
/** Opcode 0x77. JNBE/JA rel8 - jump taken when both CF and ZF are clear (branches inverted). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8962
8963
/** Opcode 0x78. JS rel8 - jump taken when SF is set. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8981
8982
/** Opcode 0x79. JNS rel8 - jump taken when SF is clear (branches inverted). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9000
9001
/** Opcode 0x7a. JP/JPE rel8 - jump taken when PF is set. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9019
9020
/** Opcode 0x7b. JNP/JPO rel8 - jump taken when PF is clear (branches inverted). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9038
9039
/** Opcode 0x7c. JL/JNGE rel8 - jump taken when SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9057
9058
/** Opcode 0x7d. JNL/JGE rel8 - jump taken when SF == OF (branches inverted). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9076
9077
/**
 * Opcode 0x7e - jle/jng Jb.
 *
 * Jump short (rel8) if less or equal (signed): branch taken when
 * ZF is set OR SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* rel8 displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9095
9096
/**
 * Opcode 0x7f - jnle/jg Jb.
 *
 * Jump short (rel8) if greater (signed): branch taken when ZF is clear
 * AND SF == OF (the ZF-set-or-SF!=OF arm advances RIP).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* rel8 displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9114
9115
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModR/M reg field selects the operation via g_apIemImplGrp1.  The
 * mnemonic string is a packed table of 4-byte entries ("add\0", "or\0\0",
 * ...) indexed by reg*4.  For CMP (no pfnLockedU8) the memory operand is
 * mapped read-only and LOCK is rejected.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* "str + reg*4" indexes into the packed 4-bytes-per-entry mnemonic table. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();  /* LOCK never valid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP: read-only destination, LOCK prefix is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9174
9175
/**
 * Opcode 0x81 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 *
 * Word/dword/qword destination with a same-sized immediate; in 64-bit
 * operand size the immediate is imm32 sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64).  The ModR/M reg field selects the
 * operation; CMP (no pfnLockedUxx) maps memory read-only and rejects LOCK.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed 4-bytes-per-entry mnemonic table indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    /* NOTE(review): unlike e.g. iemOp_xchg_Ev_Gv this switch has no
       IEM_NOT_REACHED_DEFAULT_CASE_RET() default - confirm intentional. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST: read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit GPR. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST: read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* imm32 sign-extended to 64 bits; no 8-byte immediates in x86. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP: read-only destination, LOCK invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the encoded immediate is still only 4 bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9350
9351
/**
 * Opcode 0x82 - alias of Group 1 Eb,Ib (0x80).
 *
 * Only valid outside 64-bit mode (IEMOP_HLP_NO_64BIT raises #UD there);
 * otherwise it simply forwards to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9358
9359
/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * Like 0x81 but the immediate is a single byte sign-extended to the
 * effective operand size (the (int8_t)u8Imm casts below).  The ModR/M reg
 * field selects the operation; CMP (no pfnLockedUxx) maps memory
 * read-only and rejects LOCK.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed 4-bytes-per-entry mnemonic table indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the byte immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* (int8_t) cast sign-extends the byte immediate to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit GPR. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* (int8_t) cast sign-extends the byte immediate to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP: read-only destination, LOCK prefix is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9520
9521
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Reuses the generic byte rm,r8 binary-operator worker with the TEST
 * implementation table; AF is declared undefined for verification.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9530
9531
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Reuses the generic rm,rv binary-operator worker with the TEST
 * implementation table; AF is declared undefined for verification.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9540
9541
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Register form swaps the two byte registers via temporaries.  Memory
 * form maps the byte read/write and swaps through iemAImpl_xchg_u8;
 * no LOCK-prefix rejection here, presumably because XCHG with a memory
 * operand is implicitly locked on x86 - confirm against the SDM.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9589
9590
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Word/dword/qword variant of 0x86.  Register form swaps via locals;
 * memory form swaps through the iemAImpl_xchg_uNN workers.  In the
 * 32-bit memory case the high half of the register operand is cleared
 * after the swap (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF); the register-form
 * 32-bit stores rely on IEM_MC_STORE_GREG_U32 for that.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing the 32-bit register via reference: zero the high half explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9712
9713
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Store byte register Gb into Eb (register or memory destination).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9752
9753
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Store word/dword/qword register Gv into Ev (register or memory
 * destination), switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9840
9841
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Load byte register Gb from Eb (register or memory source).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9878
9879
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Load word/dword/qword register Gv from Ev (register or memory source),
 * switching on the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9966
9967
/**
 * Opcode 0x63 - mode-dependent dispatcher.
 *
 * Outside 64-bit mode this byte encodes ARPL Ew,Gw; in 64-bit mode it is
 * MOVSXD Gv,Ev when the effective operand size is 64-bit, otherwise it
 * degenerates to a plain MOV Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
9977
9978
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Store a segment register into Ev.  Raises \#UD when the reg field
 * selects a non-existent segment register (> GS); REX.R is ignored.
 * Register destinations honour the operand size (zero-extending for
 * 32/64-bit); memory destinations are always a 16-bit store.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);  /* zero-extended */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);  /* zero-extended */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10051
10052
10053
10054
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Load the effective address of the memory operand into Gv; no memory is
 * accessed.  A register-form ModR/M (mod==3) is invalid and raises \#UD.
 * For 16/32-bit operand sizes the computed address is truncated to the
 * destination width (IEM_MC_ASSIGN_TO_SMALLER).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);  /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);  /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5);
}
10099
10100
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /*
     * MOV Sw,Ev - load a segment register from a 16-bit GPR or memory word.
     * Raises \#UD for CS as destination and for reg fields beyond GS.
     * Both forms defer to iemCImpl_load_SReg for the actual descriptor
     * loading and protection checks.
     */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10154
10155
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass only to consume the opcode bytes; offOpcode is restored so
       the second pass below re-reads the same displacement/SIB bytes. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily advance rSP by the operand size so an rSP-relative operand
       sees the post-increment value, then restore it for the pop proper. */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temp value first and only commit rSP and RIP when both the
       stack read and the memory store have succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10257
10258
10259/** Opcode 0x8f. */
10260FNIEMOP_DEF(iemOp_Grp1A)
10261{
10262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10263 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10264 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10265
10266 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10267 /** @todo XOP decoding. */
10268 IEMOP_MNEMONIC("3-byte-xop");
10269 return IEMOP_RAISE_INVALID_OPCODE();
10270}
10271
10272
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the general register given by @a iReg (extended with REX.B) with
 * rAX at the current effective operand size.  The 32-bit case implicitly
 * zero-extends both destinations via IEM_MC_STORE_GREG_U32.
 *
 * @param   iReg    The low three bits of the register number (REX.B is
 *                  OR'ed in below).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10322
10323
/** Opcode 0x90. */
FNIEMOP_DEF(iemOp_nop)
{
    /*
     * NOP / PAUSE (F3 90) / XCHG R8,RAX (REX.B 90).
     * With a REX.B prefix 0x90 is a real exchange of R8/R8D with RAX/EAX;
     * otherwise it is a no-op.  A LOCK prefix here is the PAUSE encoding,
     * which is also treated as a plain NOP below.
     */
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10343
10344
/** Opcode 0x91. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* XCHG rCX,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10351
10352
/** Opcode 0x92. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* XCHG rDX,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10359
10360
/** Opcode 0x93. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* XCHG rBX,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10367
10368
10369/** Opcode 0x94. */
10370FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10371{
10372 IEMOP_MNEMONIC("xchg rSX,rAX");
10373 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10374}
10375
10376
/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* XCHG rBP,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10383
10384
/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* XCHG rSI,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10391
10392
/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* XCHG rDI,rAX - defers to the common GPR/rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10399
10400
/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    /*
     * CBW/CWDE/CDQE - sign-extend the lower half of rAX into the full
     * operand-size register.  Implemented by testing the top bit of the
     * source half and OR'ing/AND'ing in the upper half accordingly.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10446
10447
/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
    /*
     * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX.  rDX is set to all ones
     * or all zeroes depending on the sign bit of the operand-sized rAX.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10493
10494
/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    /*
     * CALL Ap (far call with immediate seg:offset) - invalid in 64-bit mode.
     * The offset is 16 or 32 bits depending on operand size; the selector is
     * always 16 bits.  The heavy lifting happens in iemCImpl_callf.
     */
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10511
10512
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /*
     * WAIT/FWAIT - checks for pending x87 exceptions (and device-not-available
     * conditions) but performs no other work.
     */
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10526
10527
10528/** Opcode 0x9c. */
10529FNIEMOP_DEF(iemOp_pushf_Fv)
10530{
10531 IEMOP_HLP_NO_LOCK_PREFIX();
10532 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10533 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10534}
10535
10536
10537/** Opcode 0x9d. */
10538FNIEMOP_DEF(iemOp_popf_Fv)
10539{
10540 IEMOP_HLP_NO_LOCK_PREFIX();
10541 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10542 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10543}
10544
10545
/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    /*
     * SAHF - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
     * In 64-bit mode this requires the LAHF/SAHF CPUID feature bit, else \#UD.
     * Bit 1 of EFLAGS is forced to 1 as architecturally required.
     */
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10568
10569
/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    /*
     * LAHF - load AH from the low byte of EFLAGS.
     * In 64-bit mode this requires the LAHF/SAHF CPUID feature bit, else \#UD.
     */
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10586
10587
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode (16, 32 or 64
 * bits wide depending on the effective address mode, zero extended to
 * 64-bit) and fend off lock prefixes.  Will return on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10612
/** Opcode 0xa0. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * MOV AL,Ob - load AL from the byte at the absolute moffs address.
     */
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10633
10634
/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * MOV rAX,Ov - load rAX (at effective operand size) from the absolute
     * moffs address.
     */
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10680
10681
/** Opcode 0xa2. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * MOV Ob,AL - store AL to the byte at the absolute moffs address.
     */
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10702
10703
/** Opcode 0xa3. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * MOV Ov,rAX - store rAX (at effective operand size) to the absolute
     * moffs address.
     */
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10748
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one MOVS case: load from [rSI] in the effective segment, store to
 * ES:[rDI], then advance or retreat both index registers by the value size
 * depending on EFLAGS.DF.
 * @param ValBits   Width of the moved value in bits (8/16/32/64).
 * @param AddrBits  Width of the address registers in bits (16/32/64). */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10767
/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * MOVSB - move byte from [rSI] to ES:[rDI].  REP/REPNE forms go to the
     * C implementation; the single-shot form shares IEM_MOVS_CASE with
     * iemOp_movswd_Xv_Yv.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10801
10802
/** Opcode 0xa5. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    /*
     * MOVSW/MOVSD/MOVSQ - move operand-sized value from [rSI] to ES:[rDI].
     * REP forms defer to op-size x addr-size specific C implementations;
     * the single-shot forms use IEM_MOVS_CASE.  64-bit operand with 16-bit
     * addressing cannot be encoded and asserts.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: harmless, every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10885
10886#undef IEM_MOVS_CASE
10887
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one CMPS case: load value1 from [rSI] in the effective segment and
 * value2 from ES:[rDI], compare them via iemAImpl_cmp_uNN (updating EFLAGS),
 * then advance or retreat both index registers by the value size depending
 * on EFLAGS.DF.
 * @param ValBits   Width of the compared values in bits (8/16/32/64).
 * @param AddrBits  Width of the address registers in bits (16/32/64). */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
10914
10915/** Opcode 0xa6. */
10916FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
10917{
10918 IEMOP_HLP_NO_LOCK_PREFIX();
10919
10920 /*
10921 * Use the C implementation if a repeat prefix is encountered.
10922 */
10923 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10924 {
10925 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10926 switch (pIemCpu->enmEffAddrMode)
10927 {
10928 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
10929 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
10930 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
10931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10932 }
10933 }
10934 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10935 {
10936 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10937 switch (pIemCpu->enmEffAddrMode)
10938 {
10939 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
10940 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
10941 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
10942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10943 }
10944 }
10945 IEMOP_MNEMONIC("cmps Xb,Yb");
10946
10947 /*
10948 * Sharing case implementation with cmps[wdq] below.
10949 */
10950 switch (pIemCpu->enmEffAddrMode)
10951 {
10952 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
10953 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
10954 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
10955 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10956 }
10957 return VINF_SUCCESS;
10958
10959}
10960
10961
/** Opcode 0xa7. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * CMPSW/CMPSD/CMPSQ - compare operand-sized value at [rSI] with the one
     * at ES:[rDI].  REPE/REPNE forms defer to op-size x addr-size specific
     * C implementations; the single-shot forms use IEM_CMPS_CASE.  64-bit
     * operand with 16-bit addressing cannot be encoded and asserts.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: harmless, every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: harmless, every case of the inner switch returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11080
11081#undef IEM_CMPS_CASE
11082
/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    /* AF is architecturally undefined after TEST; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shares the generic AL,Ib binary-operator decoder; TEST discards the result. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11090
11091
/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    /* AF is architecturally undefined after TEST; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shares the generic rAX,Iz binary-operator decoder; TEST discards the result. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11099
11100
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one non-REP STOS case: stores ValBits of rAX at ES:xDI (xDI taken
 * with AddrBits of significance), then steps xDI up or down by the element
 * size according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11116
/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 both act as REP for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11150
11151
/** Opcode 0xab. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 both act as REP for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT: /* no break needed: every path in the inner switch returns */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11234
11235#undef IEM_STOS_CASE
11236
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits one non-REP LODS case: loads ValBits from iEffSeg:xSI (xSI taken
 * with AddrBits of significance) into rAX, then steps xSI up or down by
 * the element size according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11252
/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 both act as REP for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11286
11287
/** Opcode 0xad. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 both act as REP for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT: /* no break needed: every path in the inner switch returns */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11370
11371#undef IEM_LODS_CASE
11372
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits one non-REP SCAS case: compares rAX against ValBits at ES:xDI
 * (xDI taken with AddrBits of significance) using the CMP worker so the
 * arithmetic flags are set, then steps xDI up or down by the element size
 * according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11394
/** Opcode 0xae. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike STOS/LODS, F3 (REPE) and F2 (REPNE) have distinct meanings here.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11439
11440
/** Opcode 0xaf. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike STOS/LODS, F3 (REPE) and F2 (REPNE) have distinct meanings here.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT: /* no break needed: every path in the inner switch returns */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 16-bit addressing is not encodable in 64-bit mode; the 67h prefix selects 32-bit addressing instead. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT: /* no break needed: every path in the inner switch returns */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11556
11557#undef IEM_SCAS_CASE
11558
11559/**
11560 * Common 'mov r8, imm8' helper.
11561 */
11562FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11563{
11564 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11565 IEMOP_HLP_NO_LOCK_PREFIX();
11566
11567 IEM_MC_BEGIN(0, 1);
11568 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11569 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11570 IEM_MC_ADVANCE_RIP();
11571 IEM_MC_END();
11572
11573 return VINF_SUCCESS;
11574}
11575
11576
/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11583
11584
/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11591
11592
/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11599
11600
/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11607
11608
/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    /* Register index 4 means AH without a REX prefix and SPL with one;
       presumably the 8-bit GREG accessors resolve which — hence xSP here. */
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11615
11616
/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    /* Index 5: CH without REX, BPL with REX — hence xBP. */
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11623
11624
/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    /* Index 6: DH without REX, SIL with REX — hence xSI. */
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11631
11632
/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    /* Index 7: BH without REX, DIL with REX — hence xDI. */
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11639
11640
11641/**
11642 * Common 'mov regX,immX' helper.
11643 */
11644FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11645{
11646 switch (pIemCpu->enmEffOpSize)
11647 {
11648 case IEMMODE_16BIT:
11649 {
11650 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11651 IEMOP_HLP_NO_LOCK_PREFIX();
11652
11653 IEM_MC_BEGIN(0, 1);
11654 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11655 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11656 IEM_MC_ADVANCE_RIP();
11657 IEM_MC_END();
11658 break;
11659 }
11660
11661 case IEMMODE_32BIT:
11662 {
11663 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11664 IEMOP_HLP_NO_LOCK_PREFIX();
11665
11666 IEM_MC_BEGIN(0, 1);
11667 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11668 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11669 IEM_MC_ADVANCE_RIP();
11670 IEM_MC_END();
11671 break;
11672 }
11673 case IEMMODE_64BIT:
11674 {
11675 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11676 IEMOP_HLP_NO_LOCK_PREFIX();
11677
11678 IEM_MC_BEGIN(0, 1);
11679 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11680 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11681 IEM_MC_ADVANCE_RIP();
11682 IEM_MC_END();
11683 break;
11684 }
11685 }
11686
11687 return VINF_SUCCESS;
11688}
11689
11690
11691/** Opcode 0xb8. */
11692FNIEMOP_DEF(iemOp_eAX_Iv)
11693{
11694 IEMOP_MNEMONIC("mov rAX,IV");
11695 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11696}
11697
11698
11699/** Opcode 0xb9. */
11700FNIEMOP_DEF(iemOp_eCX_Iv)
11701{
11702 IEMOP_MNEMONIC("mov rCX,IV");
11703 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11704}
11705
11706
11707/** Opcode 0xba. */
11708FNIEMOP_DEF(iemOp_eDX_Iv)
11709{
11710 IEMOP_MNEMONIC("mov rDX,IV");
11711 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11712}
11713
11714
11715/** Opcode 0xbb. */
11716FNIEMOP_DEF(iemOp_eBX_Iv)
11717{
11718 IEMOP_MNEMONIC("mov rBX,IV");
11719 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11720}
11721
11722
11723/** Opcode 0xbc. */
11724FNIEMOP_DEF(iemOp_eSP_Iv)
11725{
11726 IEMOP_MNEMONIC("mov rSP,IV");
11727 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11728}
11729
11730
11731/** Opcode 0xbd. */
11732FNIEMOP_DEF(iemOp_eBP_Iv)
11733{
11734 IEMOP_MNEMONIC("mov rBP,IV");
11735 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11736}
11737
11738
11739/** Opcode 0xbe. */
11740FNIEMOP_DEF(iemOp_eSI_Iv)
11741{
11742 IEMOP_MNEMONIC("mov rSI,IV");
11743 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11744}
11745
11746
11747/** Opcode 0xbf. */
11748FNIEMOP_DEF(iemOp_eDI_Iv)
11749{
11750 IEMOP_MNEMONIC("mov rDI,IV");
11751 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11752}
11753
11754
/** Opcode 0xc0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    /* Group 2 byte shifts/rotates with imm8 count; /reg selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for shifts/rotates in general. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - the imm8 count follows the effective address bytes. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11813
11814
/** Opcode 0xc1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /* Group 2 word/dword/qword shifts/rotates with imm8 count; /reg selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for shifts/rotates in general. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half (done explicitly here
                   since the worker operated through a reference). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the imm8 count follows the effective address bytes. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11951
11952
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Near RET defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Defer to the C worker; u16Imm is the extra byte count to pop off the stack. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
11962
11963
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    /* Near RET defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Same worker as 0xc2, with zero extra bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
11972
11973
/** Opcode 0xc4. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
11994
11995
/** Opcode 0xc5. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MOD != 3 in legacy/compat mode: this really is LDS. */
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX form requires protected mode. */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    /* Consume the remaining two VEX payload bytes plus the real opcode byte. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12033
12034
/** Opcode 0xc6 - Group 11: mov Eb,Ib (only /0 is defined). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* cbImm=1: the effective address calc must account for the trailing immediate byte. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12066
12067
/** Opcode 0xc7 - Group 11: mov Ev,Iz (only /0 is defined). */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate, not a full Iq. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm=2: account for the trailing Iw when computing the RIP-relative address. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm=4 here too: the immediate is Id (sign-extended), not Iq. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12148
12149
12150
12151
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Iw = frame size, Ib = lexical nesting level; both handled in the C implementation. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12162
12163
12164/** Opcode 0xc9. */
12165FNIEMOP_DEF(iemOp_leave)
12166{
12167 IEMOP_MNEMONIC("retn");
12168 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12169 IEMOP_HLP_NO_LOCK_PREFIX();
12170 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12171}
12172
12173
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    /* Far return, popping an extra Iw bytes of stack parameters. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12183
12184
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    /* Plain far return; same C implementation as 0xca with zero bytes to pop. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12193
12194
/** Opcode 0xcc - int3 breakpoint; raises \#BP with the is-BP-instruction flag set. */
FNIEMOP_DEF(iemOp_int_3)
{
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12200
12201
/** Opcode 0xcd - int Ib; raises the software interrupt given by the immediate. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12208
12209
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode. */

    /* Unconditionally invokes the int implementation with vector \#OF; the
       OF-flag conditionality is handled inside iemCImpl_int / the exception path. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12223
12224
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the mode/privilege complexity lives in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12232
12233
/** Opcode 0xd0 - Group 2: rotate/shift Eb by a constant count of 1.
 *  The ModR/M reg field selects the operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are architecturally undefined for some of these ops; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Read-modify-write: map, operate, then commit + unmap. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12289
12290
12291
/** Opcode 0xd1 - Group 2: rotate/shift Ev by a constant count of 1.
 *  The ModR/M reg field selects the operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are architecturally undefined for some of these ops; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                /* Read-modify-write: map, operate, then commit + unmap. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12421
12422
/** Opcode 0xd2 - Group 2: rotate/shift Eb by CL.
 *  The ModR/M reg field selects the operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF are architecturally undefined for some of these ops; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        /* Read-modify-write: map, operate, then commit + unmap. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12480
12481
/** Opcode 0xd3 - Group 2: rotate/shift Ev by CL.
 *  The ModR/M reg field selects the operation; /6 is undefined. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF are architecturally undefined for some of these ops; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                /* Read-modify-write: map, operate, then commit + unmap. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12617
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode. */
    /* AAM divides AL by the immediate, so a zero base raises #DE. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12629
12630
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode. */
    /* Unlike AAM, AAD multiplies by the immediate, so bImm == 0 is legal. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12640
12641
/** Opcode 0xd6 - SALC (undocumented): AL := CF ? 0xff : 0x00. */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC("salc");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /** @todo fetched but unused - looks suspicious, SALC has no immediate; verify against hardware. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12660
12661
/** Opcode 0xd7 - XLAT: AL := [seg:(r/e)BX + zero-extended AL]. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* The effective address width decides how BX/EBX/RBX and AL are combined. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12708
12709
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result lands in ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12740
12741
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw); /* only the status word changes */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12772
12773
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the RM field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied, else it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* pop happens in both branches */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12804
12805
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 := ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12812
12813
/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 := ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12820
12821
/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with STn, setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12828
12829
/** Opcode 0xd8 11/3 - fcomp st0,stN: like fcom but pops the stack afterwards
 *  (same assembly worker as fcom; the pop is done by the _pop helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12836
12837
/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 := ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12844
12845
/** Opcode 0xd8 11/5 - fsubr st0,stN: reversed subtract, ST0 := STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
12852
12853
/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 := ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
12860
12861
/** Opcode 0xd8 11/7 - fdivr st0,stN: reversed divide, ST0 := STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
12868
12869
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    /* Effective address must be calculated before the done-decoding check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be occupied, else it's a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result lands in ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12905
12906
/** Opcode 0xd8 !11/0: FADD ST(0),m32real — via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
12913
12914
/** Opcode 0xd8 !11/1: FMUL ST(0),m32real — via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
12921
12922
/** Opcode 0xd8 !11/2: FCOM ST(0),m32real.
 * Compares ST0 with a 32-bit real memory operand; only updates FSW (C0/C2/C3),
 * no value is stored, so it doesn't use the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Empty ST0 => stack underflow (FPUDP recorded); otherwise compare and merge FSW. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12955
12956
/** Opcode 0xd8 !11/3: FCOMP ST(0),m32real.
 * Same as FCOM m32real but pops ST0 afterwards (note the _THEN_POP variants);
 * shares the iemAImpl_fcom_r80_by_r32 comparison implementation. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Empty ST0 => underflow (still pops); otherwise compare, merge FSW, pop. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12989
12990
/** Opcode 0xd8 !11/4: FSUB ST(0),m32real — via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
12997
12998
/** Opcode 0xd8 !11/5: FSUBR ST(0),m32real — reversed subtract, via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13005
13006
/** Opcode 0xd8 !11/6: FDIV ST(0),m32real — via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13013
13014
/** Opcode 0xd8 !11/7: FDIVR ST(0),m32real — reversed divide, via common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13021
13022
/** Opcode 0xd8 — first x87 escape byte.
 * Dispatches on the ModR/M byte: mod==3 selects the register (ST0,STi) forms,
 * anything else the m32real memory forms.  The /reg field picks the operation.
 * Also records the FPU opcode offset for later FOP reporting. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the escape opcode is so FOP can be derived from it. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: operate on ST0 and ST(bRm & 7). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: operate on ST0 and a 32-bit real operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13060
13061
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert the 32-bit real to 80-bit and push it onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST7 (the incoming top-1 slot) must be free or the push overflows the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13094
13095
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: round ST0 to 32-bit real and store it to memory (no pop).
 * The destination is mapped for write up front so that a masked-IM underflow
 * can still deposit the negative QNaN indefinite value. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW from the conversion (unmasked #MF must not store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if IM is masked, store the QNaN indefinite; either way raise underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13130
13131
/** Opcode 0xd9 !11/3
 * FSTP m32real: same as FST m32real but pops ST0 afterwards
 * (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW from the conversion (unmasked #MF must not store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if IM is masked, store the QNaN indefinite; raise underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13166
13167
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment; the heavy lifting is done in
 * the C implementation (iemCImpl_fldenv), which gets the effective operand
 * size to pick the 14- vs 28-byte layout. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13184
13185
13186/** Opcode 0xd9 !11/5 */
13187FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13188{
13189 IEMOP_MNEMONIC("fldcw m2byte");
13190 IEM_MC_BEGIN(1, 1);
13191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13192 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13195 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13196 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13197 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13198 IEM_MC_END();
13199 return VINF_SUCCESS;
13200}
13201
13202
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte: store the FPU environment without checking pending
 * exceptions first (the "no-wait" form); delegated to iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13219
13220
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory ("no-wait" form,
 * so no pending-#MF check).
 * NOTE(review): IEM_MC_BEGIN(2, 0) declares 2 args / 0 locals, yet the block
 * uses 0 args and 2 locals — looks like swapped counts; verify against the
 * IEM_MC_BEGIN contract before changing. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13237
13238
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does no arithmetic, but still checks CR0.EM/TS (#NM) and pending #MF,
 * and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13256
13257
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Source register empty => push-underflow; otherwise wrap it in a result and push. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13283
13284
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange ST0 and ST(i).  On underflow (either register empty)
 * the work is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Both present: old ST(i) (with C1 set) goes to ST0, old ST0 goes to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13313
13314
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST0 into ST(i), then pop.  The iDstReg == 0 case is
 * special-cased since copying ST0 onto itself degenerates to a pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (or raise underflow if empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Regular case: store ST0 into ST(i) and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13357
13358
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Raises #NM / pending #MF first; an empty ST0 yields a stack underflow
 * instead of calling the implementation.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13388
13389
/** Opcode 0xd9 0xe0: FCHS — negate ST0, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13396
13397
/** Opcode 0xd9 0xe1: FABS — absolute value of ST0, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13404
13405
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * Used by examine/test style instructions (FTST, FXAM): ST0 is read but
 * never modified, only the status word is updated from the aimpl's output.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: underflow without a destination register to mark. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13434
13435
/** Opcode 0xd9 0xe4: FTST — compare ST0 against 0.0, FSW-only result. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13442
13443
/** Opcode 0xd9 0xe5: FXAM — classify ST0 into the C0-C3 condition bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13450
13451
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The aimpl produces the constant; if the incoming ST7 slot (the one the push
 * will occupy) is not empty, a push overflow is raised instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13479
13480
/** Opcode 0xd9 0xe8: FLD1 — push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13487
13488
/** Opcode 0xd9 0xe9: FLDL2T — push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13495
13496
/** Opcode 0xd9 0xea: FLDL2E — push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13503
/** Opcode 0xd9 0xeb: FLDPI — push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13510
13511
/** Opcode 0xd9 0xec: FLDLG2 — push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13518
/** Opcode 0xd9 0xed: FLDLN2 — push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13525
13526
/** Opcode 0xd9 0xee: FLDZ — push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13533
13534
/** Opcode 0xd9 0xf0: F2XM1 — ST0 := 2^ST0 - 1, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13541
13542
13543/** Opcode 0xd9 0xf1. */
13544FNIEMOP_DEF(iemOp_fylx2)
13545{
13546 IEMOP_MNEMONIC("fylx2 st0");
13547 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13548}
13549
13550
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN, FXTRACT and FSINCOS.  An empty ST0 raises a combined
 * push-underflow (the _TWO variant).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13580
13581
/** Opcode 0xd9 0xf2: FPTAN — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13588
13589
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FPATAN, FYL2XP1 and the FxxxP families that target ST(i).
 *
 * @param   bRm         The ModR/M byte; low 3 bits select ST(i) as the
 *                      destination/first operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is ST(i), operand 2 is ST0; either empty => underflow (still pops). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13621
13622
/** Opcode 0xd9 0xf3: FPATAN — result to ST1, then pop (fixed STn index 1). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13629
13630
/** Opcode 0xd9 0xf4: FXTRACT — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13637
13638
/** Opcode 0xd9 0xf5: FPREM1 — IEEE partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13645
13646
/** Opcode 0xd9 0xf6: FDECSTP — decrement the FPU stack top pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13669
13670
/** Opcode 0xd9 0xf7: FINCSTP — increment the FPU stack top pointer (no tag changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13693
13694
/** Opcode 0xd9 0xf8: FPREM — partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13701
13702
/** Opcode 0xd9 0xf9: FYL2XP1 — result to ST1, then pop (fixed STn index 1). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13709
13710
/** Opcode 0xd9 0xfa: FSQRT — square root of ST0, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13717
13718
/** Opcode 0xd9 0xfb: FSINCOS — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13725
13726
/** Opcode 0xd9 0xfc: FRNDINT — round ST0 to integer, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13733
13734
/** Opcode 0xd9 0xfd: FSCALE — scale ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13741
13742
/** Opcode 0xd9 0xfe: FSIN — sine of ST0, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13749
13750
/** Opcode 0xd9 0xff: FCOS — cosine of ST0, via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13757
13758
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 escape with a second byte in the 0xe0-0xff
 * range; indexed by (second byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,     /* FYL2X; the handler name carries a historical typo. */
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13795
13796
/** Opcode 0xd9 — second x87 escape byte.
 * Register forms: /0 FLD ST(i), /1 FXCH, /2 only 0xd0 (FNOP), /3 FSTP (reserved
 * encoding, Intel behavior), /4-/7 via the g_apfnEscF1_E0toFF table.
 * Memory forms: FLD/FST/FSTP m32real and the environment/control-word ops. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the escape opcode is so FOP can be derived from it. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm is 0xe0..0xff, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13838
13839
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copy ST(i) to ST0 when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only then is the EFLAGS condition tested. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13866
13867
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copy ST(i) to ST0 when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only then is the EFLAGS condition tested. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13894
13895
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copy ST(i) to ST0 when CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only then is the EFLAGS condition tested. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13922
13923
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST(0) when PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13950
13951
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * The worker operates on ST0 and ST1 (the hard-coded register numbers below);
 * only FSW is updated from the assembly helper, then the stack is popped twice.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly),
 *                 taking (pFSW, pr80Value1, pr80Value2).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* If either ST0 or ST1 is empty, raise stack underflow and still pop twice. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13982
13983
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
13990
13991
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches a signed 32-bit integer from the memory operand, feeds it together
 * with ST0 to the assembly helper, and stores the full result (value + FSW)
 * back into ST0.
 *
 * @param bRm      The ModR/M byte (memory form).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14027
14028
/** Opcode 0xda !11/0. FIADD - ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14035
14036
/** Opcode 0xda !11/1. FIMUL - ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14043
14044
/** Opcode 0xda !11/2. FICOM - compare ST0 with m32i, updating only FSW. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14077
14078
/** Opcode 0xda !11/3. FICOMP - compare ST0 with m32i, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise it's a stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14111
14112
/** Opcode 0xda !11/4. FISUB - ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14119
14120
/** Opcode 0xda !11/5. FISUBR - ST0 = m32i - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14127
14128
/** Opcode 0xda !11/6. FIDIV - ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14135
14136
/** Opcode 0xda !11/7. FIDIVR - ST0 = m32i / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14143
14144
/** Opcode 0xda. FPU escape group 2: dispatches on ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode starts (for FOP/FIP bookkeeping). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: FCMOVcc; only 0xe9 (FUCOMPP) is valid in /5. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: integer arithmetic/compare on a 32-bit memory operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14184
14185
/** Opcode 0xdb !11/0. FILD - push m32i onto the FPU stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Pushing requires register 7 (the new TOP after push) to be empty,
       otherwise it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14217
14218
/** Opcode 0xdb !11/1. FISTTP - store ST0 to m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* On stack underflow with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14253
14254
/** Opcode 0xdb !11/2. FIST - store ST0 to m32i (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* On stack underflow with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14289
14290
14291/** Opcode 0xdb !11/3. */
14292FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14293{
14294 IEMOP_MNEMONIC("fisttp m32i");
14295 IEM_MC_BEGIN(3, 2);
14296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14297 IEM_MC_LOCAL(uint16_t, u16Fsw);
14298 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14299 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14301
14302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14304 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14305 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14306
14307 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14308 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14309 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14310 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14311 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14312 IEM_MC_ELSE()
14313 IEM_MC_IF_FCW_IM()
14314 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14315 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14316 IEM_MC_ENDIF();
14317 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14318 IEM_MC_ENDIF();
14319 IEM_MC_USED_FPU();
14320 IEM_MC_ADVANCE_RIP();
14321
14322 IEM_MC_END();
14323 return VINF_SUCCESS;
14324}
14325
14326
/** Opcode 0xdb !11/5. FLD - push an 80-bit real from memory onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Pushing requires register 7 (the new TOP after push) to be empty,
       otherwise it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14358
14359
/** Opcode 0xdb !11/7. FSTP - store ST0 to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* On stack underflow with IM masked, write the negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14394
14395
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(i) to ST(0) when CF=0 (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14422
14423
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST(0) when ZF=0 (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14450
14451
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST(0) when CF=0 and ZF=0 (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14478
14479
/** Opcode 0xdb 11/3. Copy ST(i) to ST(0) when PF=0 (not unordered).
 * NOTE(review): Intel's mnemonic for this encoding is FCMOVNU; the "fcmovnnu"
 * spelling in the identifier/mnemonic string looks like a typo, but it is
 * only cosmetic (debug output) — confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14506
14507
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt-enable relic; implemented as a
 * no-op (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14519
14520
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt-disable relic; implemented as a
 * no-op (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14532
14533
/** Opcode 0xdb 0xe2. FNCLEX - clear the FPU exception bits in FSW without
 * checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14547
14548
/** Opcode 0xdb 0xe3. FNINIT - reinitialize the FPU; deferred to the C
 * implementation without checking for pending exceptions (fCheckXcpts=false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14556
14557
/** Opcode 0xdb 0xe4. FNSETPM - 80287 relic; implemented as a no-op
 * (only the device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14569
14570
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL relic; raises \#UD (the no-op variant
 * for old CPUs is kept under \#if 0 since newer CPUs \#UD on this encoding). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14586
14587
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST0 with ST(i), setting
 * EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14594
14595
/** Opcode 0xdb 11/6. FCOMI - ordered compare ST0 with ST(i), setting
 * EFLAGS; no pop (fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14602
14603
/** Opcode 0xdb. FPU escape group 3: dispatches on ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the FPU opcode starts (for FOP/FIP bookkeeping). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: FCMOVNcc, the 0xe0-0xe5 control relics, FUCOMI/FCOMI. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer loads/stores and 80-bit real load/store. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14653
14654
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * STn is selected by the ModR/M r/m field; the result (value + FSW) is
 * written back into STn.
 *
 * @param bRm      The ModR/M byte (register form).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn and ST0 must be non-empty; otherwise it's a stack underflow on STn. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14686
14687
/** Opcode 0xdc 11/0. FADD - ST(i) = ST(i) + ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14694
14695
/** Opcode 0xdc 11/1. FMUL - ST(i) = ST(i) * ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14702
14703
/** Opcode 0xdc 11/4. FSUBR - reversed-operand subtract stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14710
14711
/** Opcode 0xdc 11/5. FSUB - subtract stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14718
14719
/** Opcode 0xdc 11/6. FDIVR - reversed-operand divide stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14726
14727
/** Opcode 0xdc 11/7. FDIV - divide stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14734
14735
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param bRm     The ModR/M byte (memory form).
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST0 must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14770
14771
/** Opcode 0xdc !11/0. FADD - ST0 = ST0 + m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14778
14779
/** Opcode 0xdc !11/1. FMUL - ST0 = ST0 * m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14786
14787
/** Opcode 0xdc !11/2. FCOM - compare ST0 with m64r, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise it's a stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14820
14821
/** Opcode 0xdc !11/3. FCOMP - compare ST0 with m64r, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must be valid; otherwise it's a stack underflow (still pops). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14854
14855
/** Opcode 0xdc !11/4. FSUB - ST0 = ST0 - m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14862
14863
/** Opcode 0xdc !11/5. FSUBR - ST0 = m64r - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
14870
14871
/** Opcode 0xdc !11/6. FDIV - ST0 = ST0 / m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
14878
14879
14880/** Opcode 0xdc !11/7. */
14881FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
14882{
14883 IEMOP_MNEMONIC("fdivr m64r");
14884 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
14885}
14886
14887
/** Opcode 0xdc.
 * Escape group 4: dispatches on the ModR/M byte.  Register forms (mod==3)
 * operate ST(i),ST(0); memory forms use a 64-bit real source operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the FPU opcode (offset of the 0xdc escape byte) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14924
14925
/** Opcode 0xdd !11/0.
 * FLD m64r: convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* A push needs ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14957
14958
/** Opcode 0xdd !11/1.
 * FISTTP m64i: store ST(0) as a 64-bit integer with truncation, then pop.
 * (Header previously said !11/0; the 0xdd dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if the invalid-op exception is masked, store the
           integer indefinite value; either way report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14993
14994
/** Opcode 0xdd !11/2.
 * FST m64r: store ST(0) to memory as a 64-bit real, no pop.
 * (Header previously said !11/0; the 0xdd dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store negative QNaN when IM is masked; no pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15029
15030
15031
15032
/** Opcode 0xdd !11/3.
 * FSTP m64r: store ST(0) to memory as a 64-bit real, then pop.
 * (Header previously said !11/0; the 0xdd dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store negative QNaN when IM is masked, then pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15067
15068
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the full FPU state from memory; deferred to a
 * C implementation because of the complex, operand-size dependent layout.
 * (Header previously said !11/0; the 0xdd dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15085
15086
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full FPU state to memory (no wait prefix);
 * deferred to a C implementation.
 * (Header previously said !11/0; the 0xdd dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15104
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory.  No FPU exception check
 * here - FNSTSW is a no-wait control instruction.
 * (Header previously said !11/0; the 0xdd dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15128
15129
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the given register as empty without touching the top
 * of stack or its contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15151
15152
/** Opcode 0xdd 11/1.
 * FST ST(i): copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15175
15176
15177/** Opcode 0xdd 11/3. */
15178FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15179{
15180 IEMOP_MNEMONIC("fcom st0,stN");
15181 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15182}
15183
15184
15185/** Opcode 0xdd 11/4. */
15186FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15187{
15188 IEMOP_MNEMONIC("fcomp st0,stN");
15189 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15190}
15191
15192
/** Opcode 0xdd.
 * Escape group 5: register forms (mod==3) are FFREE/FST/FSTP/FUCOM(P);
 * memory forms use 64-bit operands plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the FPU opcode (offset of the 0xdd escape byte) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15229
15230
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add and pop, via the common stN-by-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): multiply and pop.
 * (Header previously said 11/0; the 0xde dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15245
15246
15247/** Opcode 0xde 0xd9. */
15248FNIEMOP_DEF(iemOp_fcompp)
15249{
15250 IEMOP_MNEMONIC("fucompp st0,stN");
15251 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15252}
15253
15254
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15285
15286
15287/**
15288 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15289 * the result in ST0.
15290 *
15291 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15292 */
15293FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15294{
15295 IEM_MC_BEGIN(3, 3);
15296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15297 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15298 IEM_MC_LOCAL(int16_t, i16Val2);
15299 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15301 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15302
15303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15305
15306 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15307 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15308 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15309
15310 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15311 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15312 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15313 IEM_MC_ELSE()
15314 IEM_MC_FPU_STACK_UNDERFLOW(0);
15315 IEM_MC_ENDIF();
15316 IEM_MC_USED_FPU();
15317 IEM_MC_ADVANCE_RIP();
15318
15319 IEM_MC_END();
15320 return VINF_SUCCESS;
15321}
15322
15323
/** Opcode 0xde !11/0.
 * FIADD m16i: ST(0) += (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16i: ST(0) *= (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15338
15339
/** Opcode 0xde !11/2.
 * FICOM m16i: compare ST(0) with a 16-bit integer from memory, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15372
15373
/** Opcode 0xde !11/3.
 * FICOMP m16i: like FICOM m16i but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15406
15407
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) -= (int16_t)mem. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = (int16_t)mem - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15422
15423
15424/** Opcode 0xde !11/6. */
15425FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15426{
15427 IEMOP_MNEMONIC("fiadd m16i");
15428 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15429}
15430
15431
15432/** Opcode 0xde !11/7. */
15433FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15434{
15435 IEMOP_MNEMONIC("fiadd m16i");
15436 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15437}
15438
15439
/** Opcode 0xde.
 * Escape group 6: register forms (mod==3) are the pop-after arithmetic
 * instructions (FADDP..FDIVP, FCOMPP); memory forms take m16i operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the FPU opcode (offset of the 0xde escape byte) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only 0xde 0xd9 is defined in this group. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15478
15479
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP: free the
 * register, then increment the top-of-stack pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15501
15502
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX.  No-wait form, so no FPU
 * exception check - only device-not-available can be raised. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15518
15519
15520/** Opcode 0xdf 11/5. */
15521FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15522{
15523 IEMOP_MNEMONIC("fcomip st0,stN");
15524 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15525}
15526
15527
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop.  Deferred to
 * the shared fcomi/fucomi C implementation with fPop=true. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15534
15535
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a 16-bit integer from memory to 80-bit real and push. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15567
15568
/** Opcode 0xdf !11/1.
 * FISTTP m16i: store ST(0) as a 16-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15603
15604
15605/** Opcode 0xdf !11/2. */
15606FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15607{
15608 IEMOP_MNEMONIC("fistp m16i");
15609 IEM_MC_BEGIN(3, 2);
15610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15611 IEM_MC_LOCAL(uint16_t, u16Fsw);
15612 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15613 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15614 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15615
15616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15618 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15619 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15620
15621 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15622 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15623 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15624 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15625 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15626 IEM_MC_ELSE()
15627 IEM_MC_IF_FCW_IM()
15628 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15629 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15630 IEM_MC_ENDIF();
15631 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15632 IEM_MC_ENDIF();
15633 IEM_MC_USED_FPU();
15634 IEM_MC_ADVANCE_RIP();
15635
15636 IEM_MC_END();
15637 return VINF_SUCCESS;
15638}
15639
15640
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a 16-bit integer using the current rounding
 * mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15675
15676
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - not implemented yet (stub raises the usual not-implemented
 * handling). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15679
15680
/** Opcode 0xdf !11/5.
 * FILD m64i: convert a 64-bit integer from memory to 80-bit real and push. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15712
15713
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - not implemented yet (stub raises the usual not-implemented
 * handling). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15716
15717
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) as a 64-bit integer using the current rounding
 * mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15752
15753
/**
 * Opcode 0xdf - x87 FPU escape group DF.
 *
 * Fetches the ModR/M byte and dispatches: the register forms (mod == 3)
 * and the memory forms (mod != 3) select entirely different instructions,
 * keyed on the reg field of ModR/M.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms, ST(i) encoded in the r/m field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,  bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                             /* fnstsw ax is the single valid encoding in /4. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN,  bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms (integer and packed BCD loads/stores). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15791
15792
/**
 * Opcode 0xe0 - LOOPNE/LOOPNZ Jb.
 *
 * Decrements the counter register selected by the effective address size
 * (CX/ECX/RCX) and takes the short relative jump while the counter is
 * non-zero AND ZF is clear.  The operand size defaults to 64-bit in long
 * mode (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size picks the counter width, not the operand size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15839
15840
/**
 * Opcode 0xe1 - LOOPE/LOOPZ Jb.
 *
 * Same as LOOPNE (0xe0) but with the ZF condition inverted: decrements the
 * address-size counter (CX/ECX/RCX) and jumps while it is non-zero AND ZF
 * is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size picks the counter width. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15887
15888
/**
 * Opcode 0xe2 - LOOP Jb.
 *
 * Unconditional loop: decrements the address-size counter (CX/ECX/RCX) and
 * jumps while it is non-zero.
 *
 * Special case: when the branch target is the instruction itself
 * (i8Imm == -instruction-length, i.e. 'loop $' - a busy-wait delay loop),
 * the remaining iterations are skipped by storing 0 into the counter and
 * advancing RIP, instead of emulating each iteration.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the instruction length at this point; equality means 'loop $'. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-looping delay loop: drain the counter in one go. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15962
15963
/**
 * Opcode 0xe3 - JCXZ/JECXZ/JRCXZ Jb.
 *
 * Jumps when the address-size counter (CX/ECX/RCX) is zero.  Note the
 * inverted branch structure: the non-zero case falls through (advance RIP),
 * the zero case takes the jump.  No registers or flags are modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16007
16008
16009/** Opcode 0xe4 */
16010FNIEMOP_DEF(iemOp_in_AL_Ib)
16011{
16012 IEMOP_MNEMONIC("in eAX,Ib");
16013 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16014 IEMOP_HLP_NO_LOCK_PREFIX();
16015 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16016}
16017
16018
/**
 * Opcode 0xe5 - IN eAX, Ib.
 *
 * Reads a word/dword from the immediate-addressed I/O port into AX/EAX,
 * the width picked by the effective operand size; deferred to iemCImpl_in.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16027
16028
/**
 * Opcode 0xe6 - OUT Ib, AL.
 *
 * Writes AL to the immediate-addressed I/O port; deferred to iemCImpl_out
 * with cbReg = 1.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16037
16038
/**
 * Opcode 0xe7 - OUT Ib, eAX.
 *
 * Writes AX/EAX (per effective operand size) to the immediate-addressed
 * I/O port; deferred to iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16047
16048
/**
 * Opcode 0xe8 - CALL Jv (near, relative).
 *
 * Fetches a relative displacement sized by the effective operand size and
 * defers to the matching iemCImpl_call_rel_NN worker (which pushes the
 * return address and adjusts RIP).  In 64-bit mode the displacement is a
 * sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast makes the displacement signed for the worker. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16077
16078
/**
 * Opcode 0xe9 - JMP Jv (near, relative).
 *
 * Relative near jump with operand-size-dependent displacement.  The 64-bit
 * case shares the 32-bit path: the displacement is always 32 bits in long
 * mode and IEM_MC_REL_JMP_S32 handles the sign extension.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16108
16109
/**
 * Opcode 0xea - JMP Ap (far, absolute pointer in the instruction stream).
 *
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  Decodes offset (16 or 32
 * bits, per operand size) followed by the selector, then defers to
 * iemCImpl_FarJmp for the segmented control transfer.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16126
16127
/**
 * Opcode 0xeb - JMP Jb (near, short relative).
 *
 * Unconditional short jump with an 8-bit signed displacement; operand size
 * defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16141
16142
/**
 * Opcode 0xec - IN AL, DX.
 *
 * Reads one byte from the I/O port in DX into AL; deferred to
 * iemCImpl_in_eAX_DX with cbReg = 1.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16150
16151
/**
 * Opcode 0xed - IN eAX, DX.
 *
 * Reads a word/dword from the I/O port in DX into AX/EAX, width per the
 * effective operand size; deferred to iemCImpl_in_eAX_DX.
 *
 * NOTE(review): the function name is missing the 'in_' part
 * (iemOp_in_eAX_DX would match its siblings); it is referenced from the
 * one-byte opcode table, so renaming must be done in both places.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16159
16160
/**
 * Opcode 0xee - OUT DX, AL.
 *
 * Writes AL to the I/O port in DX; deferred to iemCImpl_out_DX_eAX with
 * cbReg = 1.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16168
16169
/**
 * Opcode 0xef - OUT DX, eAX.
 *
 * Writes AX/EAX (per effective operand size) to the I/O port in DX;
 * deferred to iemCImpl_out_DX_eAX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16177
16178
/**
 * Opcode 0xf0 - LOCK prefix.
 *
 * Records the LOCK prefix in fPrefixes (clearing any stale REX state first)
 * and recursively decodes the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    /* Continue decoding with the prefixed instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16188
16189
/**
 * Opcode 0xf1 - INT1 / ICEBP.
 *
 * Raises a \#DB via the common software-interrupt worker; fIsBpInstr is
 * false since this is not the INT3 (0xcc) encoding.
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16197
16198
/**
 * Opcode 0xf2 - REPNE/REPNZ prefix.
 *
 * Sets IEM_OP_PRF_REPNZ (dropping any earlier REPZ, as the last rep prefix
 * wins) and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16210
16211
/**
 * Opcode 0xf3 - REP/REPE/REPZ prefix.
 *
 * Sets IEM_OP_PRF_REPZ (dropping any earlier REPNZ, as the last rep prefix
 * wins) and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16223
16224
/**
 * Opcode 0xf4 - HLT.
 *
 * Deferred to iemCImpl_hlt (privilege check and halting are done there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16231
16232
/**
 * Opcode 0xf5 - CMC.
 *
 * Complements the carry flag; no other state is touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16244
16245
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form (mod == 3) and the memory form; the memory
 * form maps the byte read-write and honours the LOCK prefix by picking the
 * locked worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK selects the interlocked assembly worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16289
16290
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register forms are delegated to iemOpCommonUnaryGReg; the memory forms
 * are expanded here per operand size, mapping the operand read-write and
 * honouring the LOCK prefix.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16369
16370
/**
 * Opcode 0xf6 /0 - TEST Eb, Ib.
 *
 * ANDs the byte operand with an immediate and updates EFLAGS only; the
 * destination is never written (memory is mapped read-only).  In the
 * memory form the immediate is fetched after the effective address, so
 * IEM_MC_CALC_RM_EFF_ADDR is told 1 more opcode byte follows.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16418
16419
/**
 * Opcode 0xf7 /0 - TEST Ev, Iv.
 *
 * ANDs the word/dword/qword operand with an immediate and updates EFLAGS
 * only; the destination is never written back (memory forms map the
 * operand read-only, and the 64-bit immediate is sign-extended from 32
 * bits as usual).  In the memory forms the effective-address calculation
 * is told how many immediate bytes still follow (2/4/4).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* Imm is 4 bytes even with REX.W. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16556
16557
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Eb.
 *
 * The byte operand comes from a register or memory; AX is the implicit
 * accumulator, passed by reference.  The assembly worker returns a status
 * in rc: non-zero means a divide error, raised as \#DE.
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means #DE (divide by zero / overflow). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16612
16613
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Ev.
 *
 * Word/dword/qword multiply and divide with the implicit DX:AX / EDX:EAX /
 * RDX:RAX register pair, which is passed by reference to the size-specific
 * assembly worker from pImpl.  A non-zero rc from the worker is raised as
 * \#DE.  In the 32-bit paths the high dwords of RAX/RDX are explicitly
 * cleared on success, matching hardware 32-bit write behavior.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The multiply/divide worker table.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc from the worker means #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16797
/**
 * Opcode 0xf6 - group 3 byte forms.
 *
 * Dispatches on the reg field of ModR/M: /0 TEST, /1 invalid, /2 NOT,
 * /3 NEG, /4 MUL, /5 IMUL, /6 DIV, /7 IDIV.  Undefined-EFLAGS markers for
 * the verifier are declared here before calling the common workers.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16833
16834
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /*
     * Group 3, operand-sized operand (Ev): the actual instruction is selected
     * by the reg field of the ModR/M byte.  Mirrors iemOp_Grp3_Eb.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is undefined; raised as #UD here (see the Eb variant). */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* MUL leaves SF, ZF, AF and PF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            /* IMUL leaves SF, ZF, AF and PF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* DIV leaves all six status flags undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            /* IDIV leaves all six status flags undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16870
16871
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* CLC: clear EFLAGS.CF only; no operands, no other flags touched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16883
16884
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* STC: set EFLAGS.CF only; no operands, no other flags touched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16896
16897
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation - unlike clc/cld this cannot be a
       simple flag-bit MC sequence (IOPL/privilege handling lives there). */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
16905
16906
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation, same as cli. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
16913
16914
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* CLD: clear EFLAGS.DF only (string ops will auto-increment). */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16926
16927
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* STD: set EFLAGS.DF only (string ops will auto-decrement). */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16939
16940
16941/** Opcode 0xfe. */
16942FNIEMOP_DEF(iemOp_Grp4)
16943{
16944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16945 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16946 {
16947 case 0:
16948 IEMOP_MNEMONIC("inc Ev");
16949 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
16950 case 1:
16951 IEMOP_MNEMONIC("dec Ev");
16952 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
16953 default:
16954 IEMOP_MNEMONIC("grp4-ud");
16955 return IEMOP_RAISE_INVALID_OPCODE();
16956 }
16957}
16958
16959
/**
 * Opcode 0xff /2 - near indirect call.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location.
           (Comment fixed: previously said "register", a copy/paste of the
           branch above - compare iemOp_Grp5_jmpn_Ev.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17041
/** C-implementation function type for far branches (callf/jmpf), taking the
 *  new selector, the segment offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17043
17044FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17045{
17046 /* Registers? How?? */
17047 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17048 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17049
17050 /* Far pointer loaded from memory. */
17051 switch (pIemCpu->enmEffOpSize)
17052 {
17053 case IEMMODE_16BIT:
17054 IEM_MC_BEGIN(3, 1);
17055 IEM_MC_ARG(uint16_t, u16Sel, 0);
17056 IEM_MC_ARG(uint16_t, offSeg, 1);
17057 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17061 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17062 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17063 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17064 IEM_MC_END();
17065 return VINF_SUCCESS;
17066
17067 case IEMMODE_64BIT:
17068 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17069 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17070 * and call far qword [rsp] encodings. */
17071 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17072 {
17073 IEM_MC_BEGIN(3, 1);
17074 IEM_MC_ARG(uint16_t, u16Sel, 0);
17075 IEM_MC_ARG(uint64_t, offSeg, 1);
17076 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17080 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17081 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17082 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17083 IEM_MC_END();
17084 return VINF_SUCCESS;
17085 }
17086 /* AMD falls thru. */
17087
17088 case IEMMODE_32BIT:
17089 IEM_MC_BEGIN(3, 1);
17090 IEM_MC_ARG(uint16_t, u16Sel, 0);
17091 IEM_MC_ARG(uint32_t, offSeg, 1);
17092 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17094 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17096 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17097 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17098 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17099 IEM_MC_END();
17100 return VINF_SUCCESS;
17101
17102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17103 }
17104}
17105
17106
/**
 * Opcode 0xff /3 - far indirect call.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    /* Decoding and the far-pointer fetch are shared with jmpf (0xff /5). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17116
17117
/**
 * Opcode 0xff /4 - near indirect jump.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17199
17200
/**
 * Opcode 0xff /5 - far indirect jump.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    /* Decoding and the far-pointer fetch are shared with callf (0xff /3). */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17210
17211
/**
 * Opcode 0xff /6 - push.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here: fetch the operand, then push it. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17265
17266
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /*
     * Group 5: dispatch on the reg field of the ModR/M byte.  Only /7 is
     * undefined; /2../6 are forwarded to their dedicated workers.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the 3-bit reg field is fully covered above. */
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
17295
17296
17297
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 * Declared extern near the top of this file.
 * NOTE(review): entry naming is inconsistent for the 0xb0-0xbf mov-immediate
 * rows (e.g. iemOp_mov_AL_Ib vs iemOp_CL_Ib / iemOp_eAX_Iv) - the handlers
 * are presumably all mov-immediate variants; confirm against their
 * definitions earlier in the file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17365
17366
17367/** @} */
17368
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette