VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 57952

Last change on this file since 57952 was 56668, checked in by vboxsync, 9 years ago

Fixed outs mnemonic

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 590.3 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 56668 2015-06-28 17:32:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb) and a byte register (Gb) as the
 * source.
 *
 * Both decode paths visible here return VINF_SUCCESS.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK requires a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source = reg field (REX.R extended); destination = r/m field (REX.B extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked variant (CMP, TEST) only need to
           read the destination, so map it read-only in that case. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* A LOCK prefix dispatches to the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev) and a general register (Gv) as the
 * source, dispatching on the effective operand size.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK requires a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half of the 64-bit register.
                   TEST doesn't write its destination, so skip it there. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 doubles as the "has a locked variant" indicator for all
           sizes; CMP/TEST don't and only need read access. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register
 * (Gb) as the destination and a byte memory/register (Eb) as the source.
 *
 * The destination is always a register, so no LOCK prefix is ever accepted.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Operands are swapped vs. the rm_r8 worker: source = r/m, destination = reg. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The source is only read, so no mapping or
         * commit is needed here (unlike the memory-destination workers).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register (Gv) as the destination and a memory/register (Ev) as the source,
 * dispatching on the effective operand size.
 *
 * The destination is always a register, so no LOCK prefix is ever accepted.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half of the 64-bit register.
                   NOTE(review): unlike the Ev,Gv worker there is no TEST
                   exception here -- presumably TEST never decodes via this
                   Gv,Ev worker; confirm against the opcode tables. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.  The memory operand is only read.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Zero the upper dword of the 64-bit destination register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (Ib).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* Register destination, LOCK is invalid. */

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    /* Destination is fixed to AL (low byte of xAX). */
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (Iz), dispatching on the effective
 * operand size.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX(); /* Register destination, LOCK is invalid. */

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit writes zero the upper half of RAX; TEST doesn't write
               its destination, so skip the clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is at most 32 bits; sign-extend the dword immediate to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6 - invalid opcodes, raise \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/**
 * Opcode 0x0f 0x00 /0 - SLDT Rv/Mw.
 *
 * Stores the LDTR selector: to a 16/32/64-bit register according to the
 * effective operand size, or as a 16-bit value when the destination is
 * memory.  Not valid in real or V86 mode.
 */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination - width follows the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination - always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
594
595
/**
 * Opcode 0x0f 0x00 /1 - STR Rv/Mw.
 *
 * Stores the task register (TR) selector, mirroring the SLDT worker above:
 * register destinations follow the effective operand size, memory
 * destinations are always 16-bit.  Not valid in real or V86 mode.
 */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination - width follows the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination - always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
651
652
/**
 * Opcode 0x0f 0x00 /2 - LLDT Ew.
 *
 * Loads the LDTR from a 16-bit register or memory operand; the heavy lifting
 * (descriptor checks, privilege checks for the register path) is deferred to
 * iemCImpl_lldt.  Not valid in real or V86 mode.
 */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t,  u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check before the memory fetch on this path. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
682
683
/**
 * Opcode 0x0f 0x00 /3 - LTR Ew.
 *
 * Loads the task register from a 16-bit register or memory operand; the
 * descriptor validation is deferred to iemCImpl_ltr.  Not valid in real or
 * V86 mode.
 */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t,  u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check before the memory fetch on this path. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
713
714
715/** Opcode 0x0f 0x00 /3. */
716FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
717{
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
723 IEM_MC_BEGIN(2, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
727 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
728 IEM_MC_END();
729 }
730 else
731 {
732 IEM_MC_BEGIN(2, 1);
733 IEM_MC_ARG(uint16_t, u16Sel, 0);
734 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
737 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
738 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
739 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
740 IEM_MC_END();
741 }
742 return VINF_SUCCESS;
743}
744
745
/** Opcode 0x0f 0x00 /4 - VERR Ew (verify segment for reading). */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false /*fWrite*/);
}
752
753
754/** Opcode 0x0f 0x00 /5. */
755FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
756{
757 IEMOP_MNEMONIC("verr Ew");
758 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
759}
760
761
/**
 * Opcode 0x0f 0x00 - group 6 dispatcher, selects the handler by the ModR/M
 * reg field (sldt/str/lldt/ltr/verr/verw; /6 and /7 are invalid).
 */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
780
781
/**
 * Opcode 0x0f 0x01 /0 - SGDT Ms (memory form only; the register forms of /0
 * are the VMX instructions dispatched by the caller).
 *
 * Stores the GDTR; the actual store is done by iemCImpl_sgdt.
 */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
798
799
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1) - VMCALL.  Unimplemented stub; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
806
807
/** Opcode 0x0f 0x01 /0 (mod=3, rm=2) - VMLAUNCH.  Unimplemented stub; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0 (mod=3, rm=3) - VMRESUME.  Unimplemented stub; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0 (mod=3, rm=4) - VMXOFF.  Unimplemented stub; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/**
 * Opcode 0x0f 0x01 /1 - SIDT Ms (memory form only; the register forms of /1
 * are monitor/mwait, dispatched by the caller).
 *
 * Stores the IDTR; the actual store is done by iemCImpl_sidt.
 */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
848
849
/** Opcode 0x0f 0x01 /1 (mod=3) - MONITOR.  Defers to iemCImpl_monitor with the
 *  effective segment (for the address in rAX). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
857
858
/** Opcode 0x0f 0x01 /1 (mod=3) - MWAIT.  Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
866
867
/**
 * Opcode 0x0f 0x01 /2 - LGDT Ms.
 *
 * Loads the GDTR from memory; privilege and limit handling is done by
 * iemCImpl_lgdt.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
884
885
/** Opcode 0x0f 0x01 0xd0 - XGETBV.  Raises \#UD unless the guest CPU profile
 *  has XSAVE/XRSTOR support; otherwise defers to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
897
898
/** Opcode 0x0f 0x01 0xd1 - XSETBV.  Raises \#UD unless the guest CPU profile
 *  has XSAVE/XRSTOR support; otherwise defers to iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
910
911
912/** Opcode 0x0f 0x01 /3. */
913FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
914{
915 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
916 ? IEMMODE_64BIT
917 : pIemCpu->enmEffOpSize;
918 IEM_MC_BEGIN(3, 1);
919 IEM_MC_ARG(uint8_t, iEffSeg, 0);
920 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
921 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
924 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
925 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
926 IEM_MC_END();
927 return VINF_SUCCESS;
928}
929
930
/*
 * AMD SVM instructions (group 7, mod=3 encodings 0xd8..0xdf).  All are
 * unimplemented stubs that raise \#UD.
 */

/** Opcode 0x0f 0x01 0xd8 - VMRUN. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9 - VMMCALL. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda - VMLOAD. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb - VMSAVE. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc - STGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd - CLGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde - SKINIT. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf - INVLPGA. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
954
/**
 * Opcode 0x0f 0x01 /4 - SMSW.
 *
 * Stores the machine status word (CR0): register destinations follow the
 * effective operand size, memory destinations are always 16-bit.
 */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
1008
1009
/**
 * Opcode 0x0f 0x01 /6 - LMSW.
 *
 * Loads the machine status word into CR0 via iemCImpl_lmsw.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t,  u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1037
1038
/** Opcode 0x0f 0x01 /7 - memory form only; the register forms of /7 are
 *  dispatched to swapgs/rdtscp by iemOp_Grp7 and never reach this function. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1051
1052
/** Opcode 0x0f 0x01 /7, register form with R/M=0 (i.e. 0x0f 0x01 0xf8). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_ONLY_64BIT();     /* SWAPGS is valid in 64-bit mode only. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7, register form with R/M=1 (i.e. 0x0f 0x01 0xf9). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet: log the stub hit and fail the instruction. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1070
1071
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  The reg field of the ModR/M byte selects the
     * instruction.  For /0../3 and /7 the memory form (mod != 3) is the
     * descriptor-table / invlpg instruction, while the register form
     * dispatches further on the r/m field (VMX, SVM, monitor/mwait,
     * xgetbv/xsetbv, swapgs/rdtscp encodings).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) / VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) / monitor+mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) / xgetbv+xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) / AMD SVM instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            /* The inner switch returns on every path, so no fallthrough. */

        case 4: /* smsw - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Reserved. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) / swapgs+rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1148
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * NOTE(review): the old comment said "Opcode 0x0f 0x00 /3", which is LTR;
 * this function actually backs the two opcodes dispatched by
 * iemOp_lar_Gv_Ew and iemOp_lsl_Gv_Ew below.
 *
 * @param fIsLar true for LAR, false for LSL (forwarded to the C impl).
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: the selector comes from a general register. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit destinations share the 64-bit implementation. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is read as a 16-bit word. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1250
1251
1252
/** Opcode 0x0f 0x02 - lar Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03 - lsl Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1267
1268
/** Opcode 0x0f 0x05 (the old comment said 0x04; SYSCALL is 0F 05 per the
 *  Intel/AMD opcode maps). */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06 (the old comment said 0x05; CLTS is 0F 06). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07 (the old comment said 0x06; SYSRET is 0F 07). */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1294
1295
1296/** Opcode 0x0f 0x08. */
1297FNIEMOP_STUB(iemOp_invd);
1298
1299
1300/** Opcode 0x0f 0x09. */
1301FNIEMOP_DEF(iemOp_wbinvd)
1302{
1303 IEMOP_MNEMONIC("wbinvd");
1304 IEMOP_HLP_NO_LOCK_PREFIX();
1305 IEM_MC_BEGIN(0, 0);
1306 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1307 IEM_MC_ADVANCE_RIP();
1308 IEM_MC_END();
1309 return VINF_SUCCESS; /* ignore for now */
1310}
1311
1312
1313/** Opcode 0x0f 0x0b. */
1314FNIEMOP_STUB(iemOp_ud2);
1315
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group P. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid - prefetches only take memory operands. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - verify against AMD docs. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address to get the exception behaviour right,
       but otherwise treat the prefetch as a NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1355
1356
1357/** Opcode 0x0f 0x0e. */
1358FNIEMOP_STUB(iemOp_femms);
1359
1360
1361/** Opcode 0x0f 0x0f 0x0c. */
1362FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1363
1364/** Opcode 0x0f 0x0f 0x0d. */
1365FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1366
1367/** Opcode 0x0f 0x0f 0x1c. */
1368FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1369
1370/** Opcode 0x0f 0x0f 0x1d. */
1371FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1372
1373/** Opcode 0x0f 0x0f 0x8a. */
1374FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1375
1376/** Opcode 0x0f 0x0f 0x8e. */
1377FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1378
1379/** Opcode 0x0f 0x0f 0x90. */
1380FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1381
1382/** Opcode 0x0f 0x0f 0x94. */
1383FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1384
1385/** Opcode 0x0f 0x0f 0x96. */
1386FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1387
1388/** Opcode 0x0f 0x0f 0x97. */
1389FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1390
1391/** Opcode 0x0f 0x0f 0x9a. */
1392FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1393
1394/** Opcode 0x0f 0x0f 0x9e. */
1395FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1396
1397/** Opcode 0x0f 0x0f 0xa0. */
1398FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1399
1400/** Opcode 0x0f 0x0f 0xa4. */
1401FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1402
1403/** Opcode 0x0f 0x0f 0xa6. */
1404FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1405
1406/** Opcode 0x0f 0x0f 0xa7. */
1407FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1408
1409/** Opcode 0x0f 0x0f 0xaa. */
1410FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1411
1412/** Opcode 0x0f 0x0f 0xae. */
1413FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1414
1415/** Opcode 0x0f 0x0f 0xb0. */
1416FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1417
1418/** Opcode 0x0f 0x0f 0xb4. */
1419FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1420
1421/** Opcode 0x0f 0x0f 0xb6. */
1422FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1423
1424/** Opcode 0x0f 0x0f 0xb7. */
1425FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1426
1427/** Opcode 0x0f 0x0f 0xbb. */
1428FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1429
1430/** Opcode 0x0f 0x0f 0xbf. */
1431FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1432
1433
/** Opcode 0x0f 0x0f - 3DNow! escape; the actual operation is selected by an
 *  immediate-style suffix byte that follows the ModR/M encoding. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1475
1476
1477/** Opcode 0x0f 0x10. */
1478FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1479/** Opcode 0x0f 0x11. */
1480FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
1481/** Opcode 0x0f 0x12. */
1482FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1483/** Opcode 0x0f 0x13. */
1484FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
1485/** Opcode 0x0f 0x14. */
1486FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
1487/** Opcode 0x0f 0x15. */
1488FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
1489/** Opcode 0x0f 0x16. */
1490FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
1491/** Opcode 0x0f 0x17. */
1492FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1493
1494
/** Opcode 0x0f 0x18 - SSE prefetch hints (group 16); memory forms only. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address for faulting behaviour, then treat
           the hint as a NOP. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1526
1527
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP Ev (hint NOP range). */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: nothing to do beyond advancing RIP. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: decode the address so addressing faults still fire. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1550
1551
/** Opcode 0x0f 0x20 - mov Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1582
1583
/** Opcode 0x0f 0x21 - mov Rd,Dd (read debug register).
 *  NOTE(review): this uses IEMOP_HLP_NO_LOCK_PREFIX() while the store
 *  counterpart iemOp_mov_Dd_Rd uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 *  verify whether the asymmetry is intentional. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R would select DR8+, which do not exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1596
1597
/** Opcode 0x0f 0x22 - mov Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1628
1629
/** Opcode 0x0f 0x23 - mov Dd,Rd (write debug register). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8+, which do not exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1642
1643
/** Opcode 0x0f 0x24 - mov Rd,Td.  Test registers were dropped after the
 *  486-era CPUs, so this always raises \#UD. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26 - mov Td,Rd.  Same story: no test registers, \#UD. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1660
1661
1662/** Opcode 0x0f 0x28. */
1663FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
1664/** Opcode 0x0f 0x29. */
1665FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
1666/** Opcode 0x0f 0x2a. */
1667FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1668/** Opcode 0x0f 0x2b. */
1669FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
1670/** Opcode 0x0f 0x2c. */
1671FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
1672/** Opcode 0x0f 0x2d. */
1673FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
1674/** Opcode 0x0f 0x2e. */
1675FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
1676/** Opcode 0x0f 0x2f. */
1677FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1678
1679
/** Opcode 0x0f 0x30 - wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31 - rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32 - rdmsr (the old comment said 0x33, but RDMSR is 0F 32
 *  per the Intel/AMD opcode maps; 0F 33 is RDPMC). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1705
1706
/** Opcode 0x0f 0x33 (RDPMC is 0F 33 per the opcode map; the old comment said 0x34). */
1708FNIEMOP_STUB(iemOp_rdpmc);
1709/** Opcode 0x0f 0x34. */
1710FNIEMOP_STUB(iemOp_sysenter);
1711/** Opcode 0x0f 0x35. */
1712FNIEMOP_STUB(iemOp_sysexit);
1713/** Opcode 0x0f 0x37. */
1714FNIEMOP_STUB(iemOp_getsec);
1715/** Opcode 0x0f 0x38. */
1716FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1717/** Opcode 0x0f 0x3a. */
1718FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1719/** Opcode 0x0f 0x3c (?). */
1720FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1721
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * In the memory forms the source operand is read unconditionally; only the
 * store to the destination register is guarded by the condition.  For 32-bit
 * operands the high half of the 64-bit destination is cleared even when the
 * condition is false (see the IEM_MC_ELSE branch), matching x86-64
 * zero-extension rules.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
1730#define CMOV_X(a_Cnd) \
1731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1733 { \
1734 switch (pIemCpu->enmEffOpSize) \
1735 { \
1736 case IEMMODE_16BIT: \
1737 IEM_MC_BEGIN(0, 1); \
1738 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1739 a_Cnd { \
1740 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1741 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_32BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1752 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1753 } IEM_MC_ELSE() { \
1754 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1755 } IEM_MC_ENDIF(); \
1756 IEM_MC_ADVANCE_RIP(); \
1757 IEM_MC_END(); \
1758 return VINF_SUCCESS; \
1759 \
1760 case IEMMODE_64BIT: \
1761 IEM_MC_BEGIN(0, 1); \
1762 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1763 a_Cnd { \
1764 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1765 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1766 } IEM_MC_ENDIF(); \
1767 IEM_MC_ADVANCE_RIP(); \
1768 IEM_MC_END(); \
1769 return VINF_SUCCESS; \
1770 \
1771 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1772 } \
1773 } \
1774 else \
1775 { \
1776 switch (pIemCpu->enmEffOpSize) \
1777 { \
1778 case IEMMODE_16BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1786 } IEM_MC_ENDIF(); \
1787 IEM_MC_ADVANCE_RIP(); \
1788 IEM_MC_END(); \
1789 return VINF_SUCCESS; \
1790 \
1791 case IEMMODE_32BIT: \
1792 IEM_MC_BEGIN(0, 2); \
1793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1794 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1796 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1797 a_Cnd { \
1798 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1799 } IEM_MC_ELSE() { \
1800 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 case IEMMODE_64BIT: \
1807 IEM_MC_BEGIN(0, 2); \
1808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1809 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1811 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1812 a_Cnd { \
1813 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1814 } IEM_MC_ENDIF(); \
1815 IEM_MC_ADVANCE_RIP(); \
1816 IEM_MC_END(); \
1817 return VINF_SUCCESS; \
1818 \
1819 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1820 } \
1821 } do {} while (0)
1822
1823
1824
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb Gv,Ev (move if CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae Gv,Ev (move if CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev (move if ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev (move if ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev (move if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev (move if SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev (move if SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev (move if ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev (move if ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1953
1954/** Opcode 0x0f 0x50. */
1955FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
1956/** Opcode 0x0f 0x51. */
1957FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
1958/** Opcode 0x0f 0x52. */
1959FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
1960/** Opcode 0x0f 0x53. */
1961FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
1962/** Opcode 0x0f 0x54. */
1963FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
1964/** Opcode 0x0f 0x55. */
1965FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
1966/** Opcode 0x0f 0x56. */
1967FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
1968/** Opcode 0x0f 0x57. */
1969FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
1970/** Opcode 0x0f 0x58. */
1971FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
1972/** Opcode 0x0f 0x59. */
1973FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
1974/** Opcode 0x0f 0x5a. */
1975FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
1976/** Opcode 0x0f 0x5b. */
1977FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
1978/** Opcode 0x0f 0x5c. */
1979FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
1980/** Opcode 0x0f 0x5d. */
1981FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
1982/** Opcode 0x0f 0x5e. */
1983FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
1984/** Opcode 0x0f 0x5f. */
1985FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1986
1987
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first (low) half of a register, which in the memory
 * case means a 32-bit memory access for MMX, and a 128-bit aligned 64-bit
 * memory access for SSE (see IEM_MC_FETCH_MEM_U64_ALIGN_U128 below).
 *
 * Exceptions type 4.
 *
 * @param   pImpl   The low-half media implementation table; pfnU64 may be
 *                  NULL when the instruction has no MMX form (e.g. punpcklqdq).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the flavor: 0x66 -> SSE, none -> MMX,
       F2/F3 -> invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit read, but with full 128-bit alignment checking. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX form for this opcode */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2094
2095
/** Opcode 0x0f 0x60.
 * punpcklbw Pq,Qd (MMX) / punpcklbw Vdq,Wdq (SSE2); dispatches to the common
 * low-half worker with the punpcklbw implementation table. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2102
2103
/** Opcode 0x0f 0x61.
 * punpcklwd Pq,Qd (MMX) / punpcklwd Vdq,Wdq (SSE2); dispatches to the common
 * low-half worker with the punpcklwd implementation table. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2110
2111
/** Opcode 0x0f 0x62.
 * punpckldq Pq,Qd (MMX) / punpckldq Vdq,Wdq (SSE2); dispatches to the common
 * low-half worker with the punpckldq implementation table. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2118
2119
/* Decoder stubs for opcodes 0x0f 0x63..0x67 (pack/compare forms); not
   implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2130
2131
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second (high) half of a register, which in the memory
 * case means a 64-bit memory access for MMX, and for SSE a 128-bit aligned
 * access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   The high-half media implementation table; pfnU64 may be
 *                  NULL when the instruction has no MMX form (e.g. punpckhqdq).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the flavor: 0x66 -> SSE, none -> MMX,
       F2/F3 -> invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX form for this opcode */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2238
2239
/** Opcode 0x0f 0x68.
 * punpckhbw Pq,Qq (MMX) / punpckhbw Vdq,Wdq (SSE2); dispatches to the common
 * high-half worker with the punpckhbw implementation table. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2246
2247
/** Opcode 0x0f 0x69.
 * punpckhwd Pq,Qd (MMX) / punpckhwd Vdq,Wdq (SSE2); dispatches to the common
 * high-half worker with the punpckhwd implementation table. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2254
2255
/** Opcode 0x0f 0x6a.
 * punpckhdq Pq,Qd (MMX) / punpckhdq Vdq,Wdq (SSE2); dispatches to the common
 * high-half worker with the punpckhdq implementation table. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2262
/** Opcode 0x0f 0x6b.
 * NOTE(review): the second mnemonic in the identifier reads "packssdq";
 * presumably both halves should be "packssdw" — verify against the opcode
 * table entry before renaming, since the table references this symbol. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2265
2266
/** Opcode 0x0f 0x6c.
 * punpcklqdq Vdq,Wdq (SSE2 only); the common low-half worker rejects the
 * no-prefix MMX path because g_iemAImpl_punpcklqdq has no pfnU64. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2273
2274
/** Opcode 0x0f 0x6d.
 * punpckhqdq Vdq,Wdq (SSE2 only); the common high-half worker rejects the
 * no-prefix MMX path because g_iemAImpl_punpckhqdq has no pfnU64. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2281
2282
/** Opcode 0x0f 0x6e.
 * movd/movq Pd/q,Ed/q (MMX) and movd/movq Vd/q,Ed/q (SSE2): load a GPR or
 * memory dword/qword into an MMX or XMM register.  REX.W selects the 64-bit
 * form; the SSE store zero-extends to the full 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 -> SSE form, no prefix -> MMX form, F2/F3 -> invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W) /* REX.W: 64-bit movq form */
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else /* dword source is zero-extended into the 64-bit MMX register */
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2383
2384
/** Opcode 0x0f 0x6f.
 * movq Pq,Qq (MMX), movdqa Vdq,Wdq (66h, aligned) and movdqu Vdq,Wdq
 * (F3h, unaligned): register/memory load into an MMX or XMM register. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only movdqa (0x66) enforces 16-byte alignment. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2478
2479
/** Opcode 0x0f 0x70.  The immediate here is evil!
 * pshufw (MMX ext, no prefix), pshufd (66h), pshuflw (F2h), pshufhw (F3h).
 * The trailing order byte must be fetched AFTER the ModR/M displacement in
 * the memory forms, hence the mid-decode IEM_OPCODE_GET_NEXT_U8 calls below.
 * NOTE(review): the identifier says "pshuflq"; presumably "pshuflw" was
 * intended — confirm against the opcode table before renaming. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Pick the implementation matching the mandatory prefix. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The order immediate follows the addressing bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The order immediate follows the addressing bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2604
2605
/* Group 12 (0x0f 0x71) worker stubs: word shifts by immediate, register forms
   only; Nq = MMX register operand, Udq = XMM register operand. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2623
2624
2625/** Opcode 0x0f 0x71. */
2626FNIEMOP_DEF(iemOp_Grp12)
2627{
2628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2629 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2630 return IEMOP_RAISE_INVALID_OPCODE();
2631 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2632 {
2633 case 0: case 1: case 3: case 5: case 7:
2634 return IEMOP_RAISE_INVALID_OPCODE();
2635 case 2:
2636 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2637 {
2638 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2639 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2640 default: return IEMOP_RAISE_INVALID_OPCODE();
2641 }
2642 case 4:
2643 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2644 {
2645 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2646 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2647 default: return IEMOP_RAISE_INVALID_OPCODE();
2648 }
2649 case 6:
2650 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2651 {
2652 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2653 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2654 default: return IEMOP_RAISE_INVALID_OPCODE();
2655 }
2656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2657 }
2658}
2659
2660
/* Group 13 (0x0f 0x72) worker stubs: dword shifts by immediate, register forms
   only; Nq = MMX register operand, Udq = XMM register operand. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2678
2679
2680/** Opcode 0x0f 0x72. */
2681FNIEMOP_DEF(iemOp_Grp13)
2682{
2683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2684 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2685 return IEMOP_RAISE_INVALID_OPCODE();
2686 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2687 {
2688 case 0: case 1: case 3: case 5: case 7:
2689 return IEMOP_RAISE_INVALID_OPCODE();
2690 case 2:
2691 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2692 {
2693 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2694 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2695 default: return IEMOP_RAISE_INVALID_OPCODE();
2696 }
2697 case 4:
2698 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2699 {
2700 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2701 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2702 default: return IEMOP_RAISE_INVALID_OPCODE();
2703 }
2704 case 6:
2705 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2706 {
2707 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2708 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2709 default: return IEMOP_RAISE_INVALID_OPCODE();
2710 }
2711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2712 }
2713}
2714
2715
/* Group 14 (0x0f 0x73) worker stubs: qword/dqword shifts by immediate,
   register forms only; psrldq/pslldq exist only in the SSE (Udq) form. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2733
2734
2735/** Opcode 0x0f 0x73. */
2736FNIEMOP_DEF(iemOp_Grp14)
2737{
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2740 return IEMOP_RAISE_INVALID_OPCODE();
2741 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2742 {
2743 case 0: case 1: case 4: case 5:
2744 return IEMOP_RAISE_INVALID_OPCODE();
2745 case 2:
2746 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2747 {
2748 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2749 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2750 default: return IEMOP_RAISE_INVALID_OPCODE();
2751 }
2752 case 3:
2753 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2754 {
2755 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2756 default: return IEMOP_RAISE_INVALID_OPCODE();
2757 }
2758 case 6:
2759 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2760 {
2761 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2762 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2763 default: return IEMOP_RAISE_INVALID_OPCODE();
2764 }
2765 case 7:
2766 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2767 {
2768 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2769 default: return IEMOP_RAISE_INVALID_OPCODE();
2770 }
2771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2772 }
2773}
2774
2775
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 *
 * @param   pImpl   The full-width media implementation table (both pfnU64
 *                  and pfnU128 are used here, one per flavor).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 -> SSE form, no prefix -> MMX form, F2/F3 -> invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 16-byte alignment is enforced on the 128-bit operand. */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2877
2878
/** Opcode 0x0f 0x74.
 * pcmpeqb Pq,Qq (MMX) / pcmpeqb Vdq,Wdq (SSE2); dispatches to the common
 * full-width worker with the pcmpeqb implementation table. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2885
2886
/** Opcode 0x0f 0x75.
 * pcmpeqw Pq,Qq (MMX) / pcmpeqw Vdq,Wdq (SSE2); dispatches to the common
 * full-width worker with the pcmpeqw implementation table. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2893
2894
/** Opcode 0x0f 0x76.
 * pcmpeqd Pq,Qq (MMX) / pcmpeqd Vdq,Wdq (SSE2); dispatches to the common
 * full-width worker with the pcmpeqd implementation table.
 * NOTE(review): the identifier says "pcmped" — presumably a "pcmpeqd" typo;
 * renaming requires touching the opcode table that references this symbol. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2901
2902
/** Opcode 0x0f 0x77.  Decoder stub, not implemented yet. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78.  Decodes as invalid opcode (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79.  Decodes as invalid opcode (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c.  Decoder stub, not implemented yet. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d.  Decoder stub, not implemented yet. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2913
2914
/** Opcode 0x0f 0x7e.
 * movd/movq Ed/q,Pd/q (MMX) and movd/movq Ed/q,Vd/q (SSE2): store the low
 * dword/qword of an MMX or XMM register to a GPR or memory.  REX.W selects
 * the 64-bit form.  (The F3-prefixed movq Vq,Wq form is not handled here and
 * falls into the invalid-opcode default.) */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 -> SSE form, no prefix -> MMX form, F2/F3 -> invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W) /* REX.W: 64-bit movq form */
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3021
3022
/** Opcode 0x0f 0x7f. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    /*
     * Decodes 0x0f 0x7f, whose meaning depends on the mandatory prefix:
     *   66       -> movdqa Wdq,Vdq (SSE, aligned store)
     *   F3       -> movdqu Wdq,Vdq (SSE, unaligned store)
     *   none     -> movq   Qq,Pq   (MMX)
     *   F2       -> invalid opcode (default case below).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned variants share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* movdqa raises #GP on a misaligned destination, movdqu does not. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* MMX registers are not REX extended, so no uRexReg/uRexB here. */
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3117
3118
3119
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* jo rel16/32: jump near if the overflow flag (OF) is set. */
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3153
3154
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* jno rel16/32: jump near if the overflow flag (OF) is clear
       (branches in the else arm, i.e. the test is inverted). */
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3188
3189
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* jc/jb/jnae rel16/32: jump near if the carry flag (CF) is set. */
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3223
3224
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* jnc/jnb/jae rel16/32: jump near if the carry flag (CF) is clear
       (branches in the else arm, i.e. the test is inverted). */
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3258
3259
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* je/jz rel16/32: jump near if the zero flag (ZF) is set. */
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3293
3294
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* jne/jnz rel16/32: jump near if the zero flag (ZF) is clear
       (branches in the else arm, i.e. the test is inverted). */
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3328
3329
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* jbe/jna rel16/32: jump near if CF or ZF is set (below or equal, unsigned). */
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3363
3364
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* jnbe/ja rel16/32: jump near if both CF and ZF are clear (above, unsigned);
       branches in the else arm, i.e. the test is inverted. */
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3398
3399
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* js rel16/32: jump near if the sign flag (SF) is set. */
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3433
3434
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* jns rel16/32: jump near if the sign flag (SF) is clear
       (branches in the else arm, i.e. the test is inverted). */
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3468
3469
/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* jp/jpe rel16/32: jump near if the parity flag (PF) is set. */
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3503
3504
3505/** Opcode 0x0f 0x8b. */
3506FNIEMOP_DEF(iemOp_jnp_Jv)
3507{
3508 IEMOP_MNEMONIC("jo Jv");
3509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3510 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3511 {
3512 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3513 IEMOP_HLP_NO_LOCK_PREFIX();
3514
3515 IEM_MC_BEGIN(0, 0);
3516 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3517 IEM_MC_ADVANCE_RIP();
3518 } IEM_MC_ELSE() {
3519 IEM_MC_REL_JMP_S16(i16Imm);
3520 } IEM_MC_ENDIF();
3521 IEM_MC_END();
3522 }
3523 else
3524 {
3525 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3526 IEMOP_HLP_NO_LOCK_PREFIX();
3527
3528 IEM_MC_BEGIN(0, 0);
3529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3530 IEM_MC_ADVANCE_RIP();
3531 } IEM_MC_ELSE() {
3532 IEM_MC_REL_JMP_S32(i32Imm);
3533 } IEM_MC_ENDIF();
3534 IEM_MC_END();
3535 }
3536 return VINF_SUCCESS;
3537}
3538
3539
/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* jl/jnge rel16/32: jump near if SF != OF (less, signed comparison). */
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3573
3574
/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* jnl/jge rel16/32: jump near if SF == OF (greater or equal, signed);
       branches in the else arm, i.e. the SF != OF test is inverted. */
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3608
3609
/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* jle/jng rel16/32: jump near if ZF is set or SF != OF (less or equal, signed). */
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3643
3644
/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* jnle/jg rel16/32: jump near if ZF is clear and SF == OF (greater, signed);
       branches in the else arm, i.e. the jle condition is inverted. */
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3678
3679
/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* seto Eb: store 1 in the byte operand if OF is set, else 0. */
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3718
3719
/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* setno Eb: store 1 in the byte operand if OF is clear, else 0
       (note the 0/1 constants are swapped relative to seto). */
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3758
3759
/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* setc/setb/setnae Eb: store 1 in the byte operand if CF is set, else 0. */
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3798
3799
/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* setnc/setnb/setae Eb: store 1 in the byte operand if CF is clear, else 0
       (0/1 constants swapped relative to setc). */
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3838
3839
/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* sete/setz Eb: store 1 in the byte operand if ZF is set, else 0. */
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3878
3879
/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* setne/setnz Eb: store 1 in the byte operand if ZF is clear, else 0
       (0/1 constants swapped relative to sete). */
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3918
3919
/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* setbe/setna Eb: store 1 in the byte operand if CF or ZF is set
       (below or equal, unsigned), else 0. */
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3958
3959
/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* setnbe/seta Eb: store 1 in the byte operand if both CF and ZF are clear
       (above, unsigned), else 0 (0/1 constants swapped relative to setbe). */
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3998
3999
/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* sets Eb: store 1 in the byte operand if SF is set, else 0. */
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4038
4039
/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* setns Eb: store 1 in the byte operand if SF is clear, else 0
       (0/1 constants swapped relative to sets). */
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4078
4079
4080/** Opcode 0x0f 0x9a. */
4081FNIEMOP_DEF(iemOp_setp_Eb)
4082{
4083 IEMOP_MNEMONIC("setnp Eb");
4084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4085 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4086
4087 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4088 * any way. AMD says it's "unused", whatever that means. We're
4089 * ignoring for now. */
4090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4091 {
4092 /* register target */
4093 IEM_MC_BEGIN(0, 0);
4094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4095 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4096 } IEM_MC_ELSE() {
4097 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4098 } IEM_MC_ENDIF();
4099 IEM_MC_ADVANCE_RIP();
4100 IEM_MC_END();
4101 }
4102 else
4103 {
4104 /* memory target */
4105 IEM_MC_BEGIN(0, 1);
4106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4109 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4110 } IEM_MC_ELSE() {
4111 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4112 } IEM_MC_ENDIF();
4113 IEM_MC_ADVANCE_RIP();
4114 IEM_MC_END();
4115 }
4116 return VINF_SUCCESS;
4117}
4118
4119
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP: set byte to 1 if the parity flag is clear, else 0. */
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4158
4159
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL / SETNGE: set byte to 1 if SF != OF (signed less-than), else 0. */
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4198
4199
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL / SETGE: set byte to 1 if SF == OF (signed greater-or-equal), else 0. */
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4238
4239
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE / SETNG: set byte to 1 if ZF is set or SF != OF (signed
       less-or-equal), else 0. */
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4278
4279
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE / SETG: set byte to 1 if ZF is clear and SF == OF (signed
       greater-than), else 0.  (Inverse of SETLE.) */
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4318
4319
/**
 * Common 'push segment-register' helper.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).  Pushing
 *                  ES/CS/SS/DS is invalid in 64-bit mode (see the
 *                  IEMOP_HLP_NO_64BIT check below); FS/GS are always valid.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): a dedicated SREG push is used for the 32-bit case,
               presumably to model CPUs that only write 16 bits of the stack
               slot for segment pushes -- confirm against IEM_MC_PUSH_U32_SREG. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4362
4363
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS -- defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4371
4372
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS -- segment loading has side effects (checks, hidden register
       update), so it is deferred to a C implementation. */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4380
4381
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID -- fully handled by the C implementation. */
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4389
4390
4391/**
4392 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4393 * iemOp_bts_Ev_Gv.
4394 */
4395FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4396{
4397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4398 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4399
4400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4401 {
4402 /* register destination. */
4403 IEMOP_HLP_NO_LOCK_PREFIX();
4404 switch (pIemCpu->enmEffOpSize)
4405 {
4406 case IEMMODE_16BIT:
4407 IEM_MC_BEGIN(3, 0);
4408 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4409 IEM_MC_ARG(uint16_t, u16Src, 1);
4410 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4411
4412 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4413 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4414 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4415 IEM_MC_REF_EFLAGS(pEFlags);
4416 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4417
4418 IEM_MC_ADVANCE_RIP();
4419 IEM_MC_END();
4420 return VINF_SUCCESS;
4421
4422 case IEMMODE_32BIT:
4423 IEM_MC_BEGIN(3, 0);
4424 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4425 IEM_MC_ARG(uint32_t, u32Src, 1);
4426 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4427
4428 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4429 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4430 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4431 IEM_MC_REF_EFLAGS(pEFlags);
4432 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4433
4434 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4435 IEM_MC_ADVANCE_RIP();
4436 IEM_MC_END();
4437 return VINF_SUCCESS;
4438
4439 case IEMMODE_64BIT:
4440 IEM_MC_BEGIN(3, 0);
4441 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4442 IEM_MC_ARG(uint64_t, u64Src, 1);
4443 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4444
4445 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4446 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4447 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4448 IEM_MC_REF_EFLAGS(pEFlags);
4449 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4450
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 return VINF_SUCCESS;
4454
4455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4456 }
4457 }
4458 else
4459 {
4460 /* memory destination. */
4461
4462 uint32_t fAccess;
4463 if (pImpl->pfnLockedU16)
4464 fAccess = IEM_ACCESS_DATA_RW;
4465 else /* BT */
4466 {
4467 IEMOP_HLP_NO_LOCK_PREFIX();
4468 fAccess = IEM_ACCESS_DATA_R;
4469 }
4470
4471 NOREF(fAccess);
4472
4473 /** @todo test negative bit offsets! */
4474 switch (pIemCpu->enmEffOpSize)
4475 {
4476 case IEMMODE_16BIT:
4477 IEM_MC_BEGIN(3, 2);
4478 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4479 IEM_MC_ARG(uint16_t, u16Src, 1);
4480 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4482 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4483
4484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4485 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4486 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4487 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4488 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4489 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4490 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4491 IEM_MC_FETCH_EFLAGS(EFlags);
4492
4493 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4494 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4495 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4496 else
4497 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4498 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4499
4500 IEM_MC_COMMIT_EFLAGS(EFlags);
4501 IEM_MC_ADVANCE_RIP();
4502 IEM_MC_END();
4503 return VINF_SUCCESS;
4504
4505 case IEMMODE_32BIT:
4506 IEM_MC_BEGIN(3, 2);
4507 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4508 IEM_MC_ARG(uint32_t, u32Src, 1);
4509 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4511 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4512
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4514 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4515 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4516 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4517 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4518 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4519 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4520 IEM_MC_FETCH_EFLAGS(EFlags);
4521
4522 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4523 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4524 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4525 else
4526 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4527 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4528
4529 IEM_MC_COMMIT_EFLAGS(EFlags);
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 return VINF_SUCCESS;
4533
4534 case IEMMODE_64BIT:
4535 IEM_MC_BEGIN(3, 2);
4536 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4537 IEM_MC_ARG(uint64_t, u64Src, 1);
4538 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4540 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4541
4542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4543 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4544 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4545 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4546 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4547 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4548 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4549 IEM_MC_FETCH_EFLAGS(EFlags);
4550
4551 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4552 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4553 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4554 else
4555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4556 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4557
4558 IEM_MC_COMMIT_EFLAGS(EFlags);
4559 IEM_MC_ADVANCE_RIP();
4560 IEM_MC_END();
4561 return VINF_SUCCESS;
4562
4563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4564 }
4565 }
4566}
4567
4568
4569/** Opcode 0x0f 0xa3. */
4570FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4571{
4572 IEMOP_MNEMONIC("bt Gv,Gv");
4573 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4574}
4575
4576
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * @param   pImpl   Double-precision shift implementation table (per operand
 *                  size worker function pointers).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the shift count immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* The effective address is decoded first with one extra byte
                   reserved (the trailing Ib), then the immediate is fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4721
4722
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Like the Ib variant, but the shift count is read from the CL register
 * at execution time instead of from an immediate byte.
 *
 * @param   pImpl   Double-precision shift implementation table (per operand
 *                  size worker function pointers).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);      /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);  /* no immediate follows */
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4866
4867
4868
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD with immediate shift count. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4875
4876
/** Opcode 0x0f 0xa5.  (The comment previously said 0xa7, but SHLD Ev,Gv,CL
 *  is encoded as 0x0f 0xa5; 0xa6/0xa7 are not valid two-byte opcodes here.) */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD with shift count in CL. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4883
4884
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS -- defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4892
4893
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS -- segment loading is deferred to the C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4901
4902
/** Opcode 0x0f 0xaa.  RSM -- not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
4905
4906
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS -- bit test and set; shares decoding with the other bit ops. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4913
4914
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD with immediate shift count. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4921
4922
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD with shift count in CL. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4929
4930
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 -- raises #UD unless the guest CPU advertises
       FXSAVE/FXRSTOR support; the actual state save is done in C. */
    IEMOP_MNEMONIC("fxsave   m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4949
4950
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 -- mirror image of fxsave above; #UD without
       FXSAVE/FXRSTOR support, state restore done in C. */
    IEMOP_MNEMONIC("fxrstor   m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4969
4970
/** Opcode 0x0f 0xae mem/2.  LDMXCSR -- not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3.  STMXCSR -- not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4.  XSAVE -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5.  XRSTOR -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6.  XSAVEOPT -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7.  CLFLUSH -- not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
4988
4989
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE -- #UD unless the guest advertises SSE2.  When the host also
       has SSE2 the real fence instruction is executed, otherwise an
       alternative memory-fence helper is used. */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5007
5008
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE -- same guest/host SSE2 handling as lfence above. */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5026
5027
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE -- same guest/host SSE2 handling as lfence/mfence.
       NOTE(review): architecturally SFENCE was introduced with SSE, not
       SSE2; this gates on fSse2 like the other fences -- confirm intent. */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5045
5046
/** Opcode 0xf3 0x0f 0xae 11b/0.  RDFSBASE -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1.  RDGSBASE -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2.  WRFSBASE -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3.  WRGSBASE -- stub raising \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5058
5059
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  The memory forms are selected by the ModR/M reg
     * field alone; the register (11b) forms are additionally selected by the
     * instruction prefixes (no prefix = fences, F3 = rd/wr fs/gs base).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms, selected by prefix */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefixes: fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */

            case IEM_OP_PRF_REPZ: /* F3: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5118
5119
/** Opcode 0x0f 0xaf.
 *
 * Two-operand IMUL; delegates to the common Gv,Ev binary-operator worker
 * with the two-operand imul implementation table.  SF/ZF/AF/PF are
 * declared undefined for the verification mode comparison.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5127
5128
/** Opcode 0x0f 0xb0.
 *
 * CMPXCHG Eb,Gb: compares AL with the destination byte; on match stores
 * the source register into the destination, otherwise loads the
 * destination into AL.  The locked variant is used when a LOCK prefix is
 * present (only meaningful for the memory form; the decoder lets the
 * register form pick the locked helper too if the prefix is set).
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: all three operands referenced in place. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it RW, work on a local AL copy, then
           commit memory, EFLAGS and AL back in that order. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5186
/** Opcode 0x0f 0xb1.
 *
 * CMPXCHG Ev,Gv for 16/32/64-bit operand sizes; same structure as the
 * byte variant.  On 32-bit hosts (RT_ARCH_X86) the 64-bit source is
 * passed to the assembly helper by reference instead of by value.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero the upper halves of both the
                   destination and RAX in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, operate on a local rAX copy, then
           commit memory, EFLAGS and rAX. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5379
5380
/**
 * Common worker for LSS/LFS/LGS (far pointer loads): fetches an
 * offset:selector pair from memory and hands it to
 * iemCImpl_load_SReg_Greg, which loads the selector into @a iSegReg and
 * the offset into the general register encoded in @a bRm's reg field.
 * The selector always sits after the offset, so its displacement is the
 * offset width (2/4/8 bytes).  Only memory forms reach this worker.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5442
5443
5444/** Opcode 0x0f 0xb2. */
5445FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5446{
5447 IEMOP_MNEMONIC("lss Gv,Mp");
5448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5449 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5450 return IEMOP_RAISE_INVALID_OPCODE();
5451 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5452}
5453
5454
/** Opcode 0x0f 0xb3.
 *
 * BTR Ev,Gv - bit test and reset; delegates to the common bit-op worker
 * with the btr implementation table.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
5461
5462
5463/** Opcode 0x0f 0xb4. */
5464FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5465{
5466 IEMOP_MNEMONIC("lfs Gv,Mp");
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5469 return IEMOP_RAISE_INVALID_OPCODE();
5470 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5471}
5472
5473
5474/** Opcode 0x0f 0xb5. */
5475FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5476{
5477 IEMOP_MNEMONIC("lgs Gv,Mp");
5478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5480 return IEMOP_RAISE_INVALID_OPCODE();
5481 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5482}
5483
5484
/** Opcode 0x0f 0xb6.
 *
 * MOVZX Gv,Eb - zero-extend a byte register/memory operand into a
 * 16/32/64-bit general register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5574
5575
/** Opcode 0x0f 0xb7.
 *
 * MOVZX Gv,Ew - zero-extend a word register/memory operand.  The 16-bit
 * and 32-bit operand sizes share the 32-bit path (the 16-bit destination
 * form still stores 32 bits here).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5641
5642
/** Opcode 0x0f 0xb8.
 * Unimplemented stub (POPCNT with F3 prefix / JMPE, per the symbol name —
 * confirm against the opcode map when implementing). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5645
5646
/** Opcode 0x0f 0xb9.
 *
 * Group 10: unconditionally raises \#UD regardless of the ModR/M byte.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5653
5654
/** Opcode 0x0f 0xba.
 *
 * Group 8: BT/BTS/BTR/BTC Ev,Ib selected by the ModR/M reg field
 * (0..3 are invalid).  The immediate bit index is masked to the operand
 * width (0x0f/0x1f/0x3f), so unlike the Gv forms no displacement
 * adjustment is needed.  For the memory forms the immediate byte is
 * fetched after the effective address (hence the '1' passed to
 * IEM_MC_CALC_RM_EFF_ADDR for the trailing immediate byte).
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register write clears the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; BTS/BTR/BTC read-modify-write and thus allow LOCK
           (pfnLockedU16 is NULL for the bt table). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5816
5817
/** Opcode 0x0f 0xbb.
 *
 * BTC Ev,Gv - bit test and complement; delegates to the common bit-op
 * worker with the btc implementation table.
 */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5824
5825
/** Opcode 0x0f 0xbc.
 *
 * BSF Gv,Ev - bit scan forward; delegates to the common Gv,Ev worker.
 * Everything but ZF is declared undefined for verification mode.
 */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5833
5834
/** Opcode 0x0f 0xbd.
 *
 * BSR Gv,Ev - bit scan reverse; delegates to the common Gv,Ev worker.
 * Everything but ZF is declared undefined for verification mode.
 */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5842
5843
/** Opcode 0x0f 0xbe.
 *
 * MOVSX Gv,Eb - sign-extend a byte register/memory operand into a
 * 16/32/64-bit general register.  Mirrors iemOp_movzx_Gv_Eb with the
 * SX fetch variants.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5933
5934
/** Opcode 0x0f 0xbf.
 *
 * MOVSX Gv,Ew - sign-extend a word register/memory operand.  The 16-bit
 * and 32-bit operand sizes share the 32-bit path, mirroring
 * iemOp_movzx_Gv_Ew.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6000
6001
6002/** Opcode 0x0f 0xc0. */
6003FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6004{
6005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6006 IEMOP_MNEMONIC("xadd Eb,Gb");
6007
6008 /*
6009 * If rm is denoting a register, no more instruction bytes.
6010 */
6011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6012 {
6013 IEMOP_HLP_NO_LOCK_PREFIX();
6014
6015 IEM_MC_BEGIN(3, 0);
6016 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6017 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6018 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6019
6020 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6021 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6022 IEM_MC_REF_EFLAGS(pEFlags);
6023 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6024
6025 IEM_MC_ADVANCE_RIP();
6026 IEM_MC_END();
6027 }
6028 else
6029 {
6030 /*
6031 * We're accessing memory.
6032 */
6033 IEM_MC_BEGIN(3, 3);
6034 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6035 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6036 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6037 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6039
6040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6041 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6042 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6043 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6044 IEM_MC_FETCH_EFLAGS(EFlags);
6045 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6046 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6047 else
6048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6049
6050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6051 IEM_MC_COMMIT_EFLAGS(EFlags);
6052 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
6053 IEM_MC_ADVANCE_RIP();
6054 IEM_MC_END();
6055 return VINF_SUCCESS;
6056 }
6057 return VINF_SUCCESS;
6058}
6059
6060
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register destination: operate directly on the two register
           references, one case per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes clear the upper halves of both
                   64-bit registers involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  For each operand size: map the destination
         * read-write, run the (possibly locked) worker on a stack copy of the
         * source register, then store the old destination value back to the
         * source register.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6212
/* Not yet implemented SSE/MMX instructions 0x0f 0xc2..0xc6 (FNIEMOP_STUB). */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6227
6228
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the 64-bit comparand EDX:EAX from the two 32-bit halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the 64-bit replacement value ECX:EBX the same way. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the current memory value in
       u64EaxEdx; propagate it back into EDX:EAX as the architecture requires. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6273
6274
/* Group 9 encodings not yet implemented; these raise \#UD (FNIEMOP_UD_STUB_1). */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6292
6293
6294/** Opcode 0x0f 0xc7. */
6295FNIEMOP_DEF(iemOp_Grp9)
6296{
6297 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6299 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6300 {
6301 case 0: case 2: case 3: case 4: case 5:
6302 return IEMOP_RAISE_INVALID_OPCODE();
6303 case 1:
6304 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6305 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6306 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6307 return IEMOP_RAISE_INVALID_OPCODE();
6308 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6309 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6310 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6311 case 6:
6312 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6313 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6314 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6315 {
6316 case 0:
6317 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6318 case IEM_OP_PRF_SIZE_OP:
6319 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6320 case IEM_OP_PRF_REPZ:
6321 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6322 default:
6323 return IEMOP_RAISE_INVALID_OPCODE();
6324 }
6325 case 7:
6326 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6327 {
6328 case 0:
6329 case IEM_OP_PRF_REPZ:
6330 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6331 default:
6332 return IEMOP_RAISE_INVALID_OPCODE();
6333 }
6334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6335 }
6336}
6337
6338
6339/**
6340 * Common 'bswap register' helper.
6341 */
6342FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6343{
6344 IEMOP_HLP_NO_LOCK_PREFIX();
6345 switch (pIemCpu->enmEffOpSize)
6346 {
6347 case IEMMODE_16BIT:
6348 IEM_MC_BEGIN(1, 0);
6349 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6350 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6351 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 case IEMMODE_32BIT:
6357 IEM_MC_BEGIN(1, 0);
6358 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6359 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6360 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6361 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 return VINF_SUCCESS;
6365
6366 case IEMMODE_64BIT:
6367 IEM_MC_BEGIN(1, 0);
6368 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6369 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6370 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6371 IEM_MC_ADVANCE_RIP();
6372 IEM_MC_END();
6373 return VINF_SUCCESS;
6374
6375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6376 }
6377}
6378
6379
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6389
6390
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    /* REX.B selects r9; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6397
6398
6399/** Opcode 0x0f 0xca. */
6400FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6401{
6402 IEMOP_MNEMONIC("bswap rDX/r9");
6403 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6404}
6405
6406
6407/** Opcode 0x0f 0xcb. */
6408FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6409{
6410 IEMOP_MNEMONIC("bswap rBX/r9");
6411 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6412}
6413
6414
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    /* REX.B selects r12; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    /* REX.B selects r13. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    /* REX.B selects r14. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}


/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    /* REX.B selects r15. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6445
6446
6447
/* Not yet implemented SSE/MMX instructions 0x0f 0xd0..0xd6 (FNIEMOP_STUB). */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6462
6463
6464/** Opcode 0x0f 0xd7. */
6465FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6466{
6467 /* Docs says register only. */
6468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6469 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6470 return IEMOP_RAISE_INVALID_OPCODE();
6471
6472 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6473 /** @todo testcase: Check that the instruction implicitly clears the high
6474 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6475 * and opcode modifications are made to work with the whole width (not
6476 * just 128). */
6477 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6478 {
6479 case IEM_OP_PRF_SIZE_OP: /* SSE */
6480 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6481 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6482 IEM_MC_BEGIN(2, 0);
6483 IEM_MC_ARG(uint64_t *, pDst, 0);
6484 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6486 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6487 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6488 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 return VINF_SUCCESS;
6492
6493 case 0: /* MMX */
6494 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6495 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6496 IEM_MC_BEGIN(2, 0);
6497 IEM_MC_ARG(uint64_t *, pDst, 0);
6498 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6499 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6500 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6501 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6502 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6503 IEM_MC_ADVANCE_RIP();
6504 IEM_MC_END();
6505 return VINF_SUCCESS;
6506
6507 default:
6508 return IEMOP_RAISE_INVALID_OPCODE();
6509 }
6510}
6511
6512
/* Not yet implemented MMX/SSE instructions 0x0f 0xd8..0xee (FNIEMOP_STUB). */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6559
6560
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Shared MMX/SSE2 full-width binary-operator decoder with the pxor worker table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6567
6568
/* Not yet implemented MMX/SSE instructions 0x0f 0xf0..0xfe (FNIEMOP_STUB). */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6599
6600
/**
 * The two byte opcode map (opcodes prefixed by 0x0f), indexed by the second
 * opcode byte.  Note: the entry for btc was previously mislabeled 0xbd; it is
 * opcode 0xbb (comment fixed, table contents unchanged).
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
6860
6861/** @} */
6862
6863
6864/** @name One byte opcodes.
6865 *
6866 * @{
6867 */
6868
6869/** Opcode 0x00. */
6870FNIEMOP_DEF(iemOp_add_Eb_Gb)
6871{
6872 IEMOP_MNEMONIC("add Eb,Gb");
6873 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
6874}
6875
6876
6877/** Opcode 0x01. */
6878FNIEMOP_DEF(iemOp_add_Ev_Gv)
6879{
6880 IEMOP_MNEMONIC("add Ev,Gv");
6881 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
6882}
6883
6884
6885/** Opcode 0x02. */
6886FNIEMOP_DEF(iemOp_add_Gb_Eb)
6887{
6888 IEMOP_MNEMONIC("add Gb,Eb");
6889 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
6890}
6891
6892
6893/** Opcode 0x03. */
6894FNIEMOP_DEF(iemOp_add_Gv_Ev)
6895{
6896 IEMOP_MNEMONIC("add Gv,Ev");
6897 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
6898}
6899
6900
6901/** Opcode 0x04. */
6902FNIEMOP_DEF(iemOp_add_Al_Ib)
6903{
6904 IEMOP_MNEMONIC("add al,Ib");
6905 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
6906}
6907
6908
6909/** Opcode 0x05. */
6910FNIEMOP_DEF(iemOp_add_eAX_Iz)
6911{
6912 IEMOP_MNEMONIC("add rAX,Iz");
6913 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
6914}
6915
6916
6917/** Opcode 0x06. */
6918FNIEMOP_DEF(iemOp_push_ES)
6919{
6920 IEMOP_MNEMONIC("push es");
6921 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
6922}
6923
6924
6925/** Opcode 0x07. */
6926FNIEMOP_DEF(iemOp_pop_ES)
6927{
6928 IEMOP_MNEMONIC("pop es");
6929 IEMOP_HLP_NO_64BIT();
6930 IEMOP_HLP_NO_LOCK_PREFIX();
6931 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
6932}
6933
6934
6935/** Opcode 0x08. */
6936FNIEMOP_DEF(iemOp_or_Eb_Gb)
6937{
6938 IEMOP_MNEMONIC("or Eb,Gb");
6939 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6940 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
6941}
6942
6943
6944/** Opcode 0x09. */
6945FNIEMOP_DEF(iemOp_or_Ev_Gv)
6946{
6947 IEMOP_MNEMONIC("or Ev,Gv ");
6948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6949 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
6950}
6951
6952
6953/** Opcode 0x0a. */
6954FNIEMOP_DEF(iemOp_or_Gb_Eb)
6955{
6956 IEMOP_MNEMONIC("or Gb,Eb");
6957 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6958 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
6959}
6960
6961
6962/** Opcode 0x0b. */
6963FNIEMOP_DEF(iemOp_or_Gv_Ev)
6964{
6965 IEMOP_MNEMONIC("or Gv,Ev");
6966 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6967 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
6968}
6969
6970
6971/** Opcode 0x0c. */
6972FNIEMOP_DEF(iemOp_or_Al_Ib)
6973{
6974 IEMOP_MNEMONIC("or al,Ib");
6975 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6976 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
6977}
6978
6979
6980/** Opcode 0x0d. */
6981FNIEMOP_DEF(iemOp_or_eAX_Iz)
6982{
6983 IEMOP_MNEMONIC("or rAX,Iz");
6984 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6985 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
6986}
6987
6988
6989/** Opcode 0x0e. */
6990FNIEMOP_DEF(iemOp_push_CS)
6991{
6992 IEMOP_MNEMONIC("push cs");
6993 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
6994}
6995
6996
6997/** Opcode 0x0f. */
6998FNIEMOP_DEF(iemOp_2byteEscape)
6999{
7000 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7001 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7002}
7003
7004/** Opcode 0x10. */
7005FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7006{
7007 IEMOP_MNEMONIC("adc Eb,Gb");
7008 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7009}
7010
7011
7012/** Opcode 0x11. */
7013FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7014{
7015 IEMOP_MNEMONIC("adc Ev,Gv");
7016 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7017}
7018
7019
7020/** Opcode 0x12. */
7021FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7022{
7023 IEMOP_MNEMONIC("adc Gb,Eb");
7024 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7025}
7026
7027
7028/** Opcode 0x13. */
7029FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7030{
7031 IEMOP_MNEMONIC("adc Gv,Ev");
7032 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7033}
7034
7035
7036/** Opcode 0x14. */
7037FNIEMOP_DEF(iemOp_adc_Al_Ib)
7038{
7039 IEMOP_MNEMONIC("adc al,Ib");
7040 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7041}
7042
7043
7044/** Opcode 0x15. */
7045FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7046{
7047 IEMOP_MNEMONIC("adc rAX,Iz");
7048 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7049}
7050
7051
7052/** Opcode 0x16. */
7053FNIEMOP_DEF(iemOp_push_SS)
7054{
7055 IEMOP_MNEMONIC("push ss");
7056 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7057}
7058
7059
7060/** Opcode 0x17. */
7061FNIEMOP_DEF(iemOp_pop_SS)
7062{
7063 IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
7064 IEMOP_HLP_NO_LOCK_PREFIX();
7065 IEMOP_HLP_NO_64BIT();
7066 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
7067}
7068
7069
7070/** Opcode 0x18. */
7071FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7072{
7073 IEMOP_MNEMONIC("sbb Eb,Gb");
7074 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7075}
7076
7077
7078/** Opcode 0x19. */
7079FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7080{
7081 IEMOP_MNEMONIC("sbb Ev,Gv");
7082 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7083}
7084
7085
7086/** Opcode 0x1a. */
7087FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7088{
7089 IEMOP_MNEMONIC("sbb Gb,Eb");
7090 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7091}
7092
7093
7094/** Opcode 0x1b. */
7095FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7096{
7097 IEMOP_MNEMONIC("sbb Gv,Ev");
7098 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7099}
7100
7101
7102/** Opcode 0x1c. */
7103FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7104{
7105 IEMOP_MNEMONIC("sbb al,Ib");
7106 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7107}
7108
7109
7110/** Opcode 0x1d. */
7111FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7112{
7113 IEMOP_MNEMONIC("sbb rAX,Iz");
7114 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7115}
7116
7117
7118/** Opcode 0x1e. */
7119FNIEMOP_DEF(iemOp_push_DS)
7120{
7121 IEMOP_MNEMONIC("push ds");
7122 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7123}
7124
7125
7126/** Opcode 0x1f. */
7127FNIEMOP_DEF(iemOp_pop_DS)
7128{
7129 IEMOP_MNEMONIC("pop ds");
7130 IEMOP_HLP_NO_LOCK_PREFIX();
7131 IEMOP_HLP_NO_64BIT();
7132 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
7133}
7134
7135
7136/** Opcode 0x20. */
7137FNIEMOP_DEF(iemOp_and_Eb_Gb)
7138{
7139 IEMOP_MNEMONIC("and Eb,Gb");
7140 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7141 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7142}
7143
7144
7145/** Opcode 0x21. */
7146FNIEMOP_DEF(iemOp_and_Ev_Gv)
7147{
7148 IEMOP_MNEMONIC("and Ev,Gv");
7149 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7150 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7151}
7152
7153
7154/** Opcode 0x22. */
7155FNIEMOP_DEF(iemOp_and_Gb_Eb)
7156{
7157 IEMOP_MNEMONIC("and Gb,Eb");
7158 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7159 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7160}
7161
7162
7163/** Opcode 0x23. */
7164FNIEMOP_DEF(iemOp_and_Gv_Ev)
7165{
7166 IEMOP_MNEMONIC("and Gv,Ev");
7167 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7168 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7169}
7170
7171
7172/** Opcode 0x24. */
7173FNIEMOP_DEF(iemOp_and_Al_Ib)
7174{
7175 IEMOP_MNEMONIC("and al,Ib");
7176 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7177 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7178}
7179
7180
7181/** Opcode 0x25. */
7182FNIEMOP_DEF(iemOp_and_eAX_Iz)
7183{
7184 IEMOP_MNEMONIC("and rAX,Iz");
7185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7186 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7187}
7188
7189
7190/** Opcode 0x26. */
7191FNIEMOP_DEF(iemOp_seg_ES)
7192{
7193 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7194 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
7195 pIemCpu->iEffSeg = X86_SREG_ES;
7196
7197 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7198 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7199}
7200
7201
7202/** Opcode 0x27. */
7203FNIEMOP_DEF(iemOp_daa)
7204{
7205 IEMOP_MNEMONIC("daa AL");
7206 IEMOP_HLP_NO_64BIT();
7207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7208 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7209 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7210}
7211
7212
7213/** Opcode 0x28. */
7214FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7215{
7216 IEMOP_MNEMONIC("sub Eb,Gb");
7217 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7218}
7219
7220
7221/** Opcode 0x29. */
7222FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7223{
7224 IEMOP_MNEMONIC("sub Ev,Gv");
7225 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7226}
7227
7228
7229/** Opcode 0x2a. */
7230FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7231{
7232 IEMOP_MNEMONIC("sub Gb,Eb");
7233 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7234}
7235
7236
7237/** Opcode 0x2b. */
7238FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7239{
7240 IEMOP_MNEMONIC("sub Gv,Ev");
7241 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7242}
7243
7244
7245/** Opcode 0x2c. */
7246FNIEMOP_DEF(iemOp_sub_Al_Ib)
7247{
7248 IEMOP_MNEMONIC("sub al,Ib");
7249 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7250}
7251
7252
7253/** Opcode 0x2d. */
7254FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7255{
7256 IEMOP_MNEMONIC("sub rAX,Iz");
7257 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7258}
7259
7260
7261/** Opcode 0x2e. */
7262FNIEMOP_DEF(iemOp_seg_CS)
7263{
7264 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7265 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
7266 pIemCpu->iEffSeg = X86_SREG_CS;
7267
7268 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7269 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7270}
7271
7272
7273/** Opcode 0x2f. */
7274FNIEMOP_DEF(iemOp_das)
7275{
7276 IEMOP_MNEMONIC("das AL");
7277 IEMOP_HLP_NO_64BIT();
7278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7279 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7280 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7281}
7282
7283
7284/** Opcode 0x30. */
7285FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7286{
7287 IEMOP_MNEMONIC("xor Eb,Gb");
7288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7289 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7290}
7291
7292
7293/** Opcode 0x31. */
7294FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7295{
7296 IEMOP_MNEMONIC("xor Ev,Gv");
7297 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7298 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7299}
7300
7301
7302/** Opcode 0x32. */
7303FNIEMOP_DEF(iemOp_xor_Gb_Eb)
7304{
7305 IEMOP_MNEMONIC("xor Gb,Eb");
7306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7307 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
7308}
7309
7310
7311/** Opcode 0x33. */
7312FNIEMOP_DEF(iemOp_xor_Gv_Ev)
7313{
7314 IEMOP_MNEMONIC("xor Gv,Ev");
7315 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7316 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
7317}
7318
7319
7320/** Opcode 0x34. */
7321FNIEMOP_DEF(iemOp_xor_Al_Ib)
7322{
7323 IEMOP_MNEMONIC("xor al,Ib");
7324 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7325 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
7326}
7327
7328
7329/** Opcode 0x35. */
7330FNIEMOP_DEF(iemOp_xor_eAX_Iz)
7331{
7332 IEMOP_MNEMONIC("xor rAX,Iz");
7333 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7334 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
7335}
7336
7337
7338/** Opcode 0x36. */
7339FNIEMOP_DEF(iemOp_seg_SS)
7340{
7341 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
7342 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
7343 pIemCpu->iEffSeg = X86_SREG_SS;
7344
7345 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7346 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7347}
7348
7349
7350/** Opcode 0x37. */
7351FNIEMOP_STUB(iemOp_aaa);
7352
7353
7354/** Opcode 0x38. */
7355FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7356{
7357 IEMOP_MNEMONIC("cmp Eb,Gb");
7358 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7359 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7360}
7361
7362
7363/** Opcode 0x39. */
7364FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7365{
7366 IEMOP_MNEMONIC("cmp Ev,Gv");
7367 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7368 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7369}
7370
7371
7372/** Opcode 0x3a. */
7373FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7374{
7375 IEMOP_MNEMONIC("cmp Gb,Eb");
7376 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7377}
7378
7379
7380/** Opcode 0x3b. */
7381FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7382{
7383 IEMOP_MNEMONIC("cmp Gv,Ev");
7384 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7385}
7386
7387
7388/** Opcode 0x3c. */
7389FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7390{
7391 IEMOP_MNEMONIC("cmp al,Ib");
7392 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7393}
7394
7395
7396/** Opcode 0x3d. */
7397FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7398{
7399 IEMOP_MNEMONIC("cmp rAX,Iz");
7400 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
7401}
7402
7403
7404/** Opcode 0x3e. */
7405FNIEMOP_DEF(iemOp_seg_DS)
7406{
7407 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
7408 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
7409 pIemCpu->iEffSeg = X86_SREG_DS;
7410
7411 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7412 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7413}
7414
7415
7416/** Opcode 0x3f. */
7417FNIEMOP_STUB(iemOp_aas);
7418
7419/**
7420 * Common 'inc/dec/not/neg register' helper.
7421 */
7422FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7423{
7424 IEMOP_HLP_NO_LOCK_PREFIX();
7425 switch (pIemCpu->enmEffOpSize)
7426 {
7427 case IEMMODE_16BIT:
7428 IEM_MC_BEGIN(2, 0);
7429 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7430 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7431 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7432 IEM_MC_REF_EFLAGS(pEFlags);
7433 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7434 IEM_MC_ADVANCE_RIP();
7435 IEM_MC_END();
7436 return VINF_SUCCESS;
7437
7438 case IEMMODE_32BIT:
7439 IEM_MC_BEGIN(2, 0);
7440 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7441 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7442 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7443 IEM_MC_REF_EFLAGS(pEFlags);
7444 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7445 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7446 IEM_MC_ADVANCE_RIP();
7447 IEM_MC_END();
7448 return VINF_SUCCESS;
7449
7450 case IEMMODE_64BIT:
7451 IEM_MC_BEGIN(2, 0);
7452 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7453 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7454 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7455 IEM_MC_REF_EFLAGS(pEFlags);
7456 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7457 IEM_MC_ADVANCE_RIP();
7458 IEM_MC_END();
7459 return VINF_SUCCESS;
7460 }
7461 return VINF_SUCCESS;
7462}
7463
7464
7465/** Opcode 0x40. */
7466FNIEMOP_DEF(iemOp_inc_eAX)
7467{
7468 /*
7469 * This is a REX prefix in 64-bit mode.
7470 */
7471 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7472 {
7473 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
7474 pIemCpu->fPrefixes |= IEM_OP_PRF_REX;
7475
7476 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7477 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7478 }
7479
7480 IEMOP_MNEMONIC("inc eAX");
7481 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
7482}
7483
7484
7485/** Opcode 0x41. */
7486FNIEMOP_DEF(iemOp_inc_eCX)
7487{
7488 /*
7489 * This is a REX prefix in 64-bit mode.
7490 */
7491 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7492 {
7493 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7494 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7495 pIemCpu->uRexB = 1 << 3;
7496
7497 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7498 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7499 }
7500
7501 IEMOP_MNEMONIC("inc eCX");
7502 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
7503}
7504
7505
7506/** Opcode 0x42. */
7507FNIEMOP_DEF(iemOp_inc_eDX)
7508{
7509 /*
7510 * This is a REX prefix in 64-bit mode.
7511 */
7512 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7513 {
7514 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
7515 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
7516 pIemCpu->uRexIndex = 1 << 3;
7517
7518 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7519 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7520 }
7521
7522 IEMOP_MNEMONIC("inc eDX");
7523 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
7524}
7525
7526
7527
7528/** Opcode 0x43. */
7529FNIEMOP_DEF(iemOp_inc_eBX)
7530{
7531 /*
7532 * This is a REX prefix in 64-bit mode.
7533 */
7534 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7535 {
7536 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
7537 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7538 pIemCpu->uRexB = 1 << 3;
7539 pIemCpu->uRexIndex = 1 << 3;
7540
7541 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7542 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7543 }
7544
7545 IEMOP_MNEMONIC("inc eBX");
7546 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
7547}
7548
7549
7550/** Opcode 0x44. */
7551FNIEMOP_DEF(iemOp_inc_eSP)
7552{
7553 /*
7554 * This is a REX prefix in 64-bit mode.
7555 */
7556 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7557 {
7558 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
7559 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
7560 pIemCpu->uRexReg = 1 << 3;
7561
7562 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7563 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7564 }
7565
7566 IEMOP_MNEMONIC("inc eSP");
7567 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
7568}
7569
7570
7571/** Opcode 0x45. */
7572FNIEMOP_DEF(iemOp_inc_eBP)
7573{
7574 /*
7575 * This is a REX prefix in 64-bit mode.
7576 */
7577 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7578 {
7579 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
7580 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
7581 pIemCpu->uRexReg = 1 << 3;
7582 pIemCpu->uRexB = 1 << 3;
7583
7584 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7585 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7586 }
7587
7588 IEMOP_MNEMONIC("inc eBP");
7589 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
7590}
7591
7592
7593/** Opcode 0x46. */
7594FNIEMOP_DEF(iemOp_inc_eSI)
7595{
7596 /*
7597 * This is a REX prefix in 64-bit mode.
7598 */
7599 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7600 {
7601 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
7602 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
7603 pIemCpu->uRexReg = 1 << 3;
7604 pIemCpu->uRexIndex = 1 << 3;
7605
7606 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7607 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7608 }
7609
7610 IEMOP_MNEMONIC("inc eSI");
7611 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
7612}
7613
7614
7615/** Opcode 0x47. */
7616FNIEMOP_DEF(iemOp_inc_eDI)
7617{
7618 /*
7619 * This is a REX prefix in 64-bit mode.
7620 */
7621 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7622 {
7623 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
7624 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7625 pIemCpu->uRexReg = 1 << 3;
7626 pIemCpu->uRexB = 1 << 3;
7627 pIemCpu->uRexIndex = 1 << 3;
7628
7629 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7630 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7631 }
7632
7633 IEMOP_MNEMONIC("inc eDI");
7634 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
7635}
7636
7637
7638/** Opcode 0x48. */
7639FNIEMOP_DEF(iemOp_dec_eAX)
7640{
7641 /*
7642 * This is a REX prefix in 64-bit mode.
7643 */
7644 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7645 {
7646 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
7647 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
7648 iemRecalEffOpSize(pIemCpu);
7649
7650 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7651 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7652 }
7653
7654 IEMOP_MNEMONIC("dec eAX");
7655 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
7656}
7657
7658
7659/** Opcode 0x49. */
7660FNIEMOP_DEF(iemOp_dec_eCX)
7661{
7662 /*
7663 * This is a REX prefix in 64-bit mode.
7664 */
7665 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7666 {
7667 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
7668 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7669 pIemCpu->uRexB = 1 << 3;
7670 iemRecalEffOpSize(pIemCpu);
7671
7672 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7673 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7674 }
7675
7676 IEMOP_MNEMONIC("dec eCX");
7677 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
7678}
7679
7680
7681/** Opcode 0x4a. */
7682FNIEMOP_DEF(iemOp_dec_eDX)
7683{
7684 /*
7685 * This is a REX prefix in 64-bit mode.
7686 */
7687 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7688 {
7689 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
7690 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7691 pIemCpu->uRexIndex = 1 << 3;
7692 iemRecalEffOpSize(pIemCpu);
7693
7694 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7695 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7696 }
7697
7698 IEMOP_MNEMONIC("dec eDX");
7699 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
7700}
7701
7702
7703/** Opcode 0x4b. */
7704FNIEMOP_DEF(iemOp_dec_eBX)
7705{
7706 /*
7707 * This is a REX prefix in 64-bit mode.
7708 */
7709 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7710 {
7711 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
7712 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7713 pIemCpu->uRexB = 1 << 3;
7714 pIemCpu->uRexIndex = 1 << 3;
7715 iemRecalEffOpSize(pIemCpu);
7716
7717 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7718 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7719 }
7720
7721 IEMOP_MNEMONIC("dec eBX");
7722 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
7723}
7724
7725
7726/** Opcode 0x4c. */
7727FNIEMOP_DEF(iemOp_dec_eSP)
7728{
7729 /*
7730 * This is a REX prefix in 64-bit mode.
7731 */
7732 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7733 {
7734 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
7735 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
7736 pIemCpu->uRexReg = 1 << 3;
7737 iemRecalEffOpSize(pIemCpu);
7738
7739 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7740 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7741 }
7742
7743 IEMOP_MNEMONIC("dec eSP");
7744 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
7745}
7746
7747
7748/** Opcode 0x4d. */
7749FNIEMOP_DEF(iemOp_dec_eBP)
7750{
7751 /*
7752 * This is a REX prefix in 64-bit mode.
7753 */
7754 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7755 {
7756 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
7757 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7758 pIemCpu->uRexReg = 1 << 3;
7759 pIemCpu->uRexB = 1 << 3;
7760 iemRecalEffOpSize(pIemCpu);
7761
7762 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7763 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7764 }
7765
7766 IEMOP_MNEMONIC("dec eBP");
7767 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
7768}
7769
7770
7771/** Opcode 0x4e. */
7772FNIEMOP_DEF(iemOp_dec_eSI)
7773{
7774 /*
7775 * This is a REX prefix in 64-bit mode.
7776 */
7777 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7778 {
7779 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
7780 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7781 pIemCpu->uRexReg = 1 << 3;
7782 pIemCpu->uRexIndex = 1 << 3;
7783 iemRecalEffOpSize(pIemCpu);
7784
7785 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7786 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7787 }
7788
7789 IEMOP_MNEMONIC("dec eSI");
7790 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
7791}
7792
7793
7794/** Opcode 0x4f. */
7795FNIEMOP_DEF(iemOp_dec_eDI)
7796{
7797 /*
7798 * This is a REX prefix in 64-bit mode.
7799 */
7800 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7801 {
7802 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
7803 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7804 pIemCpu->uRexReg = 1 << 3;
7805 pIemCpu->uRexB = 1 << 3;
7806 pIemCpu->uRexIndex = 1 << 3;
7807 iemRecalEffOpSize(pIemCpu);
7808
7809 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7810 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7811 }
7812
7813 IEMOP_MNEMONIC("dec eDI");
7814 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
7815}
7816
7817
7818/**
7819 * Common 'push register' helper.
7820 */
7821FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
7822{
7823 IEMOP_HLP_NO_LOCK_PREFIX();
7824 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7825 {
7826 iReg |= pIemCpu->uRexB;
7827 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
7828 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
7829 }
7830
7831 switch (pIemCpu->enmEffOpSize)
7832 {
7833 case IEMMODE_16BIT:
7834 IEM_MC_BEGIN(0, 1);
7835 IEM_MC_LOCAL(uint16_t, u16Value);
7836 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
7837 IEM_MC_PUSH_U16(u16Value);
7838 IEM_MC_ADVANCE_RIP();
7839 IEM_MC_END();
7840 break;
7841
7842 case IEMMODE_32BIT:
7843 IEM_MC_BEGIN(0, 1);
7844 IEM_MC_LOCAL(uint32_t, u32Value);
7845 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
7846 IEM_MC_PUSH_U32(u32Value);
7847 IEM_MC_ADVANCE_RIP();
7848 IEM_MC_END();
7849 break;
7850
7851 case IEMMODE_64BIT:
7852 IEM_MC_BEGIN(0, 1);
7853 IEM_MC_LOCAL(uint64_t, u64Value);
7854 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
7855 IEM_MC_PUSH_U64(u64Value);
7856 IEM_MC_ADVANCE_RIP();
7857 IEM_MC_END();
7858 break;
7859 }
7860
7861 return VINF_SUCCESS;
7862}
7863
7864
7865/** Opcode 0x50. */
7866FNIEMOP_DEF(iemOp_push_eAX)
7867{
7868 IEMOP_MNEMONIC("push rAX");
7869 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
7870}
7871
7872
7873/** Opcode 0x51. */
7874FNIEMOP_DEF(iemOp_push_eCX)
7875{
7876 IEMOP_MNEMONIC("push rCX");
7877 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
7878}
7879
7880
7881/** Opcode 0x52. */
7882FNIEMOP_DEF(iemOp_push_eDX)
7883{
7884 IEMOP_MNEMONIC("push rDX");
7885 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
7886}
7887
7888
7889/** Opcode 0x53. */
7890FNIEMOP_DEF(iemOp_push_eBX)
7891{
7892 IEMOP_MNEMONIC("push rBX");
7893 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
7894}
7895
7896
7897/** Opcode 0x54. */
7898FNIEMOP_DEF(iemOp_push_eSP)
7899{
7900 IEMOP_MNEMONIC("push rSP");
7901 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
7902}
7903
7904
7905/** Opcode 0x55. */
7906FNIEMOP_DEF(iemOp_push_eBP)
7907{
7908 IEMOP_MNEMONIC("push rBP");
7909 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
7910}
7911
7912
7913/** Opcode 0x56. */
7914FNIEMOP_DEF(iemOp_push_eSI)
7915{
7916 IEMOP_MNEMONIC("push rSI");
7917 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
7918}
7919
7920
7921/** Opcode 0x57. */
7922FNIEMOP_DEF(iemOp_push_eDI)
7923{
7924 IEMOP_MNEMONIC("push rDI");
7925 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
7926}
7927
7928
7929/**
7930 * Common 'pop register' helper.
7931 */
7932FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
7933{
7934 IEMOP_HLP_NO_LOCK_PREFIX();
7935 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7936 {
7937 iReg |= pIemCpu->uRexB;
7938 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
7939 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
7940 }
7941
7942 switch (pIemCpu->enmEffOpSize)
7943 {
7944 case IEMMODE_16BIT:
7945 IEM_MC_BEGIN(0, 1);
7946 IEM_MC_LOCAL(uint16_t, *pu16Dst);
7947 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7948 IEM_MC_POP_U16(pu16Dst);
7949 IEM_MC_ADVANCE_RIP();
7950 IEM_MC_END();
7951 break;
7952
7953 case IEMMODE_32BIT:
7954 IEM_MC_BEGIN(0, 1);
7955 IEM_MC_LOCAL(uint32_t, *pu32Dst);
7956 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7957 IEM_MC_POP_U32(pu32Dst);
7958 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
7959 IEM_MC_ADVANCE_RIP();
7960 IEM_MC_END();
7961 break;
7962
7963 case IEMMODE_64BIT:
7964 IEM_MC_BEGIN(0, 1);
7965 IEM_MC_LOCAL(uint64_t, *pu64Dst);
7966 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7967 IEM_MC_POP_U64(pu64Dst);
7968 IEM_MC_ADVANCE_RIP();
7969 IEM_MC_END();
7970 break;
7971 }
7972
7973 return VINF_SUCCESS;
7974}
7975
7976
7977/** Opcode 0x58. */
7978FNIEMOP_DEF(iemOp_pop_eAX)
7979{
7980 IEMOP_MNEMONIC("pop rAX");
7981 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
7982}
7983
7984
7985/** Opcode 0x59. */
7986FNIEMOP_DEF(iemOp_pop_eCX)
7987{
7988 IEMOP_MNEMONIC("pop rCX");
7989 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
7990}
7991
7992
7993/** Opcode 0x5a. */
7994FNIEMOP_DEF(iemOp_pop_eDX)
7995{
7996 IEMOP_MNEMONIC("pop rDX");
7997 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
7998}
7999
8000
8001/** Opcode 0x5b. */
8002FNIEMOP_DEF(iemOp_pop_eBX)
8003{
8004 IEMOP_MNEMONIC("pop rBX");
8005 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8006}
8007
8008
/** Opcode 0x5c. POP rSP/eSP/SP - pop the stack top into xSP.
 *
 * Needs special handling because the destination is the stack pointer
 * itself: the pop is done into a local first and only then stored, so the
 * address calculation uses the old xSP value.  With REX.B this is POP r12
 * and goes through the common worker instead. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* REX.B: POP r12. */
        /* In 64-bit mode the default operand size is 64 bits; 66h selects 16 bits. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8056
8057
/** Opcode 0x5d. POP rBP/eBP/BP - pop the stack top into xBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8064
8065
/** Opcode 0x5e. POP rSI/eSI/SI - pop the stack top into xSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8072
8073
/** Opcode 0x5f. POP rDI/eDI/DI - pop the stack top into xDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8080
8081
/** Opcode 0x60. PUSHA/PUSHAD - push all general-purpose registers.
 *  Invalid in 64-bit mode; deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();   /* #UD in 64-bit mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside long mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8092
8093
/** Opcode 0x61. POPA/POPAD - pop all general-purpose registers.
 *  Invalid in 64-bit mode; deferred to a C implementation per operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();   /* #UD in 64-bit mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only 16/32-bit possible outside long mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8104
8105
/** Opcode 0x62. BOUND (non-64-bit) / EVEX prefix byte - not implemented yet (stub raises an error). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8108
8109
/** Opcode 0x63 - non-64-bit modes. ARPL Ew,Gw - adjust RPL field of a
 *  selector; always a 16-bit operation.  Not valid in real or V86 mode. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* protected mode only */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: reference it directly, no mapping needed. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map read-write, operate, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);    /* arg backed by local so we can commit after */
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);  /* write back the adjusted selector */
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8158
8159
/** Opcode 0x63. MOVSXD Gv,Ev (64-bit mode) - sign-extend a 32-bit source
 * into a 64-bit register.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the low 32 bits of the source GPR, sign-extended to 64 bits. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst); /* 32-bit load, sign-extended */
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8201
8202
/** Opcode 0x64. FS segment override prefix - records the override and
 *  decodes the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* Any REX prefix decoded earlier no longer immediately precedes the opcode and is dropped. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);   /* continue decoding with the next byte */
}
8213
8214
/** Opcode 0x65. GS segment override prefix - records the override and
 *  decodes the following opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* Any REX prefix decoded earlier no longer immediately precedes the opcode and is dropped. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);   /* continue decoding with the next byte */
}
8225
8226
/** Opcode 0x66. Operand-size override prefix - flags it, recalculates the
 *  effective operand size and decodes the following opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Any REX prefix decoded earlier no longer immediately precedes the opcode and is dropped. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);   /* continue decoding with the next byte */
}
8237
8238
/** Opcode 0x67. Address-size override prefix - toggles the effective
 *  address mode relative to the default and decodes the next opcode byte.
 *  (16 <-> 32 in legacy modes; 64 -> 32 in long mode.) */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Any REX prefix decoded earlier no longer immediately precedes the opcode and is dropped. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);   /* continue decoding with the next byte */
}
8255
8256
/** Opcode 0x68. PUSH Iz - push a word/dword immediate (sign-extended
 *  dword in 64-bit mode).  Default operand size is 64-bit in long mode. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8300
8301
/** Opcode 0x69. IMUL Gv,Ev,Iz - three-operand signed multiply with a
 *  full-size immediate (sign-extended dword in 64-bit mode).  The result
 *  is computed in a local and only stored to the destination register
 *  afterwards.  SF/ZF/AF/PF are undefined after IMUL. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);      /* multiply into the local... */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* The 2 tells the EA calc that 2 immediate bytes still follow (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8460
8461
/** Opcode 0x6a. PUSH Ib - push a sign-extended byte immediate.
 *  Default operand size is 64-bit in long mode. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);     /* sign-extended by the implicit int8_t conversion */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8487
8488
/** Opcode 0x6b. IMUL Gv,Ev,Ib - three-operand signed multiply with a
 *  sign-extended byte immediate.  Same pattern as opcode 0x69: the result
 *  goes through a local before being stored to the destination register.
 *  SF/ZF/AF/PF are undefined after IMUL. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend the byte */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend the byte */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend the byte */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8641
8642
/** Opcode 0x6c. INSB - input byte(s) from port DX to ES:[xDI].
 *  Deferred to C implementations selected by address size; REPNZ is
 *  treated the same as REP here. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8670
8671
/** Opcode 0x6d. INSW/INSD - input word/dword(s) from port DX to ES:[xDI].
 *  Deferred to C implementations selected by operand and address size.
 *  Note the 64-bit operand size is handled by the 32-bit workers (no
 *  64-bit port I/O); REPNZ is treated the same as REP. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case; the breaks above are not reached */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case; the breaks above are not reached */
        }
    }
}
8731
8732
/** Opcode 0x6e. OUTSB - output byte(s) from DS:[xSI] (segment overridable)
 *  to port DX.  Deferred to C implementations selected by address size;
 *  REPNZ is treated the same as REP. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8760
8761
/** Opcode 0x6f. OUTSW/OUTSD - output word/dword(s) from DS:[xSI] (segment
 *  overridable) to port DX.  Deferred to C implementations selected by
 *  operand and address size.  The 64-bit operand size is handled by the
 *  32-bit workers (no 64-bit port I/O); REPNZ is treated the same as REP. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case; the breaks above are not reached */
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default case; the breaks above are not reached */
        }
    }
}
8821
8822
/** Opcode 0x70. JO rel8 - jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8840
8841
/** Opcode 0x71. JNO rel8 - jump short if not overflow (OF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8859
/** Opcode 0x72. JC/JB/JNAE rel8 - jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8877
8878
/** Opcode 0x73. JNC/JNB/JAE rel8 - jump short if not carry (CF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8896
8897
/** Opcode 0x74. JE/JZ rel8 - jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8915
8916
/** Opcode 0x75. JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8934
8935
/** Opcode 0x76. JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8953
8954
/** Opcode 0x77. JNBE/JA rel8 - jump short if above (CF=0 and ZF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8972
8973
/** Opcode 0x78. JS rel8 - jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8991
8992
/** Opcode 0x79. JNS rel8 - jump short if not sign (SF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9010
9011
/** Opcode 0x7a. JP/JPE rel8 - jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9029
9030
/** Opcode 0x7b. JNP/JPO rel8 - jump short if parity odd (PF=0).
 *  Inverted sense: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9048
9049
/** Opcode 0x7c. JL/JNGE rel8 - jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);       /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();           /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9067
9068
/**
 * @opcode 0x7d - JNL/JGE Jb (short jump if not less, signed: SF == OF).
 *
 * Inverse of JL: the arms are swapped so the jump is taken when SF equals OF.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* SF != OF: condition false, fall through. */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* SF == OF: take the jump. */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9086
9087
/**
 * @opcode 0x7e - JLE/JNG Jb (short jump if less or equal, signed:
 *                ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF=1 or SF != OF: take the jump. */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* otherwise fall through. */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9105
9106
/**
 * @opcode 0x7f - JNLE/JG Jb (short jump if greater, signed:
 *                ZF=0 and SF == OF).
 *
 * Inverse of JLE: the arms are swapped so the jump is taken only when the
 * less-or-equal condition is false.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* ZF=1 or SF != OF: condition false, fall through. */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF=0 and SF == OF: take the jump. */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9124
9125
/**
 * @opcode 0x80 - Group 1 Eb,Ib: ADD, OR, ADC, SBB, AND, SUB, XOR or CMP on
 * a byte destination with a byte immediate; ModRM.reg selects the operation.
 *
 * LOCK is accepted only for the read-modify-write forms (those with a locked
 * worker) on a memory destination.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: one 4-byte (NUL padded) entry per ModRM.reg value. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8) /* locked worker present => RMW op, LOCK prefix allowed */
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R; /* CMP only reads the destination. */
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address must be decoded before the trailing immediate;
           the '1' tells the calculator one immediate byte follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9184
9185
9186/** Opcode 0x81. */
9187FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9188{
9189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9190 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9191 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9192
9193 switch (pIemCpu->enmEffOpSize)
9194 {
9195 case IEMMODE_16BIT:
9196 {
9197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9198 {
9199 /* register target */
9200 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9201 IEMOP_HLP_NO_LOCK_PREFIX();
9202 IEM_MC_BEGIN(3, 0);
9203 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9204 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9206
9207 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9208 IEM_MC_REF_EFLAGS(pEFlags);
9209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9210
9211 IEM_MC_ADVANCE_RIP();
9212 IEM_MC_END();
9213 }
9214 else
9215 {
9216 /* memory target */
9217 uint32_t fAccess;
9218 if (pImpl->pfnLockedU16)
9219 fAccess = IEM_ACCESS_DATA_RW;
9220 else
9221 { /* CMP, TEST */
9222 IEMOP_HLP_NO_LOCK_PREFIX();
9223 fAccess = IEM_ACCESS_DATA_R;
9224 }
9225 IEM_MC_BEGIN(3, 2);
9226 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9227 IEM_MC_ARG(uint16_t, u16Src, 1);
9228 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9230
9231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9232 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9233 IEM_MC_ASSIGN(u16Src, u16Imm);
9234 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9235 IEM_MC_FETCH_EFLAGS(EFlags);
9236 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9237 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9238 else
9239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9240
9241 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9242 IEM_MC_COMMIT_EFLAGS(EFlags);
9243 IEM_MC_ADVANCE_RIP();
9244 IEM_MC_END();
9245 }
9246 break;
9247 }
9248
9249 case IEMMODE_32BIT:
9250 {
9251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9252 {
9253 /* register target */
9254 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9255 IEMOP_HLP_NO_LOCK_PREFIX();
9256 IEM_MC_BEGIN(3, 0);
9257 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9258 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9259 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9260
9261 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9262 IEM_MC_REF_EFLAGS(pEFlags);
9263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9264 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9265
9266 IEM_MC_ADVANCE_RIP();
9267 IEM_MC_END();
9268 }
9269 else
9270 {
9271 /* memory target */
9272 uint32_t fAccess;
9273 if (pImpl->pfnLockedU32)
9274 fAccess = IEM_ACCESS_DATA_RW;
9275 else
9276 { /* CMP, TEST */
9277 IEMOP_HLP_NO_LOCK_PREFIX();
9278 fAccess = IEM_ACCESS_DATA_R;
9279 }
9280 IEM_MC_BEGIN(3, 2);
9281 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9282 IEM_MC_ARG(uint32_t, u32Src, 1);
9283 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9285
9286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9287 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9288 IEM_MC_ASSIGN(u32Src, u32Imm);
9289 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9290 IEM_MC_FETCH_EFLAGS(EFlags);
9291 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9293 else
9294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9295
9296 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9297 IEM_MC_COMMIT_EFLAGS(EFlags);
9298 IEM_MC_ADVANCE_RIP();
9299 IEM_MC_END();
9300 }
9301 break;
9302 }
9303
9304 case IEMMODE_64BIT:
9305 {
9306 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9307 {
9308 /* register target */
9309 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9310 IEMOP_HLP_NO_LOCK_PREFIX();
9311 IEM_MC_BEGIN(3, 0);
9312 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9313 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9315
9316 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9317 IEM_MC_REF_EFLAGS(pEFlags);
9318 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9319
9320 IEM_MC_ADVANCE_RIP();
9321 IEM_MC_END();
9322 }
9323 else
9324 {
9325 /* memory target */
9326 uint32_t fAccess;
9327 if (pImpl->pfnLockedU64)
9328 fAccess = IEM_ACCESS_DATA_RW;
9329 else
9330 { /* CMP */
9331 IEMOP_HLP_NO_LOCK_PREFIX();
9332 fAccess = IEM_ACCESS_DATA_R;
9333 }
9334 IEM_MC_BEGIN(3, 2);
9335 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9336 IEM_MC_ARG(uint64_t, u64Src, 1);
9337 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9339
9340 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9341 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9342 IEM_MC_ASSIGN(u64Src, u64Imm);
9343 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9344 IEM_MC_FETCH_EFLAGS(EFlags);
9345 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9347 else
9348 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9349
9350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9351 IEM_MC_COMMIT_EFLAGS(EFlags);
9352 IEM_MC_ADVANCE_RIP();
9353 IEM_MC_END();
9354 }
9355 break;
9356 }
9357 }
9358 return VINF_SUCCESS;
9359}
9360
9361
/**
 * @opcode 0x82 - Group 1 Eb,Ib alias of opcode 0x80.
 *
 * This redundant encoding only exists outside 64-bit mode (it raises \#UD
 * in long mode); otherwise it decodes exactly like 0x80.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9368
9369
/**
 * @opcode 0x83 - Group 1 Ev,Ib: ADD, OR, ADC, SBB, AND, SUB, XOR or CMP on
 * a word/dword/qword destination with a sign-extended byte immediate;
 * ModRM.reg selects the operation.
 *
 * The (int8_t) casts below perform the sign extension of the imm8 to the
 * effective operand size.  LOCK is accepted only for the read-modify-write
 * forms on a memory destination.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: one 4-byte (NUL padded) entry per ModRM.reg value. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 in long mode. */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 -> 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        /* NOTE(review): only the U16 entry is checked for all operand sizes -
           presumably an op has locked workers for either every size or none. */
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte follows it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 -> 16 bits */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte follows it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 -> 32 bits */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 immediate byte follows it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 -> 64 bits */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9530
9531
/**
 * @opcode 0x84 - TEST Eb,Gb (AND without writing the result; flags only).
 *
 * Delegates to the generic byte binary-operator decoder with the TEST
 * implementation table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9540
9541
/**
 * @opcode 0x85 - TEST Ev,Gv (AND without writing the result; flags only).
 *
 * Delegates to the generic word/dword/qword binary-operator decoder with
 * the TEST implementation table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9550
9551
/**
 * @opcode 0x86 - XCHG Eb,Gb (exchange byte register with register/memory).
 *
 * The register/register form is done with two fetches and two stores; the
 * memory form maps the destination read-write and lets the assembly worker
 * do the swap.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is invalid for the register form. */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9599
9600
/**
 * @opcode 0x87 - XCHG Ev,Gv (exchange word/dword/qword register with
 * register/memory).
 *
 * Register/register: two fetches, two crosswise stores.  Memory: the
 * destination is mapped read-write and the assembly worker does the swap.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is invalid for the register form. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                /* Note: the U32 stores implicitly zero bits 63:32 of the full register. */
                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* the worker wrote via reference, so clear bits 63:32 explicitly */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9722
9723
/**
 * @opcode 0x88 - MOV Eb,Gb (store byte register to register/memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch Gb, store into Eb. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9762
9763
/**
 * @opcode 0x89 - MOV Ev,Gv (store word/dword/qword register to
 * register/memory), sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9850
9851
/**
 * @opcode 0x8a - MOV Gb,Eb (load byte register from register/memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch Eb, store into Gb. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9888
9889
/**
 * @opcode 0x8b - MOV Gv,Ev (load word/dword/qword register from
 * register/memory), sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9976
9977
9978/** Opcode 0x63. */
9979FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
9980{
9981 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
9982 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
9983 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
9984 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
9985 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
9986}
9987
9988
/**
 * @opcode 0x8c - MOV Ev,Sw (store segment register to register/memory).
 *
 * The register form honours the operand size (zero-extending to 32/64
 * bits), while the memory form always stores exactly 16 bits.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10061
10062
10063
10064
/** Opcode 0x8d. LEA Gv,M - store the effective address of M in Gv. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* LEA has no register form; mod=3 is an invalid encoding (#UD). */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    /* The address is never dereferenced; it is truncated (16/32-bit) or
       stored as-is (64-bit) into the destination register. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10109
10110
/** Opcode 0x8e. MOV Sw,Ev - load a segment register from a word. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * invalid, as are the reg values above GS.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * The actual segment load (descriptor fetch, checks) is done by the
     * iemCImpl_load_SReg C implementation in both branches.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10164
10165
/** Opcode 0x8f /0. POP Ev - pop the stack into a register or memory operand. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The opcode read position is
       saved/restored because the second decode pass below re-reads the same
       displacement/SIB bytes. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Temporarily advance rSP by the operand size, redo the effective address
       calculation with it, then restore rSP (commit happens at the end). */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS); /* first pass succeeded, so must this one */
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Only commit rSP and advance RIP if both pop and store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10267
10268
10269/** Opcode 0x8f. */
10270FNIEMOP_DEF(iemOp_Grp1A)
10271{
10272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10273 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10274 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10275
10276 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10277 /** @todo XOP decoding. */
10278 IEMOP_MNEMONIC("3-byte-xop");
10279 return IEMOP_RAISE_INVALID_OPCODE();
10280}
10281
10282
/**
 * Common 'xchg reg,rAX' helper for opcodes 0x90..0x97.
 *
 * Exchanges the given general register (extended by REX.B) with rAX at the
 * current effective operand size.  Note that the 32-bit stores zero-extend
 * per the usual IEM_MC_STORE_GREG_U32 behavior.
 *
 * @param   iReg    The low 3 bits of the register index (REX.B is OR'ed in
 *                  here).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /* register form of xchg cannot be locked */

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10332
10333
10334/** Opcode 0x90. */
10335FNIEMOP_DEF(iemOp_nop)
10336{
10337 /* R8/R8D and RAX/EAX can be exchanged. */
10338 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10339 {
10340 IEMOP_MNEMONIC("xchg r8,rAX");
10341 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10342 }
10343
10344 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10345 IEMOP_MNEMONIC("pause");
10346 else
10347 IEMOP_MNEMONIC("nop");
10348 IEM_MC_BEGIN(0, 0);
10349 IEM_MC_ADVANCE_RIP();
10350 IEM_MC_END();
10351 return VINF_SUCCESS;
10352}
10353
10354
/** Opcode 0x91. XCHG rCX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10361
10362
/** Opcode 0x92. XCHG rDX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10369
10370
/** Opcode 0x93. XCHG rBX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10377
10378
10379/** Opcode 0x94. */
10380FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10381{
10382 IEMOP_MNEMONIC("xchg rSX,rAX");
10383 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10384}
10385
10386
/** Opcode 0x95. XCHG rBP,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10393
10394
/** Opcode 0x96. XCHG rSI,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10401
10402
/** Opcode 0x97. XCHG rDI,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10409
10410
/** Opcode 0x98. CBW/CWDE/CDQE - sign extend the lower half of rAX in place. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Replicate AL bit 7 into AH (sign extend AL -> AX). */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Replicate AX bit 15 into the upper word (sign extend AX -> EAX). */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Replicate EAX bit 31 into the upper dword (sign extend EAX -> RAX). */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10456
10457
/** Opcode 0x99. CWD/CDQ/CQO - sign extend rAX into rDX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            /* DX = (AX bit 15) ? 0xffff : 0 */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            /* EDX = (EAX bit 31) ? 0xffffffff : 0 */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            /* RDX = (RAX bit 63) ? all ones : 0 */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10503
10504
/** Opcode 0x9a. CALL Ap - far call with immediate selector:offset (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg); /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10521
10522
/** Opcode 0x9b. (aka fwait)
 * Only checks for pending FPU exceptions / device-not-available; no other
 * side effects. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10536
10537
/** Opcode 0x9c. PUSHF/PUSHFD/PUSHFQ - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10545
10546
/** Opcode 0x9d. POPF/POPFD/POPFQ - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10554
10555
/** Opcode 0x9e. SAHF - store AH into the low byte of EFLAGS (SF,ZF,AF,PF,CF). */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode SAHF is only valid when CPUID reports LAHF/SAHF support. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the SAHF-writable flags from AH, force the reserved
       always-one bit (X86_EFL_1), and merge into the unchanged upper bits. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10578
10579
/** Opcode 0x9f. LAHF - load the low byte of EFLAGS into AH. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode LAHF is only valid when CPUID reports LAHF/SAHF support. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10596
10597
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective *address* size (16, 32 or
 * 64 bits) and is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10622
/** Opcode 0xa0. MOV AL,Ob - load AL from a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10643
10644
/** Opcode 0xa1. MOV rAX,Ov - load rAX from a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX at the effective operand size.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10690
10691
/** Opcode 0xa2. MOV Ob,AL - store AL at a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10712
10713
/** Opcode 0xa3. MOV Ov,rAX - store rAX at a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX at the effective operand size.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10758
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeating MOVS step: load ValBits bits from [effSeg:xSI],
 * store them to [ES:xDI], then advance (or, if EFLAGS.DF is set, retreat)
 * both index registers by ValBits/8 using AddrBits-wide register accesses. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10777
/** Opcode 0xa4. MOVSB - byte string move; rep forms go to the C implementation. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 are treated the same for MOVS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10811
10812
/** Opcode 0xa5. MOVSW/MOVSD/MOVSQ - word/dword/qword string move. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 are treated the same for MOVS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break before the 64-bit case, but every path
                   through the inner switch returns, so no fall through occurs. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10895
10896#undef IEM_MOVS_CASE
10897
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeating CMPS step: load ValBits bits from [effSeg:xSI] and
 * [ES:xDI], compare them via iemAImpl_cmp_uNN (updating EFLAGS only), then
 * advance (or, if EFLAGS.DF is set, retreat) both index registers by
 * ValBits/8 using AddrBits-wide register accesses. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
10924
10925/** Opcode 0xa6. */
10926FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
10927{
10928 IEMOP_HLP_NO_LOCK_PREFIX();
10929
10930 /*
10931 * Use the C implementation if a repeat prefix is encountered.
10932 */
10933 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10934 {
10935 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10936 switch (pIemCpu->enmEffAddrMode)
10937 {
10938 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
10939 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
10940 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
10941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10942 }
10943 }
10944 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10945 {
10946 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10947 switch (pIemCpu->enmEffAddrMode)
10948 {
10949 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
10950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
10951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
10952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10953 }
10954 }
10955 IEMOP_MNEMONIC("cmps Xb,Yb");
10956
10957 /*
10958 * Sharing case implementation with cmps[wdq] below.
10959 */
10960 switch (pIemCpu->enmEffAddrMode)
10961 {
10962 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
10963 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
10964 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
10965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10966 }
10967 return VINF_SUCCESS;
10968
10969}
10970
10971
/** Opcode 0xa7. CMPSW/CMPSD/CMPSQ - word/dword/qword string compare. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break before the 64-bit case, but every path
                   through the inner switch returns, so no fall through occurs. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): same unreachable fall through as above. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11090
11091#undef IEM_CMPS_CASE
11092
/** Opcode 0xa8 - TEST AL, imm8 (AND without writeback, flags only). */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11100
11101
/** Opcode 0xa9 - TEST rAX, imm16/32 (imm32 sign-extended for 64-bit operand size). */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11109
11110
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one non-repeated STOS iteration: store the low ValBits bits of xAX
 * to ES:xDI, then advance or retreat xDI by ValBits/8 depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11126
/** Opcode 0xaa - STOSB. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE both take this path (both bits are tested and dispatched
     * to the same rep-stos implementation).
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11160
11161
/** Opcode 0xab - STOSW/STOSD/STOSQ, selected by effective operand size. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11244
11245#undef IEM_STOS_CASE
11246
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits one non-repeated LODS iteration: load ValBits bits from DS(/seg
 * override):xSI into the low part of xAX, then advance or retreat xSI by
 * ValBits/8 depending on EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11262
/** Opcode 0xac - LODSB. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REP and REPNE both take this path.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11296
11297
/** Opcode 0xad - LODSW/LODSD/LODSQ, selected by effective operand size. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11380
11381#undef IEM_LODS_CASE
11382
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits one non-repeated SCAS iteration: compare the low ValBits bits of xAX
 * against ES:xDI via the cmp assembly helper (updates EFLAGS only), then
 * advance or retreat xDI by ValBits/8 depending on EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11404
/** Opcode 0xae - SCASB.  Unlike STOS/LODS, REPE and REPNE have distinct
 *  semantics here and get separate C implementations. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11449
11450
/** Opcode 0xaf - SCASW/SCASD/SCASQ, selected by effective operand size.
 *  REPE and REPNE have distinct semantics and separate C implementations. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo 16-bit addressing cannot be encoded in 64-bit mode (only 32- and 64-bit can), so this should be unreachable - verify. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11566
11567#undef IEM_SCAS_CASE
11568
11569/**
11570 * Common 'mov r8, imm8' helper.
11571 */
11572FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11573{
11574 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11575 IEMOP_HLP_NO_LOCK_PREFIX();
11576
11577 IEM_MC_BEGIN(0, 1);
11578 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11579 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11580 IEM_MC_ADVANCE_RIP();
11581 IEM_MC_END();
11582
11583 return VINF_SUCCESS;
11584}
11585
11586
/** Opcode 0xb0 - MOV AL, imm8 (R8B-R15B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11593
11594
/** Opcode 0xb1 - MOV CL, imm8. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11601
11602
/** Opcode 0xb2 - MOV DL, imm8. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11609
11610
/** Opcode 0xb3 - MOV BL, imm8. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11617
11618
/** Opcode 0xb4 - MOV AH, imm8 (SPL with a REX prefix).
 *  NOTE(review): register index 4 (X86_GREG_xSP) is AH without REX and SPL
 *  with REX; the 8-bit GREG store presumably resolves this - confirm in the
 *  IEM_MC_STORE_GREG_U8 implementation. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11625
11626
/** Opcode 0xb5 - MOV CH, imm8 (BPL with a REX prefix; index 5 = X86_GREG_xBP). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11633
11634
/** Opcode 0xb6 - MOV DH, imm8 (SIL with a REX prefix; index 6 = X86_GREG_xSI). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11641
11642
/** Opcode 0xb7 - MOV BH, imm8 (DIL with a REX prefix; index 7 = X86_GREG_xDI). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11649
11650
11651/**
11652 * Common 'mov regX,immX' helper.
11653 */
11654FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11655{
11656 switch (pIemCpu->enmEffOpSize)
11657 {
11658 case IEMMODE_16BIT:
11659 {
11660 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11661 IEMOP_HLP_NO_LOCK_PREFIX();
11662
11663 IEM_MC_BEGIN(0, 1);
11664 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11665 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11666 IEM_MC_ADVANCE_RIP();
11667 IEM_MC_END();
11668 break;
11669 }
11670
11671 case IEMMODE_32BIT:
11672 {
11673 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11674 IEMOP_HLP_NO_LOCK_PREFIX();
11675
11676 IEM_MC_BEGIN(0, 1);
11677 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11678 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11679 IEM_MC_ADVANCE_RIP();
11680 IEM_MC_END();
11681 break;
11682 }
11683 case IEMMODE_64BIT:
11684 {
11685 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11686 IEMOP_HLP_NO_LOCK_PREFIX();
11687
11688 IEM_MC_BEGIN(0, 1);
11689 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11690 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11691 IEM_MC_ADVANCE_RIP();
11692 IEM_MC_END();
11693 break;
11694 }
11695 }
11696
11697 return VINF_SUCCESS;
11698}
11699
11700
11701/** Opcode 0xb8. */
11702FNIEMOP_DEF(iemOp_eAX_Iv)
11703{
11704 IEMOP_MNEMONIC("mov rAX,IV");
11705 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11706}
11707
11708
11709/** Opcode 0xb9. */
11710FNIEMOP_DEF(iemOp_eCX_Iv)
11711{
11712 IEMOP_MNEMONIC("mov rCX,IV");
11713 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11714}
11715
11716
11717/** Opcode 0xba. */
11718FNIEMOP_DEF(iemOp_eDX_Iv)
11719{
11720 IEMOP_MNEMONIC("mov rDX,IV");
11721 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11722}
11723
11724
11725/** Opcode 0xbb. */
11726FNIEMOP_DEF(iemOp_eBX_Iv)
11727{
11728 IEMOP_MNEMONIC("mov rBX,IV");
11729 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11730}
11731
11732
11733/** Opcode 0xbc. */
11734FNIEMOP_DEF(iemOp_eSP_Iv)
11735{
11736 IEMOP_MNEMONIC("mov rSP,IV");
11737 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11738}
11739
11740
11741/** Opcode 0xbd. */
11742FNIEMOP_DEF(iemOp_eBP_Iv)
11743{
11744 IEMOP_MNEMONIC("mov rBP,IV");
11745 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11746}
11747
11748
11749/** Opcode 0xbe. */
11750FNIEMOP_DEF(iemOp_eSI_Iv)
11751{
11752 IEMOP_MNEMONIC("mov rSI,IV");
11753 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11754}
11755
11756
11757/** Opcode 0xbf. */
11758FNIEMOP_DEF(iemOp_eDI_Iv)
11759{
11760 IEMOP_MNEMONIC("mov rDI,IV");
11761 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11762}
11763
11764
/** Opcode 0xc0 - Group 2: rotate/shift r/m8 by imm8 (ROL/ROR/RCL/RCR/SHL/SHR/SAR,
 *  selected by the ModR/M reg field; /6 is undefined). */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* OF/AF are undefined for some counts. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the effective address is calculated before the imm8
           count is fetched; the 3rd CALC_RM_EFF_ADDR argument (1) presumably
           accounts for that trailing immediate byte - confirm against the
           macro's definition. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11823
11824
/** Opcode 0xc1 - Group 2: rotate/shift r/m16/32/64 by imm8 (selected by the
 *  ModR/M reg field; /6 is undefined).  One register and one memory variant
 *  per effective operand size. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* OF/AF are undefined for some counts. */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register writes clear bits 63:32. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory - the imm8 count is fetched after the effective address is
           calculated (CALC_RM_EFF_ADDR is told about the 1 trailing byte). */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11961
11962
/** Opcode 0xc2 - RET imm16 (near return, popping imm16 extra bytes). */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near RET defaults to 64-bit operand size in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
11972
11973
/** Opcode 0xc3 - RET (near return, same C implementation as 0xc2 with a
 *  zero pop count). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near RET defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
11982
11983
/** Opcode 0xc4 - LES Gv,Mp, doubling as the 2-byte VEX prefix. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented here yet. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12004
12005
/** Opcode 0xc5. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MOD != 3 outside 64-bit mode: this really is LDS. */
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX requires protected mode */
    }

    /* Treat as the 3-byte VEX prefix: consume the two VEX payload bytes and
       the opcode byte, then reject since VEX decoding isn't implemented here. */
    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12043
12044
/** Opcode 0xc6. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* The reg field of ModR/M must be 0; everything else in group 11 is undefined. */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The effective address comes before the immediate in the opcode
           stream, hence the 1-byte immediate hint to the address calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12076
12077
/** Opcode 0xc7. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* The reg field of ModR/M must be 0; everything else in group 11 is undefined. */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Immediate size hint (2/4 bytes) lets the address calc know how
                   many opcode bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still a 4-byte immediate on the wire; sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12158
12159
12160
12161
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* imm16 = frame size, imm8 = nesting level; both handled by the C impl. */
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12172
12173
12174/** Opcode 0xc9. */
12175FNIEMOP_DEF(iemOp_leave)
12176{
12177 IEMOP_MNEMONIC("retn");
12178 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12179 IEMOP_HLP_NO_LOCK_PREFIX();
12180 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12181}
12182
12183
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    /* imm16 = number of bytes to release from the stack after the far return. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12193
12194
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Same as "retf Iw" with zero bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12203
12204
12205/** Opcode 0xcc. */
12206FNIEMOP_DEF(iemOp_int_3)
12207{
12208 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12209}
12210
12211
12212/** Opcode 0xcd. */
12213FNIEMOP_DEF(iemOp_int_Ib)
12214{
12215 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12216 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12217}
12218
12219
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT(); /* INTO is invalid in 64-bit mode */

    /* Conditionally raises #OF; the OF test itself lives in iemCImpl_int. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12233
12234
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* All the mode-dependent heavy lifting happens in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12242
12243
/** Opcode 0xd0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /* Group 2, byte operand, implicit shift count of 1.
       The ModR/M reg field selects the rotate/shift operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination RW, run the op, then commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12299
12300
12301
/** Opcode 0xd1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /* Group 2, word/dword/qword operand, implicit shift count of 1.
       The ModR/M reg field selects the rotate/shift operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination, one template per effective operand size */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map RW, operate, commit and unmap */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12431
12432
/** Opcode 0xd2. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /* Group 2, byte operand, shift count taken from CL.
       The ModR/M reg field selects the rotate/shift operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination RW, run the op, then commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12490
12491
/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Group 2, word/dword/qword operand, shift count taken from CL.
       The ModR/M reg field selects the rotate/shift operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination, one template per effective operand size */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* the count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map RW, operate, commit and unmap */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12627
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    /* The immediate is the divisor (0x0a for the classic encoding). */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* divide by zero -> #DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12639
12640
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    /* The immediate is the multiplier (0x0a for the classic encoding);
       unlike AAM a zero immediate is fine here. */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12650
12651
12652/** Opcode 0xd6. */
12653FNIEMOP_DEF(iemOp_salc)
12654{
12655 IEMOP_MNEMONIC("salc");
12656 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12658 IEMOP_HLP_NO_64BIT();
12659
12660 IEM_MC_BEGIN(0, 0);
12661 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12662 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12663 } IEM_MC_ELSE() {
12664 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12665 } IEM_MC_ENDIF();
12666 IEM_MC_ADVANCE_RIP();
12667 IEM_MC_END();
12668 return VINF_SUCCESS;
12669}
12670
12671
/** Opcode 0xd7. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* AL = [xBX + zero-extended AL]; the address width follows the effective
       address mode, hence one template per mode. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12718
12719
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Only run the assembly worker when both ST0 and STn hold values;
       otherwise record a stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12750
12751
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Only FSW is updated; no register is written.  UINT8_MAX tells the
       underflow helper that there is no destination register. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12782
12783
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Like iemOpHlpFpuNoStore_st0_stN, but pops the stack afterwards (even
       on underflow, via the _THEN_POP variants). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12814
12815
/** Opcode 0xd8 11/0.  fadd st0,stN — delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12822
12823
/** Opcode 0xd8 11/1.  fmul st0,stN — delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12830
12831
/** Opcode 0xd8 11/2.  fcom st0,stN — compare only, flags-only worker. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12838
12839
/** Opcode 0xd8 11/3.  fcomp st0,stN — same compare worker as fcom but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12846
12847
/** Opcode 0xd8 11/4.  fsub st0,stN — delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12854
12855
/** Opcode 0xd8 11/5.  fsubr st0,stN — reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
12862
12863
/** Opcode 0xd8 11/6.  fdiv st0,stN — delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
12870
12871
/** Opcode 0xd8 11/7.  fdivr st0,stN — reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
12878
12879
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the m32r memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    /* Decode the memory operand before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST0 must hold a value; otherwise record a stack underflow on ST0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12915
12916
/** Opcode 0xd8 !11/0: FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1: FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
12931
12932
/** Opcode 0xd8 !11/2: FCOM ST(0),m32real - compare only, updates FSW (C0-C3). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* No result is stored; only the FSW returned by the worker is recorded
       (together with FOP/FPUIP/FPUDP via the _WITH_MEM_OP variant). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12965
12966
/** Opcode 0xd8 !11/3: FCOMP ST(0),m32real - like FCOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same as iemOp_fcom_m32r except the _THEN_POP variants pop the stack. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12999
13000
/** Opcode 0xd8 !11/4: FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5: FSUBR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6: FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7: FDIVR ST(0),m32real - reversed operand order. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13031
13032
/** Opcode 0xd8 - escape group F0 dispatcher. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the FPU opcode byte is so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* mod == 3: register forms working on ST(0)/ST(i);
       otherwise: memory forms working on m32 real. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13070
13071
/** Opcode 0xd9 /0 mem32real - FLD m32real: push the converted value onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires the register that will become the new top, ST(7)
       relative to the current top, to be empty; otherwise stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13104
13105
/** Opcode 0xd9 !11/2 mem32real - FST m32real: store ST(0) as 32-bit real. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front; the commit-for-FPU-store variant
       only writes the conversion result back if no unmasked exception is
       pending in the returned FSW. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the indefinite QNaN; then flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13140
13141
/** Opcode 0xd9 !11/3 - FSTP m32real: store ST(0) as 32-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Identical to iemOp_fst_m32r apart from the _THEN_POP FSW/underflow variants. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the indefinite QNaN; then flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13176
13177
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment.
 * The 14 vs 28 byte layout depends on the effective operand size, which is
 * passed on to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13194
13195
13196/** Opcode 0xd9 !11/5 */
13197FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13198{
13199 IEMOP_MNEMONIC("fldcw m2byte");
13200 IEM_MC_BEGIN(1, 1);
13201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13202 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13205 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13206 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13207 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13208 IEM_MC_END();
13209 return VINF_SUCCESS;
13210}
13211
13212
13213/** Opcode 0xd9 !11/6 */
13214FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13215{
13216 IEMOP_MNEMONIC("fstenv m14/m28byte");
13217 IEM_MC_BEGIN(3, 0);
13218 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13219 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13220 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13223 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13224 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13225 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13226 IEM_MC_END();
13227 return VINF_SUCCESS;
13228}
13229
13230
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the FPU control word (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13247
13248
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: FPU no-operation. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13266
13267
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* ST(i) must be occupied; its value becomes the push result with FSW = 0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13293
13294
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both registers occupied: ST(i) -> ST(0) via the result (FSW C1 set) and
       the old ST(0) value is written into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13323
13324
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i): copy ST(0) to ST(i) and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop (or flag underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST(0) into ST(i) with FSW = 0, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13367
13368
13369/**
13370 * Common worker for FPU instructions working on ST0 and replaces it with the
13371 * result, i.e. unary operators.
13372 *
13373 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13374 */
13375FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13376{
13377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13378
13379 IEM_MC_BEGIN(2, 1);
13380 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13381 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13382 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13383
13384 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13385 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13386 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13387 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13388 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13389 IEM_MC_ELSE()
13390 IEM_MC_FPU_STACK_UNDERFLOW(0);
13391 IEM_MC_ENDIF();
13392 IEM_MC_USED_FPU();
13393 IEM_MC_ADVANCE_RIP();
13394
13395 IEM_MC_END();
13396 return VINF_SUCCESS;
13397}
13398
13399
/** Opcode 0xd9 0xe0 - FCHS: change the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - FABS: absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13414
13415
13416/**
13417 * Common worker for FPU instructions working on ST0 and only returns FSW.
13418 *
13419 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13420 */
13421FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13422{
13423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13424
13425 IEM_MC_BEGIN(2, 1);
13426 IEM_MC_LOCAL(uint16_t, u16Fsw);
13427 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13428 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13429
13430 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13431 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13432 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13433 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13434 IEM_MC_UPDATE_FSW(u16Fsw);
13435 IEM_MC_ELSE()
13436 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13437 IEM_MC_ENDIF();
13438 IEM_MC_USED_FPU();
13439 IEM_MC_ADVANCE_RIP();
13440
13441 IEM_MC_END();
13442 return VINF_SUCCESS;
13443}
13444
13445
/** Opcode 0xd9 0xe4 - FTST: compare ST(0) with 0.0, sets condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5 - FXAM: examine/classify ST(0), sets condition codes only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13460
13461
13462/**
13463 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13464 *
13465 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13466 */
13467FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13468{
13469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13470
13471 IEM_MC_BEGIN(1, 1);
13472 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13473 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13474
13475 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13476 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13477 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13478 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13479 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13480 IEM_MC_ELSE()
13481 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13482 IEM_MC_ENDIF();
13483 IEM_MC_USED_FPU();
13484 IEM_MC_ADVANCE_RIP();
13485
13486 IEM_MC_END();
13487 return VINF_SUCCESS;
13488}
13489
13490
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13551
13552
13553/** Opcode 0xd9 0xf1. */
13554FNIEMOP_DEF(iemOp_fylx2)
13555{
13556 IEMOP_MNEMONIC("fylx2 st0");
13557 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13558}
13559
13560
13561/**
13562 * Common worker for FPU instructions working on ST0 and having two outputs, one
13563 * replacing ST0 and one pushed onto the stack.
13564 *
13565 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13566 */
13567FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13568{
13569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13570
13571 IEM_MC_BEGIN(2, 1);
13572 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13573 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13574 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13575
13576 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13577 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13578 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13579 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13580 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13581 IEM_MC_ELSE()
13582 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13583 IEM_MC_ENDIF();
13584 IEM_MC_USED_FPU();
13585 IEM_MC_ADVANCE_RIP();
13586
13587 IEM_MC_END();
13588 return VINF_SUCCESS;
13589}
13590
13591
/** Opcode 0xd9 0xf2 - FPTAN: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13598
13599
13600/**
13601 * Common worker for FPU instructions working on STn and ST0, storing the result
13602 * in STn, and popping the stack unless IE, DE or ZE was raised.
13603 *
13604 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13605 */
13606FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13607{
13608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13609
13610 IEM_MC_BEGIN(3, 1);
13611 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13612 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13613 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13614 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13615
13616 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13617 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13618
13619 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13620 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13621 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13622 IEM_MC_ELSE()
13623 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13624 IEM_MC_ENDIF();
13625 IEM_MC_USED_FPU();
13626 IEM_MC_ADVANCE_RIP();
13627
13628 IEM_MC_END();
13629 return VINF_SUCCESS;
13630}
13631
13632
/** Opcode 0xd9 0xf3 - FPATAN: result in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - FXTRACT: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - FPREM1: partial remainder of ST(0) by ST(1), IEEE variant. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13655
13656
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13679
13680
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13703
13704
/** Opcode 0xd9 0xf8 - FPREM: partial remainder of ST(0) by ST(1), legacy variant. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - FYL2XP1: result in ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - FSQRT: square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - FSINCOS: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - FSCALE: scale ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - FSIN: sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - FCOS: cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13767
13768
/** Used by iemOp_EscF1 to dispatch the register forms 0xe0 thru 0xff;
 *  index is (opcode byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13805
13806
/** Opcode 0xd9 - escape group F1 dispatcher. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the FPU opcode byte is so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd9 0xd0 is defined in this row (FNOP). */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0-0xff are dispatched through the table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13848
13849
/** Opcode 0xda 11/0 - FCMOVB ST(0),ST(i): move if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy only when EFLAGS.CF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13876
13877
/** Opcode 0xda 11/1 - FCMOVE ST(0),ST(i): move if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy only when EFLAGS.ZF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13904
13905
/** Opcode 0xda 11/2 - FCMOVBE ST(0),ST(i): move if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST(0) must be occupied; copy when CF or ZF is set. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13932
13933
/** Opcode 0xda 11/3.
 * FCMOVU - copy ST(n) to ST(0) if PF is set ("unordered"). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(n) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13960
13961
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-double-pop forms): the assembly
 * worker only produces a new FSW; no register is written back.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      taking (pu16Fsw, pr80Value1, pr80Value2).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* ST(0) and ST(1) must both be non-empty; otherwise stack underflow + double pop. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13992
13993
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14000
14001
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, then calls the
 * assembly worker with ST(0) and the integer; the result is written back
 * to ST(0) unless the register is empty (stack underflow).
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Decode the effective address before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14037
14038
/** Opcode 0xda !11/0.
 * FIADD - add a 32-bit signed integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14045
14046
/** Opcode 0xda !11/1.
 * FIMUL - multiply ST(0) by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14053
14054
/** Opcode 0xda !11/2.
 * FICOM - compare ST(0) with a 32-bit signed integer memory operand;
 * only the FSW condition codes are updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14087
14088
/** Opcode 0xda !11/3.
 * FICOMP - like FICOM (compare ST(0) with m32i), but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14121
14122
/** Opcode 0xda !11/4.
 * FISUB - subtract a 32-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14129
14130
/** Opcode 0xda !11/5.
 * FISUBR - reverse subtract: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14137
14138
/** Opcode 0xda !11/6.
 * FIDIV - divide ST(0) by a 32-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14145
14146
/** Opcode 0xda !11/7.
 * FIDIVR - reverse divide: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14153
14154
/** Opcode 0xda.
 * FPU escape group 2: dispatches on the ModR/M byte.  Register forms (mod=3)
 * are the FCMOVcc family and FUCOMPP; memory forms are the m32i integer
 * arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (opcode byte just consumed) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14194
14195
/** Opcode 0xdb !11/0.
 * FILD - load a 32-bit signed integer from memory and push it onto the FPU
 * stack as an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST(7) (the register below TOP) to be empty, else it overflows. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14227
14228
/** Opcode 0xdb !11/1.
 * FISTTP - store ST(0) to memory as a 32-bit signed integer with truncation
 * (round toward zero regardless of RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on the FSW result. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14263
14264
/** Opcode 0xdb !11/2.
 * FIST - store ST(0) to memory as a 32-bit signed integer (rounded per FCW.RC),
 * without popping. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on the FSW result. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14299
14300
14301/** Opcode 0xdb !11/3. */
14302FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14303{
14304 IEMOP_MNEMONIC("fisttp m32i");
14305 IEM_MC_BEGIN(3, 2);
14306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14307 IEM_MC_LOCAL(uint16_t, u16Fsw);
14308 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14309 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14310 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14311
14312 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14314 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14315 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14316
14317 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14318 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14319 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14320 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14321 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14322 IEM_MC_ELSE()
14323 IEM_MC_IF_FCW_IM()
14324 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14325 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14326 IEM_MC_ENDIF();
14327 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14328 IEM_MC_ENDIF();
14329 IEM_MC_USED_FPU();
14330 IEM_MC_ADVANCE_RIP();
14331
14332 IEM_MC_END();
14333 return VINF_SUCCESS;
14334}
14335
14336
/** Opcode 0xdb !11/5.
 * FLD - load an 80-bit real from memory and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push needs ST(7) (the register below TOP) to be empty, else it overflows. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14368
14369
/** Opcode 0xdb !11/7.
 * FSTP - store ST(0) to memory as an 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on the FSW result. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14404
14405
/** Opcode 0xdb 11/0.
 * FCMOVNB - copy ST(n) to ST(0) if CF is clear ("not below"). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(n) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14432
14433
/** Opcode 0xdb 11/1.
 * FCMOVNE - copy ST(n) to ST(0) if ZF is clear ("not equal"). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(n) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14460
14461
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copy ST(n) to ST(0) if both CF and ZF are clear ("not below or equal"). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(n) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14488
14489
14490/** Opcode 0xdb 11/3. */
14491FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14492{
14493 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14495
14496 IEM_MC_BEGIN(0, 1);
14497 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14498
14499 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14500 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14501
14502 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14503 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14504 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14505 IEM_MC_ENDIF();
14506 IEM_MC_UPDATE_FPU_OPCODE_IP();
14507 IEM_MC_ELSE()
14508 IEM_MC_FPU_STACK_UNDERFLOW(0);
14509 IEM_MC_ENDIF();
14510 IEM_MC_USED_FPU();
14511 IEM_MC_ADVANCE_RIP();
14512
14513 IEM_MC_END();
14514 return VINF_SUCCESS;
14515}
14516
14517
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 "enable interrupts"; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14529
14530
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 "disable interrupts"; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14542
14543
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear FPU exception flags (and busy bit) in FSW, without
 * checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14557
14558
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU without checking for pending exceptions
 * (hence fCheckXcpts=false); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14566
14567
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14579
14580
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode"; raises #UD here since newer
 * CPUs treat it as an invalid opcode (see the #if 0 branch). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14596
14597
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(n), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14604
14605
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare ST(0) with ST(n), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14612
14613
/** Opcode 0xdb.
 * FPU escape group 3: dispatches on the ModR/M byte.  Register forms (mod=3)
 * are FCMOVcc (negated conditions), control ops (FNENI..FNSETPM/FRSTPM) and
 * FUCOMI/FCOMI; memory forms are m32i integer loads/stores and m80r. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the FPU opcode offset (opcode byte just consumed) for FOP updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Row 0xe0-0xe7: the classic no-operand control instructions. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14663
14664
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Used by the 0xdc register forms (FADD/FMUL/FSUB/FSUBR/FDIV/FDIVR stN,st0):
 * ST(n) is the destination, ST(0) the second operand.
 *
 * @param   bRm         The ModR/M byte (register form; RM selects ST(n)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise stack underflow in ST(n). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14696
14697
/** Opcode 0xdc 11/0.
 * FADD - ST(n) = ST(n) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14704
14705
/** Opcode 0xdc 11/1.
 * FMUL - ST(n) = ST(n) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14712
14713
/** Opcode 0xdc 11/4.
 * FSUBR - ST(n) = ST(0) - ST(n). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14720
14721
/** Opcode 0xdc 11/5.
 * FSUB - ST(n) = ST(n) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14728
14729
/** Opcode 0xdc 11/6.
 * FDIVR - ST(n) = ST(0) / ST(n). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14736
14737
/** Opcode 0xdc 11/7.
 * FDIV - ST(n) = ST(n) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14744
14745
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the 64-bit real from memory, then calls the assembly worker with
 * ST(0) and the memory value; the result is written back to ST(0) unless the
 * register is empty (stack underflow).
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14780
14781
/** Opcode 0xdc !11/0.
 * FADD - add a 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14788
14789
/** Opcode 0xdc !11/1.
 * FMUL - multiply ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14796
14797
/** Opcode 0xdc !11/2.
 * FCOM - compare ST(0) with a 64-bit real memory operand; only the FSW
 * condition codes are updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14830
14831
/** Opcode 0xdc !11/3.
 * FCOMP - like FCOM (compare ST(0) with m64r), but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14864
14865
/** Opcode 0xdc !11/4.
 * FSUB - subtract a 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14872
14873
/** Opcode 0xdc !11/5.
 * FSUBR - reverse subtract: ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
14880
14881
/** Opcode 0xdc !11/6.
 * FDIV - divide ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
14888
14889
/** Opcode 0xdc !11/7.
 * FDIVR m64r - reversed divide via the common ST0-with-m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
14896
14897
/** Opcode 0xdc.
 * FPU escape dispatcher: register forms (mod==3) operate on ST(i) with ST0
 * as the source; memory forms use a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the escape-byte offset for later FPU opcode/IP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14934
14935
/** Opcode 0xdd !11/0.
 * FLD m64r - convert the 64-bit real at the effective address to 80-bit
 * and push it onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST7 (the register below the current top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14967
14968
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST0 to memory as a 64-bit integer with truncation,
 * then pop.  On masked invalid-operand (FCW.IM set) stores the integer
 * indefinite value instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before we know whether ST0 is valid,
       so the underflow path can still store the indefinite value. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15003
15004
/** Opcode 0xdd !11/2.
 * FST m64r - store ST0 to memory as a 64-bit real; no pop.  On masked
 * invalid-operand a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15039
15040
15041
15042
/** Opcode 0xdd !11/3.
 * FSTP m64r - identical to FST m64r except that the register stack is
 * popped afterwards (the *_THEN_POP FSW updates). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15077
15078
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state from memory; the heavy
 * lifting is deferred to the C implementation (iemCImpl_frstor). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15095
15096
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory; deferred to the
 * C implementation (iemCImpl_fnsave). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15114
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand,
 * without checking for pending FPU exceptions first (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15138
15139
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the given register as empty without touching its
 * contents or the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15161
15162
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into ST(i); raises stack underflow when ST0 is
 * empty.  (FSW passed as 0 since the copy itself sets no flags.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15185
15186
15187/** Opcode 0xdd 11/3. */
15188FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15189{
15190 IEMOP_MNEMONIC("fcom st0,stN");
15191 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15192}
15193
15194
15195/** Opcode 0xdd 11/4. */
15196FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15197{
15198 IEMOP_MNEMONIC("fcomp st0,stN");
15199 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15200}
15201
15202
/** Opcode 0xdd.
 * FPU escape dispatcher: register forms cover FFREE/FST/FSTP/FUCOM(P),
 * memory forms cover 64-bit real loads/stores, FRSTOR/FNSAVE and FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the escape-byte offset for later FPU opcode/IP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15239
15240
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add and pop, via the common stN-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15247
15248
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST0 - multiply and pop, via the common stN-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15255
15256
15257/** Opcode 0xde 0xd9. */
15258FNIEMOP_DEF(iemOp_fcompp)
15259{
15260 IEMOP_MNEMONIC("fucompp st0,stN");
15261 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15262}
15263
15264
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15271
15272
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15279
15280
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15287
15288
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15295
15296
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form; used for effective
 *                      address calculation).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Run the arithmetic worker when ST0 is valid; flag underflow otherwise. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15332
15333
/** Opcode 0xde !11/0.
 * FIADD m16i - integer add via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15340
15341
/** Opcode 0xde !11/1.
 * FIMUL m16i - integer multiply via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15348
15349
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a 16-bit integer operand; updates FSW
 * condition codes only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15382
15383
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - like FICOM but pops the register stack afterwards
 * (the *_THEN_POP FSW updates). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15416
15417
/** Opcode 0xde !11/4.
 * FISUB m16i - integer subtract via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15424
15425
/** Opcode 0xde !11/5.
 * FISUBR m16i - reversed integer subtract via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15432
15433
15434/** Opcode 0xde !11/6. */
15435FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15436{
15437 IEMOP_MNEMONIC("fiadd m16i");
15438 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15439}
15440
15441
15442/** Opcode 0xde !11/7. */
15443FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15444{
15445 IEMOP_MNEMONIC("fiadd m16i");
15446 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15447}
15448
15449
/** Opcode 0xde.
 * FPU escape dispatcher: register forms are the pop-variants of the
 * arithmetic ops (FADDP etc.); memory forms take a 16-bit integer. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the escape-byte offset for later FPU opcode/IP bookkeeping. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15488
15489
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * tag ST(i) empty, then increment the stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15511
15512
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word into AX (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15528
15529
15530/** Opcode 0xdf 11/5. */
15531FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15532{
15533 IEMOP_MNEMONIC("fcomip st0,stN");
15534 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15535}
15536
15537
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare into EFLAGS, then pop; deferred to
 * the shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15544
15545
/** Opcode 0xdf !11/0.
 * FILD m16i - load a 16-bit signed integer, convert to 80-bit real and
 * push onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST7 (the register below the current top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15577
15578
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST0 as a 16-bit integer with truncation, then pop.
 * On masked invalid-operand stores the integer indefinite value. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15613
15614
15615/** Opcode 0xdf !11/2. */
15616FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15617{
15618 IEMOP_MNEMONIC("fistp m16i");
15619 IEM_MC_BEGIN(3, 2);
15620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15621 IEM_MC_LOCAL(uint16_t, u16Fsw);
15622 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15623 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15624 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15625
15626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15628 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15629 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15630
15631 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15632 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15633 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15634 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15635 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15636 IEM_MC_ELSE()
15637 IEM_MC_IF_FCW_IM()
15638 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15639 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15640 IEM_MC_ENDIF();
15641 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15642 IEM_MC_ENDIF();
15643 IEM_MC_USED_FPU();
15644 IEM_MC_ADVANCE_RIP();
15645
15646 IEM_MC_END();
15647 return VINF_SUCCESS;
15648}
15649
15650
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST0 as a 16-bit integer using the current rounding
 * mode, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15685
15686
/** Opcode 0xdf !11/4.
 * FBLD - not yet implemented.  NOTE(review): the operand is an 80-bit
 * packed BCD (m80bcd) per the opcode map, despite the "m80d" in the name
 * - confirm the naming intent before implementing. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15689
15690
15691/** Opcode 0xdf !11/5. */
15692FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
15693{
15694 IEMOP_MNEMONIC("fild m64i");
15695
15696 IEM_MC_BEGIN(2, 3);
15697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15698 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15699 IEM_MC_LOCAL(int64_t, i64Val);
15700 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15701 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
15702
15703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15705
15706 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15707 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15708 IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
15709
15710 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15711 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
15712 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
15713 IEM_MC_ELSE()
15714 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
15715 IEM_MC_ENDIF();
15716 IEM_MC_USED_FPU();
15717 IEM_MC_ADVANCE_RIP();
15718
15719 IEM_MC_END();
15720 return VINF_SUCCESS;
15721}
15722
15723
/** Opcode 0xdf !11/6.
 * FBSTP - not yet implemented.  NOTE(review): the operand is an 80-bit
 * packed BCD (m80bcd) per the opcode map, despite the "m80d" in the name
 * - confirm the naming intent before implementing. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15726
15727
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST0 as a 64-bit integer using the current rounding
 * mode, then pop.  On masked invalid-operand stores the integer
 * indefinite value. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15762
15763
/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /* x87 escape byte 0xdf: dispatch on the ModRM reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only the df /4 0xe0 encoding (fnstsw ax) is defined */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15801
15802
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    /* LOOPNE/LOOPNZ Jb: decrement the counter, then branch if it is
       non-zero AND ZF is clear. */
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register (CX/ECX/RCX) is selected by the effective
       address size, not the operand size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15849
15850
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    /* LOOPE/LOOPZ Jb: decrement the counter, then branch if it is
       non-zero AND ZF is set (the only difference from LOOPNE). */
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Counter register width follows the effective address size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15897
15898
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    /* LOOP Jb: decrement the counter and branch while it is non-zero
       (no flag condition). */
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override. How can that be restarted? See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* -offOpcode == i8Imm means the branch target is the start of
               this very instruction, i.e. a pure delay loop that would just
               spin the counter down to zero; emulate the final state
               directly instead of iterating. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branching fast path, see the 16-bit case. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branching fast path, see the 16-bit case. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15972
15973
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    /* JCXZ/JECXZ/JRCXZ Jb: branch when the counter register is zero.
       Unlike LOOP*, the counter is only tested, never decremented.
       Note the inverted IF structure: the not-zero case falls through. */
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The tested register width follows the effective address size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16017
16018
16019/** Opcode 0xe4 */
16020FNIEMOP_DEF(iemOp_in_AL_Ib)
16021{
16022 IEMOP_MNEMONIC("in eAX,Ib");
16023 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16024 IEMOP_HLP_NO_LOCK_PREFIX();
16025 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16026}
16027
16028
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN eAX,Ib: read 2 or 4 bytes (by effective operand size) from the
       immediate I/O port into AX/EAX; deferred to the C implementation. */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16037
16038
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT Ib,AL: write the AL byte to the immediate I/O port;
       deferred to the C implementation. */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16047
16048
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT Ib,eAX: write 2 or 4 bytes (by effective operand size) from
       AX/EAX to the immediate I/O port; deferred to the C implementation. */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16057
16058
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /* CALL Jv: near relative call. The immediate is fetched unsigned and
       reinterpreted as a signed displacement for the C implementation. */
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* In 64-bit mode the displacement is still 32 bits, sign
               extended to 64 at fetch time. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16087
16088
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /* JMP Jv: near relative jump with a 16- or 32-bit displacement. */
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        /* 64-bit mode shares the 32-bit path: the displacement is always
           a sign-extended 32-bit value. */
        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16118
16119
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /* JMP Ap: direct far jump (ptr16:16 / ptr16:32).  The encoding is
       invalid in 64-bit mode, hence the NO_64BIT check up front. */
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16136
16137
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /* JMP Jb: short relative jump with an 8-bit signed displacement;
       unconditional, so no operand-size switch is needed. */
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16151
16152
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: read one byte from the port selected by DX. */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16160
16161
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* IN eAX,DX: read 2 or 4 bytes (by effective operand size) from the
       port selected by DX.
       NOTE(review): function name looks like it should be iemOp_in_eAX_DX
       for consistency with iemOp_in_AL_DX; renaming would require touching
       the opcode table, so it is only flagged here. */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16169
16170
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: write the AL byte to the port selected by DX. */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16178
16179
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,eAX: write 2 or 4 bytes (by effective operand size) from
       AX/EAX to the port selected by DX. */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16187
16188
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    /* LOCK prefix: record it in fPrefixes (any preceding REX prefix is
       cleared by the helper since REX must come last), then continue
       decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16198
16199
/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    /* INT1/ICEBP: raise #DB via the common software-interrupt C worker;
       fIsBpInstr=false so it is not treated as an INT3 breakpoint. */
    IEMOP_MNEMONIC("int1"); /* icebp */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16207
16208
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /* REPNE/REPNZ prefix: mutually exclusive with REPE, so the REPZ bit
       is cleared first; then continue decoding with the next opcode byte. */
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16220
16221
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /* REP/REPE/REPZ prefix: mutually exclusive with REPNE, so the REPNZ
       bit is cleared first; then continue decoding with the next opcode byte. */
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16233
16234
16235/** Opcode 0xf4. */
16236FNIEMOP_DEF(iemOp_hlt)
16237{
16238 IEMOP_HLP_NO_LOCK_PREFIX();
16239 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16240}
16241
16242
/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    /* CMC: complement the carry flag; no other flags are touched here. */
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16254
16255
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form (mod == 3) and the memory form; the
 * memory form maps the operand read-write and picks the locked assembly
 * worker when a LOCK prefix is present.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic variant of the worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16299
16300
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * The register form is delegated to iemOpCommonUnaryGReg; the memory form
 * is expanded here per effective operand size, mapping the operand
 * read-write and honouring a LOCK prefix via the locked worker.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16379
16380
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib: AND the operand with an immediate byte, update flags,
       discard the result (hence read-only memory mapping below). */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address is computed before the immediate byte is
           fetched (the cbImm=1 argument accounts for it in RIP-relative
           addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16428
16429
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /* TEST Ev,Iv: AND the operand with an immediate, update flags,
       discard the result; expanded per effective operand size.  Memory
       operands are mapped read-only since nothing is written back. */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 64-bit immediate is a sign-extended 32-bit value, like
                   every other Iv in long mode. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: effective address computed before the 2-byte
                   immediate is fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the immediate is 4 bytes even for 64-bit
                   operand size (sign-extended below). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16566
16567
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /* Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms: the
       implicit operand is AX (referenced as pu16AX).  The assembly worker
       returns a status in 'rc'; non-zero means raise #DE (divide error). */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16622
16623
/** Opcode 0xf7 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    /* Common worker for the word/dword/qword MUL/IMUL/DIV/IDIV forms:
       implicit operands are xAX and xDX.  The assembly worker returns a
       status in 'rc'; non-zero means raise #DE (divide error).  In the
       32-bit success path the high halves of RAX/RDX are cleared
       explicitly since the workers write through pointers. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero-extend to 64 bits; done here by hand. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero-extend to 64 bits; done here by hand. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16807
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operand: dispatch on the ModRM reg field.
       /1 is undefined; mul/imul/div/idiv share a common worker and
       differ only in the assembly implementation passed in. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16843
16844
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3 with word/dword/qword operand: the reg field of the ModR/M
       byte selects the actual instruction (test/not/neg/mul/imul/div/idiv). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are architecturally undefined after MUL; tell the verifier to ignore them. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All six status flags are architecturally undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16880
16881
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF); /* clc only clears CF; all other flags are untouched. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16893
16894
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF); /* stc only sets CF; all other flags are untouched. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16906
16907
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation (iemCImpl_cli). */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
16915
16916
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Deferred to the C implementation (iemCImpl_sti). */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
16923
16924
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF); /* cld only clears DF; all other flags are untouched. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16936
16937
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF); /* std only sets DF; all other flags are untouched. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16949
16950
16951/** Opcode 0xfe. */
16952FNIEMOP_DEF(iemOp_Grp4)
16953{
16954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16955 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16956 {
16957 case 0:
16958 IEMOP_MNEMONIC("inc Ev");
16959 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
16960 case 1:
16961 IEMOP_MNEMONIC("dec Ev");
16962 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
16963 default:
16964 IEMOP_MNEMONIC("grp4-ud");
16965 return IEMOP_RAISE_INVALID_OPCODE();
16966 }
16967}
16968
16969
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near call defaults to 64-bit operand size in long mode */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17051
17052typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17053
17054FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17055{
17056 /* Registers? How?? */
17057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17058 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17059
17060 /* Far pointer loaded from memory. */
17061 switch (pIemCpu->enmEffOpSize)
17062 {
17063 case IEMMODE_16BIT:
17064 IEM_MC_BEGIN(3, 1);
17065 IEM_MC_ARG(uint16_t, u16Sel, 0);
17066 IEM_MC_ARG(uint16_t, offSeg, 1);
17067 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17071 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17072 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17073 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17074 IEM_MC_END();
17075 return VINF_SUCCESS;
17076
17077 case IEMMODE_64BIT:
17078 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17079 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17080 * and call far qword [rsp] encodings. */
17081 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17082 {
17083 IEM_MC_BEGIN(3, 1);
17084 IEM_MC_ARG(uint16_t, u16Sel, 0);
17085 IEM_MC_ARG(uint64_t, offSeg, 1);
17086 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17090 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17091 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17092 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17093 IEM_MC_END();
17094 return VINF_SUCCESS;
17095 }
17096 /* AMD falls thru. */
17097
17098 case IEMMODE_32BIT:
17099 IEM_MC_BEGIN(3, 1);
17100 IEM_MC_ARG(uint16_t, u16Sel, 0);
17101 IEM_MC_ARG(uint32_t, offSeg, 1);
17102 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17106 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17107 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17108 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17109 IEM_MC_END();
17110 return VINF_SUCCESS;
17111
17112 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17113 }
17114}
17115
17116
/**
 * Opcode 0xff /3 - far indirect call via a sel:off pair in memory.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    /* Shares the decoding worker with jmpf Ep; only the C implementation differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17126
17127
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jmp defaults to 64-bit operand size in long mode */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17209
17210
/**
 * Opcode 0xff /5 - far indirect jump via a sel:off pair in memory.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    /* Shares the decoding worker with callf Ep; only the C implementation differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17220
17221
/**
 * Opcode 0xff /6 - push word/dword/qword from register or memory.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* push defaults to 64-bit operand size in long mode */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17275
17276
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: the reg field of the ModR/M byte selects the actual
       instruction (inc/dec/calln/callf/jmpn/jmpf/push); /7 is invalid. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit values are covered above; getting here is an internal error. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17305
17306
17307
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte.
 * Prefix bytes (segment overrides, operand/address size, lock, rep) have
 * handlers of their own; group opcodes (0x80-0x83, 0xc0/0xc1, 0xd0-0xd3,
 * 0xf6/0xf7, 0xfe, 0xff) dispatch further on the ModR/M reg field.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17375
17376
17377/** @} */
17378
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette