VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60632

Last change on this file since 60632 was 60559, checked in by vboxsync, 9 years ago

IEM: ud2

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.7 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60559 2016-04-19 03:05:28Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination, so reject it here. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL pfnLockedU8 marks read-only operations (CMP, TEST), which only
           need a read mapping of the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* With a LOCK prefix use the atomic (locked) implementation. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST does not write its destination, so the 64-bit high half
                   must not be zeroed for it; all other ops write and clear it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 doubles as the "read-modify-write?" sentinel for the whole
           size table: it is NULL only for the read-only ops (CMP, TEST). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Destination is always a register here, so LOCK is never legal. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory (source only, so a plain fetch suffices).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Destination is always a register here, so LOCK is never legal. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper 64-bit half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory (source only, so a plain fetch suffices).
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper 64-bit half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    /* Register destination, so LOCK is never legal. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding; the 64-bit
 * form sign-extends a 32-bit immediate).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the normal path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST does not write its destination, so don't zero the high half for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is at most 32 bits: sign-extend it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6 - decoded as invalid, raising \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDT selector to a register or memory.
 *  Register form honours the operand size; memory form always stores 16 bits. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  or memory. Register form honours the operand size; memory form stores 16 bits. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDT from a selector operand.
 *  The heavy lifting (privilege and descriptor checks) is in iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check before fetching the selector from memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a selector operand.
 *  The heavy lifting (privilege and descriptor checks) is in iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check before fetching the selector from memory. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false selects the VERR (read) check in the common worker. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00 - Group 6: dispatch on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        /* /6 and /7 are undefined in group 6. */
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
787
788
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x01 /0 (mod=3) - VMCALL. Not implemented: decodes as \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0 (mod=3) - VMLAUNCH. Not implemented: decodes as \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0 (mod=3) - VMRESUME. Not implemented: decodes as \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /0 (mod=3) - VMXOFF. Not implemented: decodes as \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
838
839
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
857
858
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc8) - MONITOR: deferred to iemCImpl_monitor. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
866
867
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc9) - MWAIT: deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
875
876
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDTR from memory.
 *  NOTE(review): unlike sgdt/sidt this omits IEMOP_HLP_MIN_286 - confirm
 *  whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
893
894
/** Opcode 0x0f 0x01 0xd0 - XGETBV: only valid when the guest CPU exposes
 *  XSAVE/XRSTOR; otherwise \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
906
907
/** Opcode 0x0f 0x01 0xd1 - XSETBV: only valid when the guest CPU exposes
 *  XSAVE/XRSTOR; otherwise \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
919
920
921/** Opcode 0x0f 0x01 /3. */
922FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
923{
924 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
925 ? IEMMODE_64BIT
926 : pIemCpu->enmEffOpSize;
927 IEM_MC_BEGIN(3, 1);
928 IEM_MC_ARG(uint8_t, iEffSeg, 0);
929 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
930 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
933 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
934 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
935 IEM_MC_END();
936 return VINF_SUCCESS;
937}
938
939
/* AMD SVM instructions (0x0f 0x01, mod=3, 0xd8..0xdf): all stubbed to
   decode as invalid opcode via FNIEMOP_UD_STUB. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
963
964/** Opcode 0x0f 0x01 /4. */
965FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
966{
967 IEMOP_MNEMONIC("smsw");
968 IEMOP_HLP_MIN_286();
969 IEMOP_HLP_NO_LOCK_PREFIX();
970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
971 {
972 switch (pIemCpu->enmEffOpSize)
973 {
974 case IEMMODE_16BIT:
975 IEM_MC_BEGIN(0, 1);
976 IEM_MC_LOCAL(uint16_t, u16Tmp);
977 IEM_MC_FETCH_CR0_U16(u16Tmp);
978#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
979 if (pIemCpu->uTargetCpu == IEMTARGETCPU_286)
980 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); /* Reserved bits observed all set on real hw. */
981#endif
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1017 IEM_MC_ADVANCE_RIP();
1018 IEM_MC_END();
1019 return VINF_SUCCESS;
1020 }
1021}
1022
1023
/**
 * Opcode 0x0f 0x01 /6 - LMSW: load the machine status word.
 *
 * Fetches a 16-bit value from register or memory and defers the actual CR0
 * update (privilege checks, which bits may change, etc.) to iemCImpl_lmsw.
 */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1052
1053
/**
 * Opcode 0x0f 0x01 /7 (memory form) - INVLPG: invalidate the TLB entry for
 * the linear address of the memory operand.  Only the effective address is
 * decoded here; the actual invalidation is done by iemCImpl_invlpg.
 */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1067
1068
/**
 * Opcode 0x0f 0x01 /7, mod=3, rm=0 (i.e. 0x0f 0x01 0xf8) - SWAPGS.
 * 64-bit mode only; the work is deferred to iemCImpl_swapgs.
 */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1077
1078
/**
 * Opcode 0x0f 0x01 /7, mod=3, rm=1 (i.e. 0x0f 0x01 0xf9) - RDTSCP.
 * Not implemented yet; complains and reports VERR_IEM_INSTR_NOT_IMPLEMENTED.
 */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1086
1087
/**
 * Opcode 0x0f 0x01 - Group 7 dispatcher.
 *
 * The reg field selects the instruction; for reg values where mod=3 encodes
 * separate instructions (VMX, MONITOR/MWAIT, XGETBV/XSETBV, SVM,
 * SWAPGS/RDTSCP), the rm field is decoded as well.
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* mod != 3: SGDT; mod == 3: VMX instructions (0xc1..0xc4). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* mod != 3: SIDT; mod == 3: MONITOR (0xc8) / MWAIT (0xc9). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* mod != 3: LGDT; mod == 3: XGETBV (0xd0) / XSETBV (0xd1). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* mod != 3: LIDT; mod == 3: AMD SVM instructions (0xd8..0xdf). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all 8 rm values covered above */
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE(); /* /5 is undefined */

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* mod != 3: INVLPG; mod == 3: SWAPGS (0xf8) / RDTSCP (0xf9). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1164
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * Fetches a 16-bit selector from register or memory and defers the actual
 * access-rights / limit loading and ZF update to iemCImpl_LarLsl_u16/u64.
 * The 32-bit and 64-bit operand sizes share the 64-bit implementation.
 *
 * Invalid in real and V86 mode (IEMOP_HLP_NO_REAL_OR_V86_MODE).
 *
 * @param fIsLar  true for LAR, false for LSL.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: a 16-bit selector read. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1266
1267
1268
/** Opcode 0x0f 0x02 - LAR: load access rights; shares the worker with LSL. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1275
1276
/** Opcode 0x0f 0x03 - LSL: load segment limit; shares the worker with LAR. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1283
1284
/** Opcode 0x0f 0x05 - SYSCALL; fully deferred to iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1292
1293
/** Opcode 0x0f 0x06 - CLTS: clear CR0.TS; deferred to iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1301
1302
/** Opcode 0x0f 0x07 - SYSRET; fully deferred to iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1310
1311
/** Opcode 0x0f 0x08 - INVD; not implemented yet (plain stub). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1315
1316
1317/** Opcode 0x0f 0x09. */
1318FNIEMOP_DEF(iemOp_wbinvd)
1319{
1320 IEMOP_MNEMONIC("wbinvd");
1321 IEMOP_HLP_MIN_486();
1322 IEMOP_HLP_NO_LOCK_PREFIX();
1323 IEM_MC_BEGIN(0, 0);
1324 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1325 IEM_MC_ADVANCE_RIP();
1326 IEM_MC_END();
1327 return VINF_SUCCESS; /* ignore for now */
1328}
1329
1330
/** Opcode 0x0f 0x0b - UD2: architecturally guaranteed invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1337
/**
 * Opcode 0x0f 0x0d - AMD Group P (3DNow! PREFETCH/PREFETCHW).
 *
 * Raises \#UD unless the guest CPU reports the 3DNow!-prefetch feature, and
 * also for the register (mod=3) form.  The memory form decodes the effective
 * address and is otherwise treated as a NOP.
 */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* /3 is a PREFETCHW alias */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1377
1378
1379/** Opcode 0x0f 0x0e. */
1380FNIEMOP_STUB(iemOp_femms);
1381
1382
1383/** Opcode 0x0f 0x0f 0x0c. */
1384FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1385
1386/** Opcode 0x0f 0x0f 0x0d. */
1387FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1388
1389/** Opcode 0x0f 0x0f 0x1c. */
1390FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1391
1392/** Opcode 0x0f 0x0f 0x1d. */
1393FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1394
1395/** Opcode 0x0f 0x0f 0x8a. */
1396FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1397
1398/** Opcode 0x0f 0x0f 0x8e. */
1399FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1400
1401/** Opcode 0x0f 0x0f 0x90. */
1402FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1403
1404/** Opcode 0x0f 0x0f 0x94. */
1405FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1406
1407/** Opcode 0x0f 0x0f 0x96. */
1408FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1409
1410/** Opcode 0x0f 0x0f 0x97. */
1411FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1412
1413/** Opcode 0x0f 0x0f 0x9a. */
1414FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1415
1416/** Opcode 0x0f 0x0f 0x9e. */
1417FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1418
1419/** Opcode 0x0f 0x0f 0xa0. */
1420FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1421
1422/** Opcode 0x0f 0x0f 0xa4. */
1423FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1424
1425/** Opcode 0x0f 0x0f 0xa6. */
1426FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1427
1428/** Opcode 0x0f 0x0f 0xa7. */
1429FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1430
1431/** Opcode 0x0f 0x0f 0xaa. */
1432FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1433
1434/** Opcode 0x0f 0x0f 0xae. */
1435FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1436
1437/** Opcode 0x0f 0x0f 0xb0. */
1438FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1439
1440/** Opcode 0x0f 0x0f 0xb4. */
1441FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1442
1443/** Opcode 0x0f 0x0f 0xb6. */
1444FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1445
1446/** Opcode 0x0f 0x0f 0xb7. */
1447FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1448
1449/** Opcode 0x0f 0x0f 0xbb. */
1450FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1451
1452/** Opcode 0x0f 0x0f 0xbf. */
1453FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1454
1455
/**
 * Opcode 0x0f 0x0f - 3DNow! escape.
 *
 * The actual operation is selected by an extra opcode byte read here; raises
 * \#UD when the guest CPU does not report 3DNow! or when the sub-opcode is
 * unassigned.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1497
1498
/* SSE/SSE2 move instructions 0x0f 0x10..0x17 - all unimplemented stubs.
   The operand-size/rep prefixes select between the variants in each name. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1515
1516
/**
 * Opcode 0x0f 0x18 - Group 16 (PREFETCHNTA/T0/T1/T2 hints).
 *
 * Memory forms decode the effective address and are treated as NOPs; the
 * register (mod=3) form raises \#UD here.
 * NOTE(review): some CPUs reportedly treat the mod=3 form as a NOP too -
 * confirm against hardware before relying on the \#UD behaviour.
 */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1548
1549
/**
 * Opcode 0x0f 0x19..0x1f - multi-byte NOP (NOP Ev).
 * Decodes the ModR/M (including the effective address for memory forms)
 * and does nothing else.
 */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: the address is still calculated (may fault on
           decode-time issues) but no access is performed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1572
1573
/**
 * Opcode 0x0f 0x20 - MOV Rd,Cd: read a control register.
 *
 * The mod field and operand-size overrides are ignored; the operand size is
 * forced to the native width.  A LOCK prefix encodes CR8 access on CPUs with
 * the alternative-encoding feature; otherwise only CR0/2/3/4/8 are valid.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1605
1606
/**
 * Opcode 0x0f 0x21 - MOV Rd,Dd: read a debug register.
 *
 * REX.R (which would select DR8..DR15) raises \#UD.
 * NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX while the 0x23 counterpart
 * uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX - confirm whether that
 * asymmetry is intentional.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1620
1621
/**
 * Opcode 0x0f 0x22 - MOV Cd,Rd: write a control register.
 *
 * Mirrors iemOp_mov_Rd_Cd: mod and operand-size overrides are ignored, a
 * LOCK prefix may encode CR8, and only CR0/2/3/4/8 are accepted.  The write
 * itself (and its privilege checks) is deferred to iemCImpl_mov_Cd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1653
1654
/**
 * Opcode 0x0f 0x23 - MOV Dd,Rd: write a debug register.
 * REX.R raises \#UD; the write is deferred to iemCImpl_mov_Dd_Rd.
 */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1668
1669
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers); invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1678
1679
/** Opcode 0x0f 0x26 - MOV Td,Rd (test registers); invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1688
1689
1690/** Opcode 0x0f 0x28. */
1691FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
1692/** Opcode 0x0f 0x29. */
1693FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
1694/** Opcode 0x0f 0x2a. */
1695FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1696/** Opcode 0x0f 0x2b. */
1697FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
1698/** Opcode 0x0f 0x2c. */
1699FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
1700/** Opcode 0x0f 0x2d. */
1701FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
1702/** Opcode 0x0f 0x2e. */
1703FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
1704/** Opcode 0x0f 0x2f. */
1705FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1706
1707
/** Opcode 0x0f 0x30 - WRMSR; fully deferred to iemCImpl_wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1715
1716
/** Opcode 0x0f 0x31 - RDTSC; fully deferred to iemCImpl_rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1724
1725
/** Opcode 0x0f 0x32 - RDMSR; fully deferred to iemCImpl_rdmsr.
 * (Comment fixed: RDMSR is 0x0f 0x32, not 0x33, per the Intel opcode map.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1733
1734
/** Opcode 0x0f 0x33 - RDPMC; not implemented.
 * (Comment fixed: RDPMC is 0x0f 0x33, not 0x34, per the Intel opcode map.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1749
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register and memory source forms are handled for 16/32/64-bit operand
 * sizes.  Note that in the 32-bit case the destination's high dword is
 * cleared even when the condition is false (the IEM_MC_ELSE branch), and
 * for the memory forms the source is always read regardless of the
 * condition - matching a plain register-width load.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1850
1851
1852
/** Opcode 0x0f 0x40 - CMOVO: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - CMOVNO: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - CMOVC/CMOVB: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - CMOVNC/CMOVAE: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - CMOVE/CMOVZ: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - CMOVBE: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
1915
1916
/** Opcode 0x0f 0x48 - CMOVS: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - CMOVNS: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - CMOVP: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - CMOVNP: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - CMOVL: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - CMOVLE: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1981
/* SSE/SSE2 arithmetic instructions 0x0f 0x50..0x5f - all unimplemented stubs. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2014
2015
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *     pxxxx xmm1, xmm2/mem128
 *     pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit memory
 * access for SSE (only the low 64 bits of the aligned operand are fetched).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no SIMD prefix
       the MMX form; REPZ/REPNZ encodings are invalid here. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Low qword only, but with full 128-bit alignment checking. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX variant (e.g. punpcklqdq) -> #UD */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2122
2123
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Low-half byte interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2130
2131
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Low-half word interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2138
2139
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Low-half dword interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2146
2147
/* Pack / packed-compare-greater-than opcodes - all unimplemented stubs. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2158
2159
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *     pxxxx xmm1, xmm2/mem128
 *     pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no SIMD prefix
       the MMX form; REPZ/REPNZ encodings are invalid here. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX variant (e.g. punpckhqdq) -> #UD */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2266
2267
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* High-half byte interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2274
2275
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* High-half word interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2282
2283
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* High-half dword interleave; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2290
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq); /* not implemented yet */
2293
2294
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2-only instruction - presumably g_iemAImpl_punpcklqdq has a NULL
       pfnU64 so the worker raises #UD for the MMX encoding; confirm table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2301
2302
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2-only instruction - presumably g_iemAImpl_punpckhqdq has a NULL
       pfnU64 so the worker raises #UD for the MMX encoding; confirm table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2309
2310
/** Opcode 0x0f 0x6e - movd/movq from general register or memory into an
 *  MMX (no prefix) or XMM (0x66 prefix) register.  REX.W selects the 64-bit
 *  (movq) form; the XMM destination is zero-extended to 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2411
2412
/** Opcode 0x0f 0x6f - movq (MMX), movdqa (0x66) or movdqu (0xf3): load a
 *  full MMX/XMM register from a register or memory operand.  movdqa enforces
 *  16-byte alignment on the memory operand, movdqu does not. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share all other decoding */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2506
2507
/** Opcode 0x0f 0x70. The immediate here is evil!
 *  pshufw (MMX ext), pshufd (0x66), pshuflw (0xf2) and pshufhw (0xf3);
 *  the shuffle-control immediate byte follows the ModR/M operand, so in the
 *  memory forms it must be fetched only after the effective address bytes. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE forms share the decode; only the assembly worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2632
2633
/* Grp12 (0x0f 0x71) immediate-count word shifts - all unimplemented stubs. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2651
2652
2653/** Opcode 0x0f 0x71. */
2654FNIEMOP_DEF(iemOp_Grp12)
2655{
2656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2657 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2658 return IEMOP_RAISE_INVALID_OPCODE();
2659 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2660 {
2661 case 0: case 1: case 3: case 5: case 7:
2662 return IEMOP_RAISE_INVALID_OPCODE();
2663 case 2:
2664 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2665 {
2666 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2667 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2668 default: return IEMOP_RAISE_INVALID_OPCODE();
2669 }
2670 case 4:
2671 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2672 {
2673 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2674 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2675 default: return IEMOP_RAISE_INVALID_OPCODE();
2676 }
2677 case 6:
2678 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2679 {
2680 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2681 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2682 default: return IEMOP_RAISE_INVALID_OPCODE();
2683 }
2684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2685 }
2686}
2687
2688
/* Grp13 (0x0f 0x72) immediate-count dword shifts - all unimplemented stubs. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2706
2707
2708/** Opcode 0x0f 0x72. */
2709FNIEMOP_DEF(iemOp_Grp13)
2710{
2711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2712 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2713 return IEMOP_RAISE_INVALID_OPCODE();
2714 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2715 {
2716 case 0: case 1: case 3: case 5: case 7:
2717 return IEMOP_RAISE_INVALID_OPCODE();
2718 case 2:
2719 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2720 {
2721 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2722 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2723 default: return IEMOP_RAISE_INVALID_OPCODE();
2724 }
2725 case 4:
2726 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2727 {
2728 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2729 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2730 default: return IEMOP_RAISE_INVALID_OPCODE();
2731 }
2732 case 6:
2733 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2734 {
2735 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2736 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2737 default: return IEMOP_RAISE_INVALID_OPCODE();
2738 }
2739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2740 }
2741}
2742
2743
/* Grp14 (0x0f 0x73) immediate-count qword/dqword shifts - all unimplemented stubs. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2761
2762
2763/** Opcode 0x0f 0x73. */
2764FNIEMOP_DEF(iemOp_Grp14)
2765{
2766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2767 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2768 return IEMOP_RAISE_INVALID_OPCODE();
2769 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2770 {
2771 case 0: case 1: case 4: case 5:
2772 return IEMOP_RAISE_INVALID_OPCODE();
2773 case 2:
2774 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2775 {
2776 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2777 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2778 default: return IEMOP_RAISE_INVALID_OPCODE();
2779 }
2780 case 3:
2781 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2782 {
2783 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2784 default: return IEMOP_RAISE_INVALID_OPCODE();
2785 }
2786 case 6:
2787 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2788 {
2789 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2790 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2791 default: return IEMOP_RAISE_INVALID_OPCODE();
2792 }
2793 case 7:
2794 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2795 {
2796 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2797 default: return IEMOP_RAISE_INVALID_OPCODE();
2798 }
2799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2800 }
2801}
2802
2803
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *     pxxx    mm1, mm2/mem64
 *     pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand size prefix (0x66) selects the SSE form, no SIMD prefix
       the MMX form; REPZ/REPNZ encodings are invalid here. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2905
2906
/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Packed byte equality compare; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2913
2914
/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Packed word equality compare; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2921
2922
/** Opcode 0x0f 0x76.
 *  NOTE(review): 'pcmped' in the function name is a typo for 'pcmpeqd';
 *  kept as-is since the opcode map references this identifier. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Packed dword equality compare; prefix selection done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2929
2930
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms); /* not implemented yet */
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17); /* decodes to invalid opcode here */
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite); /* decodes to invalid opcode here */
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps); /* SSE3 - not implemented yet */
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps); /* SSE3 - not implemented yet */
2941
2942
/** Opcode 0x0f 0x7e - movd/movq Ey,Pd (MMX) / movd/movq Ey,Vy (SSE, 66 prefix).
 * Stores the low dword (or qword with REX.W) of an MMX/XMM register to a
 * general register or to memory.  The mandatory prefix selects the form. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: 64-bit move of the XMM register's low qword. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* 32-bit move of the XMM register's low dword. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM - note: effective address must be calculated before
                   decoding is declared done. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX - note: no REX extension is applied to the MMX register index. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* Other mandatory prefix combinations (F2h, F3h alone, etc.) are invalid here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3049
3050
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX) / movdqa Wdq,Vdq (66) / movdqu Wdq,Vdq (F3).
 * Register/memory store forms; the mandatory prefix picks the variant. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - movdqa and movdqu share the code below, differing only in alignment checking. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.  Only the aligned variant enforces 16-byte alignment.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3145
3146
3147
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3182
3183
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3218
3219
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3254
3255
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3290
3291
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3326
3327
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3362
3363
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3398
3399
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3434
3435
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3470
3471
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3506
3507
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3542
3543
3544/** Opcode 0x0f 0x8b. */
3545FNIEMOP_DEF(iemOp_jnp_Jv)
3546{
3547 IEMOP_MNEMONIC("jo Jv");
3548 IEMOP_HLP_MIN_386();
3549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3550 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3551 {
3552 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3553 IEMOP_HLP_NO_LOCK_PREFIX();
3554
3555 IEM_MC_BEGIN(0, 0);
3556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3557 IEM_MC_ADVANCE_RIP();
3558 } IEM_MC_ELSE() {
3559 IEM_MC_REL_JMP_S16(i16Imm);
3560 } IEM_MC_ENDIF();
3561 IEM_MC_END();
3562 }
3563 else
3564 {
3565 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3566 IEMOP_HLP_NO_LOCK_PREFIX();
3567
3568 IEM_MC_BEGIN(0, 0);
3569 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3570 IEM_MC_ADVANCE_RIP();
3571 } IEM_MC_ELSE() {
3572 IEM_MC_REL_JMP_S32(i32Imm);
3573 } IEM_MC_ENDIF();
3574 IEM_MC_END();
3575 }
3576 return VINF_SUCCESS;
3577}
3578
3579
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3614
3615
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3650
3651
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3686
3687
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3722
3723
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3763
3764
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3804
3805
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3845
3846
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set byte to 1 if not carry (CF=0), else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3886
3887
/** Opcode 0x0f 0x94 - sete/setz Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3927
3928
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if not equal/not zero (ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3968
3969
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4009
4010
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set byte to 1 if above (CF=0 and ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4050
4051
/** Opcode 0x0f 0x98 - sets Eb: set byte to 1 if sign (SF=1), else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4091
4092
/** Opcode 0x0f 0x99 - setns Eb: set byte to 1 if not sign (SF=0), else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4132
4133
4134/** Opcode 0x0f 0x9a. */
4135FNIEMOP_DEF(iemOp_setp_Eb)
4136{
4137 IEMOP_MNEMONIC("setnp Eb");
4138 IEMOP_HLP_MIN_386();
4139 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4140 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4141
4142 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4143 * any way. AMD says it's "unused", whatever that means. We're
4144 * ignoring for now. */
4145 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4146 {
4147 /* register target */
4148 IEM_MC_BEGIN(0, 0);
4149 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4151 } IEM_MC_ELSE() {
4152 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4153 } IEM_MC_ENDIF();
4154 IEM_MC_ADVANCE_RIP();
4155 IEM_MC_END();
4156 }
4157 else
4158 {
4159 /* memory target */
4160 IEM_MC_BEGIN(0, 1);
4161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4163 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4164 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4165 } IEM_MC_ELSE() {
4166 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4167 } IEM_MC_ENDIF();
4168 IEM_MC_ADVANCE_RIP();
4169 IEM_MC_END();
4170 }
4171 return VINF_SUCCESS;
4172}
4173
4174
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP r/m8: stores 1 when PF is clear, 0 when PF is set (inverted IF below). */
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4214
4215
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL r/m8: stores 1 when SF != OF (signed less-than), 0 otherwise. */
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4255
4256
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE r/m8: stores 1 when SF == OF (signed greater-or-equal);
       the IF below tests SF != OF and stores the inverted values. */
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4296
4297
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE r/m8: stores 1 when ZF=1 or SF != OF (signed less-or-equal). */
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4337
4338
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG r/m8: stores 1 when ZF=0 and SF == OF (signed greater);
       the IF below tests the LE condition and stores the inverted values. */
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4378
4379
4380/**
4381 * Common 'push segment-register' helper.
4382 */
4383FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4384{
4385 IEMOP_HLP_NO_LOCK_PREFIX();
4386 if (iReg < X86_SREG_FS)
4387 IEMOP_HLP_NO_64BIT();
4388 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4389
4390 switch (pIemCpu->enmEffOpSize)
4391 {
4392 case IEMMODE_16BIT:
4393 IEM_MC_BEGIN(0, 1);
4394 IEM_MC_LOCAL(uint16_t, u16Value);
4395 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4396 IEM_MC_PUSH_U16(u16Value);
4397 IEM_MC_ADVANCE_RIP();
4398 IEM_MC_END();
4399 break;
4400
4401 case IEMMODE_32BIT:
4402 IEM_MC_BEGIN(0, 1);
4403 IEM_MC_LOCAL(uint32_t, u32Value);
4404 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4405 IEM_MC_PUSH_U32_SREG(u32Value);
4406 IEM_MC_ADVANCE_RIP();
4407 IEM_MC_END();
4408 break;
4409
4410 case IEMMODE_64BIT:
4411 IEM_MC_BEGIN(0, 1);
4412 IEM_MC_LOCAL(uint64_t, u64Value);
4413 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4414 IEM_MC_PUSH_U64(u64Value);
4415 IEM_MC_ADVANCE_RIP();
4416 IEM_MC_END();
4417 break;
4418 }
4419
4420 return VINF_SUCCESS;
4421}
4422
4423
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - valid in 64-bit mode too (see iemOpCommonPushSReg). */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4432
4433
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - segment loading has side effects, so defer to the C implementation. */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4442
4443
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4452
4453
4454/**
4455 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4456 * iemOp_bts_Ev_Gv.
4457 */
4458FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4459{
4460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4461 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4462
4463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4464 {
4465 /* register destination. */
4466 IEMOP_HLP_NO_LOCK_PREFIX();
4467 switch (pIemCpu->enmEffOpSize)
4468 {
4469 case IEMMODE_16BIT:
4470 IEM_MC_BEGIN(3, 0);
4471 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4472 IEM_MC_ARG(uint16_t, u16Src, 1);
4473 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4474
4475 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4476 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4477 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4478 IEM_MC_REF_EFLAGS(pEFlags);
4479 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4480
4481 IEM_MC_ADVANCE_RIP();
4482 IEM_MC_END();
4483 return VINF_SUCCESS;
4484
4485 case IEMMODE_32BIT:
4486 IEM_MC_BEGIN(3, 0);
4487 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4488 IEM_MC_ARG(uint32_t, u32Src, 1);
4489 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4490
4491 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4492 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4493 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4494 IEM_MC_REF_EFLAGS(pEFlags);
4495 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4496
4497 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4498 IEM_MC_ADVANCE_RIP();
4499 IEM_MC_END();
4500 return VINF_SUCCESS;
4501
4502 case IEMMODE_64BIT:
4503 IEM_MC_BEGIN(3, 0);
4504 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4505 IEM_MC_ARG(uint64_t, u64Src, 1);
4506 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4507
4508 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4509 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4510 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4511 IEM_MC_REF_EFLAGS(pEFlags);
4512 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4513
4514 IEM_MC_ADVANCE_RIP();
4515 IEM_MC_END();
4516 return VINF_SUCCESS;
4517
4518 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4519 }
4520 }
4521 else
4522 {
4523 /* memory destination. */
4524
4525 uint32_t fAccess;
4526 if (pImpl->pfnLockedU16)
4527 fAccess = IEM_ACCESS_DATA_RW;
4528 else /* BT */
4529 {
4530 IEMOP_HLP_NO_LOCK_PREFIX();
4531 fAccess = IEM_ACCESS_DATA_R;
4532 }
4533
4534 NOREF(fAccess);
4535
4536 /** @todo test negative bit offsets! */
4537 switch (pIemCpu->enmEffOpSize)
4538 {
4539 case IEMMODE_16BIT:
4540 IEM_MC_BEGIN(3, 2);
4541 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4542 IEM_MC_ARG(uint16_t, u16Src, 1);
4543 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4545 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4546
4547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4548 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4549 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4550 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4551 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4552 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4553 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4554 IEM_MC_FETCH_EFLAGS(EFlags);
4555
4556 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4557 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4558 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4559 else
4560 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4562
4563 IEM_MC_COMMIT_EFLAGS(EFlags);
4564 IEM_MC_ADVANCE_RIP();
4565 IEM_MC_END();
4566 return VINF_SUCCESS;
4567
4568 case IEMMODE_32BIT:
4569 IEM_MC_BEGIN(3, 2);
4570 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4571 IEM_MC_ARG(uint32_t, u32Src, 1);
4572 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4574 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4575
4576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4577 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4578 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4579 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4580 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4581 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4582 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4583 IEM_MC_FETCH_EFLAGS(EFlags);
4584
4585 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4586 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4587 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4588 else
4589 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4590 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4591
4592 IEM_MC_COMMIT_EFLAGS(EFlags);
4593 IEM_MC_ADVANCE_RIP();
4594 IEM_MC_END();
4595 return VINF_SUCCESS;
4596
4597 case IEMMODE_64BIT:
4598 IEM_MC_BEGIN(3, 2);
4599 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4600 IEM_MC_ARG(uint64_t, u64Src, 1);
4601 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4603 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4604
4605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4606 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4607 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4608 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4609 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4610 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4611 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4612 IEM_MC_FETCH_EFLAGS(EFlags);
4613
4614 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4615 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4617 else
4618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4619 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4620
4621 IEM_MC_COMMIT_EFLAGS(EFlags);
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 return VINF_SUCCESS;
4625
4626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4627 }
4628 }
4629}
4630
4631
4632/** Opcode 0x0f 0xa3. */
4633FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4634{
4635 IEMOP_MNEMONIC("bt Gv,Gv");
4636 IEMOP_HLP_MIN_386();
4637 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4638}
4639
4640
4641/**
4642 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
4643 */
4644FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
4645{
4646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4647 IEMOP_HLP_NO_LOCK_PREFIX();
4648 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4649
4650 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4651 {
4652 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4653 IEMOP_HLP_NO_LOCK_PREFIX();
4654
4655 switch (pIemCpu->enmEffOpSize)
4656 {
4657 case IEMMODE_16BIT:
4658 IEM_MC_BEGIN(4, 0);
4659 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4660 IEM_MC_ARG(uint16_t, u16Src, 1);
4661 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4662 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4663
4664 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4665 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4666 IEM_MC_REF_EFLAGS(pEFlags);
4667 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4668
4669 IEM_MC_ADVANCE_RIP();
4670 IEM_MC_END();
4671 return VINF_SUCCESS;
4672
4673 case IEMMODE_32BIT:
4674 IEM_MC_BEGIN(4, 0);
4675 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4676 IEM_MC_ARG(uint32_t, u32Src, 1);
4677 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4678 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4679
4680 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4681 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4682 IEM_MC_REF_EFLAGS(pEFlags);
4683 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4684
4685 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4686 IEM_MC_ADVANCE_RIP();
4687 IEM_MC_END();
4688 return VINF_SUCCESS;
4689
4690 case IEMMODE_64BIT:
4691 IEM_MC_BEGIN(4, 0);
4692 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4693 IEM_MC_ARG(uint64_t, u64Src, 1);
4694 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4695 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4696
4697 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4698 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4699 IEM_MC_REF_EFLAGS(pEFlags);
4700 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4701
4702 IEM_MC_ADVANCE_RIP();
4703 IEM_MC_END();
4704 return VINF_SUCCESS;
4705
4706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4707 }
4708 }
4709 else
4710 {
4711 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4712
4713 switch (pIemCpu->enmEffOpSize)
4714 {
4715 case IEMMODE_16BIT:
4716 IEM_MC_BEGIN(4, 2);
4717 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4718 IEM_MC_ARG(uint16_t, u16Src, 1);
4719 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4722
4723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4724 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4725 IEM_MC_ASSIGN(cShiftArg, cShift);
4726 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4727 IEM_MC_FETCH_EFLAGS(EFlags);
4728 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4729 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4730
4731 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4732 IEM_MC_COMMIT_EFLAGS(EFlags);
4733 IEM_MC_ADVANCE_RIP();
4734 IEM_MC_END();
4735 return VINF_SUCCESS;
4736
4737 case IEMMODE_32BIT:
4738 IEM_MC_BEGIN(4, 2);
4739 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4740 IEM_MC_ARG(uint32_t, u32Src, 1);
4741 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4742 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4744
4745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4746 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4747 IEM_MC_ASSIGN(cShiftArg, cShift);
4748 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4749 IEM_MC_FETCH_EFLAGS(EFlags);
4750 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4751 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4752
4753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4754 IEM_MC_COMMIT_EFLAGS(EFlags);
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 return VINF_SUCCESS;
4758
4759 case IEMMODE_64BIT:
4760 IEM_MC_BEGIN(4, 2);
4761 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4762 IEM_MC_ARG(uint64_t, u64Src, 1);
4763 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4764 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4766
4767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4768 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4769 IEM_MC_ASSIGN(cShiftArg, cShift);
4770 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4771 IEM_MC_FETCH_EFLAGS(EFlags);
4772 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4773 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4774
4775 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4776 IEM_MC_COMMIT_EFLAGS(EFlags);
4777 IEM_MC_ADVANCE_RIP();
4778 IEM_MC_END();
4779 return VINF_SUCCESS;
4780
4781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4782 }
4783 }
4784}
4785
4786
4787/**
4788 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
4789 */
4790FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
4791{
4792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4793 IEMOP_HLP_NO_LOCK_PREFIX();
4794 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4795
4796 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4797 {
4798 IEMOP_HLP_NO_LOCK_PREFIX();
4799
4800 switch (pIemCpu->enmEffOpSize)
4801 {
4802 case IEMMODE_16BIT:
4803 IEM_MC_BEGIN(4, 0);
4804 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4805 IEM_MC_ARG(uint16_t, u16Src, 1);
4806 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4807 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4808
4809 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4810 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4811 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4812 IEM_MC_REF_EFLAGS(pEFlags);
4813 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4814
4815 IEM_MC_ADVANCE_RIP();
4816 IEM_MC_END();
4817 return VINF_SUCCESS;
4818
4819 case IEMMODE_32BIT:
4820 IEM_MC_BEGIN(4, 0);
4821 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4822 IEM_MC_ARG(uint32_t, u32Src, 1);
4823 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4824 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4825
4826 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4827 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4828 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4829 IEM_MC_REF_EFLAGS(pEFlags);
4830 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4831
4832 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4833 IEM_MC_ADVANCE_RIP();
4834 IEM_MC_END();
4835 return VINF_SUCCESS;
4836
4837 case IEMMODE_64BIT:
4838 IEM_MC_BEGIN(4, 0);
4839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4840 IEM_MC_ARG(uint64_t, u64Src, 1);
4841 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4842 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4843
4844 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4845 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4846 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4847 IEM_MC_REF_EFLAGS(pEFlags);
4848 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4849
4850 IEM_MC_ADVANCE_RIP();
4851 IEM_MC_END();
4852 return VINF_SUCCESS;
4853
4854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4855 }
4856 }
4857 else
4858 {
4859 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4860
4861 switch (pIemCpu->enmEffOpSize)
4862 {
4863 case IEMMODE_16BIT:
4864 IEM_MC_BEGIN(4, 2);
4865 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4866 IEM_MC_ARG(uint16_t, u16Src, 1);
4867 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4868 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4870
4871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4872 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4873 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4874 IEM_MC_FETCH_EFLAGS(EFlags);
4875 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4876 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4877
4878 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4879 IEM_MC_COMMIT_EFLAGS(EFlags);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_32BIT:
4885 IEM_MC_BEGIN(4, 2);
4886 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4887 IEM_MC_ARG(uint32_t, u32Src, 1);
4888 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4889 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4891
4892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4893 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4894 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4895 IEM_MC_FETCH_EFLAGS(EFlags);
4896 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4897 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4898
4899 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4900 IEM_MC_COMMIT_EFLAGS(EFlags);
4901 IEM_MC_ADVANCE_RIP();
4902 IEM_MC_END();
4903 return VINF_SUCCESS;
4904
4905 case IEMMODE_64BIT:
4906 IEM_MC_BEGIN(4, 2);
4907 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4908 IEM_MC_ARG(uint64_t, u64Src, 1);
4909 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4910 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4912
4913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4914 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4915 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4916 IEM_MC_FETCH_EFLAGS(EFlags);
4917 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4918 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4919
4920 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4921 IEM_MC_COMMIT_EFLAGS(EFlags);
4922 IEM_MC_ADVANCE_RIP();
4923 IEM_MC_END();
4924 return VINF_SUCCESS;
4925
4926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4927 }
4928 }
4929}
4930
4931
4932
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4940
4941
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4949
4950
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - valid in 64-bit mode too (see iemOpCommonPushSReg). */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4959
4960
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - segment loading has side effects, so defer to the C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4969
4970
4971/** Opcode 0x0f 0xaa. */
4972FNIEMOP_STUB(iemOp_rsm);
4973//IEMOP_HLP_MIN_386();
4974
4975
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4983
4984
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4992
4993
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5001
5002
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    /* #UD unless the guest CPU advertises FXSAVE/FXRSTOR support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address is calculated before the decoding-done check below. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5021
5022
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    /* #UD unless the guest CPU advertises FXSAVE/FXRSTOR support. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address is calculated before the decoding-done check below. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5041
5042
5043/** Opcode 0x0f 0xae mem/2. */
5044FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5045
5046/** Opcode 0x0f 0xae mem/3. */
5047FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5048
5049/** Opcode 0x0f 0xae mem/4. */
5050FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5051
5052/** Opcode 0x0f 0xae mem/5. */
5053FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5054
5055/** Opcode 0x0f 0xae mem/6. */
5056FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5057
5058/** Opcode 0x0f 0xae mem/7. */
5059FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5060
5061
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* #UD when the guest CPU lacks SSE2 (which introduced LFENCE). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use a real LFENCE when the host supports it, else a generic fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5079
5080
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* #UD when the guest CPU lacks SSE2 (which introduced MFENCE). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use a real MFENCE when the host supports it, else a generic fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5098
5099
/** Opcode 0x0f 0xae 11b/7. SFENCE — store fence.
 * @note Gated on guest SSE2 like the siblings here, although SFENCE itself
 *       arrived with SSE — presumably deliberate simplification; confirm. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Real SFENCE on SSE2-capable hosts, generic fence fallback otherwise. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5117
5118
/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE — currently decodes as \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE — currently decodes as \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE — currently decodes as \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE — currently decodes as \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5130
5131
/** Opcode 0x0f 0xae. Group 15 dispatcher.
 *
 * The memory forms (mod != 3) dispatch purely on the ModR/M reg field
 * (fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush).  The
 * register forms (mod == 3) additionally dispatch on the prefix bytes:
 * no prefix selects the fences, F3 (REPZ) selects RD/WR FS/GS base. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: reg field selects the instruction. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: prefix bytes select the table. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: the fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached; every case above returns */

            case IEM_OP_PRF_REPZ: /* F3 prefix: FS/GS base access. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached; every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5191
5192
/** Opcode 0x0f 0xaf. IMUL Gv,Ev — two-operand signed multiply, implemented
 *  via the common rv,rm binary-operator decoder and g_iemAImpl_imul_two. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5201
5202
/** Opcode 0x0f 0xb0. CMPXCHG Eb,Gb — compare AL with Eb; on match store Gb
 *  into Eb, otherwise load Eb into AL (done inside the assembly helper). */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest register refs. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        /* LOCK prefix selects the interlocked helper variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map Eb read/write, run the helper on a local
           copy of AL, then commit memory, EFLAGS and AL back in that order. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL is written back unconditionally; the helper is expected to have
           updated u8Al on a compare mismatch (see iemAImpl_cmpxchg_u8). */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5261
/** Opcode 0x0f 0xb1. CMPXCHG Ev,Gv — word/dword/qword compare-and-exchange
 *  against rAX.  Mirrors iemOp_cmpxchg_Eb_Gb for each effective operand size;
 *  on 32-bit hosts (RT_ARCH_X86) the 64-bit source is passed by reference
 *  because the assembly helper cannot take a 64-bit value argument there. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                /* LOCK prefix selects the interlocked helper variant. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear bits 63:32 in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map Ev read/write, run the helper on a local
           copy of rAX, then commit memory, EFLAGS and rAX back. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5455
5456
/**
 * Common worker for LSS/LFS/LGS (and friends): loads a far pointer
 * (offset + selector) from memory and hands both, together with the target
 * segment and general registers, to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit offset followed by the 16-bit selector at +2. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* 32-bit offset followed by the 16-bit selector at +4. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit offset followed by the 16-bit selector at +8; AMD CPUs
               are emulated as loading only a sign-extended 32-bit offset. */
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5518
5519
/** Opcode 0x0f 0xb2. LSS Gv,Mp — load far pointer into SS:Gv.
 *  Register operands are invalid (#UD); the work is shared with LFS/LGS. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5530
5531
5532/** Opcode 0x0f 0xb3. */
5533FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5534{
5535 IEMOP_MNEMONIC("btr Ev,Gv");
5536 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5537}
5538
5539
/** Opcode 0x0f 0xb4. LFS Gv,Mp — load far pointer into FS:Gv.
 *  Register operands are invalid (#UD); the work is shared with LSS/LGS. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5550
5551
/** Opcode 0x0f 0xb5. LGS Gv,Mp — load far pointer into GS:Gv.
 *  Register operands are invalid (#UD); the work is shared with LSS/LFS. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5562
5563
/** Opcode 0x0f 0xb6. MOVZX Gv,Eb — zero-extend a byte from register or
 *  memory into a 16/32/64-bit general register. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5654
5655
/** Opcode 0x0f 0xb7. MOVZX Gv,Ew — zero-extend a word from register or
 *  memory into a 32/64-bit general register (16-bit and 32-bit effective
 *  operand sizes are treated the same: a 32-bit destination write). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5722
5723
/** Opcode 0x0f 0xb8. POPCNT Gv,Ev (F3 prefix) / JMPE — not implemented yet (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5726
5727
/** Opcode 0x0f 0xb9. Group 10 — reserved encoding (UD1); always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5734
5735
/** Opcode 0x0f 0xba. Group 8 — BT/BTS/BTR/BTC Ev,Ib (bit ops with an
 *  immediate bit offset); reg field values 0-3 are invalid encodings.
 *  The bit offset is masked to the operand width (0x0f/0x1f/0x3f), and for
 *  memory operands the immediate byte is fetched after the effective address
 *  (hence the extra byte reserved in IEM_MC_CALC_RM_EFF_ADDR). */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* Reserved encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear bits 63:32 in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads its operand and has no locked variant; the others
           map the memory read/write and may take the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5898
5899
/** Opcode 0x0f 0xbb. BTC Ev,Gv — bit test and complement, via the common
 *  Ev,Gv bit-operation decoder and the g_iemAImpl_btc worker table. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5907
5908
/** Opcode 0x0f 0xbc. BSF Gv,Ev — bit scan forward, via the common rv,rm
 *  binary-operator decoder and the g_iemAImpl_bsf worker table. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is defined after BSF; the rest are undefined per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5917
5918
/** Opcode 0x0f 0xbd. BSR Gv,Ev — bit scan reverse, via the common rv,rm
 *  binary-operator decoder and the g_iemAImpl_bsr worker table. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is defined after BSR; the rest are undefined per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5927
5928
/** Opcode 0x0f 0xbe. MOVSX Gv,Eb — sign-extend a byte from register or
 *  memory into a 16/32/64-bit general register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6019
6020
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a word operand (register or memory) into a
       32/64-bit general purpose register. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: only the 32-bit and 64-bit destination widths are
           meaningful for a 16-bit source (see operand-size @todo above). */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6087
6088
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange the byte operands, then store their sum in the
       destination (register or memory); flags are set by the addition. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486(); /* XADD first appeared on the 80486. */
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy so the original
           register is only written back after the memory commit succeeds. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Select the locked worker when the LOCK prefix was decoded. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6147
6148
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange the operands, then store their sum in the
       destination (register or memory), at 16/32/64-bit operand size. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero the high dwords of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As in the byte variant: work on a local copy of the register operand
           and only write it back after the memory commit succeeds. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6301
/* Opcodes 0x0f 0xc2 through 0xc6 are not implemented yet; FNIEMOP_STUB
   emits a placeholder decoder for each. */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6316
6317
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the memory qword; if equal, store
       ECX:EBX there and set ZF, otherwise load the qword into EDX:EAX. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand pair into a local 64-bit union. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement pair. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the memory value in u64EaxEdx;
       write it back to EAX and EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6362
6363
/* Group 9 sub-encodings not implemented yet; FNIEMOP_UD_STUB_1 emits a
   placeholder that decodes to an invalid-opcode treatment. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6381
6382
6383/** Opcode 0x0f 0xc7. */
6384FNIEMOP_DEF(iemOp_Grp9)
6385{
6386 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6388 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6389 {
6390 case 0: case 2: case 3: case 4: case 5:
6391 return IEMOP_RAISE_INVALID_OPCODE();
6392 case 1:
6393 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6394 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6395 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6396 return IEMOP_RAISE_INVALID_OPCODE();
6397 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6398 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6399 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6400 case 6:
6401 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6402 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6403 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6404 {
6405 case 0:
6406 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6407 case IEM_OP_PRF_SIZE_OP:
6408 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6409 case IEM_OP_PRF_REPZ:
6410 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6411 default:
6412 return IEMOP_RAISE_INVALID_OPCODE();
6413 }
6414 case 7:
6415 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6416 {
6417 case 0:
6418 case IEM_OP_PRF_REPZ:
6419 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6420 default:
6421 return IEMOP_RAISE_INVALID_OPCODE();
6422 }
6423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6424 }
6425}
6426
6427
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general purpose register given by @a iReg at the current
 * effective operand size.  Shared by all eight 0x0f 0xc8..0xcf decoders.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): real CPUs leave the 16-bit BSWAP result undefined;
               the u16 worker defines some behavior for it here. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* 32-bit operand writes clear the high dword of the 64-bit reg. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6467
6468
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6479
6480
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6488
6489
6490/** Opcode 0x0f 0xca. */
6491FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6492{
6493 IEMOP_MNEMONIC("bswap rDX/r9");
6494 IEMOP_HLP_MIN_486();
6495 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6496}
6497
6498
6499/** Opcode 0x0f 0xcb. */
6500FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6501{
6502 IEMOP_MNEMONIC("bswap rBX/r9");
6503 IEMOP_HLP_MIN_486();
6504 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6505}
6506
6507
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6515
6516
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6524
6525
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6533
6534
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6542
6543
6544
/* Opcodes 0x0f 0xd0 through 0xd6 are not implemented yet (stubs). */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6559
6560
6561/** Opcode 0x0f 0xd7. */
6562FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6563{
6564 /* Docs says register only. */
6565 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6566 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6567 return IEMOP_RAISE_INVALID_OPCODE();
6568
6569 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6570 /** @todo testcase: Check that the instruction implicitly clears the high
6571 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6572 * and opcode modifications are made to work with the whole width (not
6573 * just 128). */
6574 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6575 {
6576 case IEM_OP_PRF_SIZE_OP: /* SSE */
6577 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6578 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6579 IEM_MC_BEGIN(2, 0);
6580 IEM_MC_ARG(uint64_t *, pDst, 0);
6581 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6582 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6583 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6584 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6585 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6586 IEM_MC_ADVANCE_RIP();
6587 IEM_MC_END();
6588 return VINF_SUCCESS;
6589
6590 case 0: /* MMX */
6591 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6592 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6593 IEM_MC_BEGIN(2, 0);
6594 IEM_MC_ARG(uint64_t *, pDst, 0);
6595 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6596 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6597 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6598 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6599 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6600 IEM_MC_ADVANCE_RIP();
6601 IEM_MC_END();
6602 return VINF_SUCCESS;
6603
6604 default:
6605 return IEMOP_RAISE_INVALID_OPCODE();
6606 }
6607}
6608
6609
/* Opcodes 0x0f 0xd8 through 0xee are not implemented yet (stubs). */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6656
6657
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: bitwise XOR of MMX (Pq,Qq) or SSE (Vdq,Wdq) operands; the shared
       full/full worker picks the MMX or SSE2 form from the decoded prefixes. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6664
6665
/* Opcodes 0x0f 0xf0 through 0xfe are not implemented yet (stubs). */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6696
6697
6698const PFNIEMOP g_apfnTwoByteMap[256] =
6699{
6700 /* 0x00 */ iemOp_Grp6,
6701 /* 0x01 */ iemOp_Grp7,
6702 /* 0x02 */ iemOp_lar_Gv_Ew,
6703 /* 0x03 */ iemOp_lsl_Gv_Ew,
6704 /* 0x04 */ iemOp_Invalid,
6705 /* 0x05 */ iemOp_syscall,
6706 /* 0x06 */ iemOp_clts,
6707 /* 0x07 */ iemOp_sysret,
6708 /* 0x08 */ iemOp_invd,
6709 /* 0x09 */ iemOp_wbinvd,
6710 /* 0x0a */ iemOp_Invalid,
6711 /* 0x0b */ iemOp_ud2,
6712 /* 0x0c */ iemOp_Invalid,
6713 /* 0x0d */ iemOp_nop_Ev_GrpP,
6714 /* 0x0e */ iemOp_femms,
6715 /* 0x0f */ iemOp_3Dnow,
6716 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6717 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6718 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6719 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6720 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6721 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6722 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6723 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6724 /* 0x18 */ iemOp_prefetch_Grp16,
6725 /* 0x19 */ iemOp_nop_Ev,
6726 /* 0x1a */ iemOp_nop_Ev,
6727 /* 0x1b */ iemOp_nop_Ev,
6728 /* 0x1c */ iemOp_nop_Ev,
6729 /* 0x1d */ iemOp_nop_Ev,
6730 /* 0x1e */ iemOp_nop_Ev,
6731 /* 0x1f */ iemOp_nop_Ev,
6732 /* 0x20 */ iemOp_mov_Rd_Cd,
6733 /* 0x21 */ iemOp_mov_Rd_Dd,
6734 /* 0x22 */ iemOp_mov_Cd_Rd,
6735 /* 0x23 */ iemOp_mov_Dd_Rd,
6736 /* 0x24 */ iemOp_mov_Rd_Td,
6737 /* 0x25 */ iemOp_Invalid,
6738 /* 0x26 */ iemOp_mov_Td_Rd,
6739 /* 0x27 */ iemOp_Invalid,
6740 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6741 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6742 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6743 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6744 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6745 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6746 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6747 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6748 /* 0x30 */ iemOp_wrmsr,
6749 /* 0x31 */ iemOp_rdtsc,
6750 /* 0x32 */ iemOp_rdmsr,
6751 /* 0x33 */ iemOp_rdpmc,
6752 /* 0x34 */ iemOp_sysenter,
6753 /* 0x35 */ iemOp_sysexit,
6754 /* 0x36 */ iemOp_Invalid,
6755 /* 0x37 */ iemOp_getsec,
6756 /* 0x38 */ iemOp_3byte_Esc_A4,
6757 /* 0x39 */ iemOp_Invalid,
6758 /* 0x3a */ iemOp_3byte_Esc_A5,
6759 /* 0x3b */ iemOp_Invalid,
6760 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6761 /* 0x3d */ iemOp_Invalid,
6762 /* 0x3e */ iemOp_Invalid,
6763 /* 0x3f */ iemOp_Invalid,
6764 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6765 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6766 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6767 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6768 /* 0x44 */ iemOp_cmove_Gv_Ev,
6769 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6770 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6771 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6772 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6773 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6774 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6775 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6776 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6777 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6778 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6779 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6780 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6781 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6782 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6783 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6784 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6785 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6786 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6787 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6788 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6789 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6790 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6791 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6792 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6793 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6794 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6795 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6796 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6797 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6798 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6799 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6800 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6801 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6802 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6803 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6804 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6805 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6806 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6807 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6808 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6809 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6810 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6811 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6812 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6813 /* 0x71 */ iemOp_Grp12,
6814 /* 0x72 */ iemOp_Grp13,
6815 /* 0x73 */ iemOp_Grp14,
6816 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6817 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6818 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6819 /* 0x77 */ iemOp_emms,
6820 /* 0x78 */ iemOp_vmread_AmdGrp17,
6821 /* 0x79 */ iemOp_vmwrite,
6822 /* 0x7a */ iemOp_Invalid,
6823 /* 0x7b */ iemOp_Invalid,
6824 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6825 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6826 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6827 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6828 /* 0x80 */ iemOp_jo_Jv,
6829 /* 0x81 */ iemOp_jno_Jv,
6830 /* 0x82 */ iemOp_jc_Jv,
6831 /* 0x83 */ iemOp_jnc_Jv,
6832 /* 0x84 */ iemOp_je_Jv,
6833 /* 0x85 */ iemOp_jne_Jv,
6834 /* 0x86 */ iemOp_jbe_Jv,
6835 /* 0x87 */ iemOp_jnbe_Jv,
6836 /* 0x88 */ iemOp_js_Jv,
6837 /* 0x89 */ iemOp_jns_Jv,
6838 /* 0x8a */ iemOp_jp_Jv,
6839 /* 0x8b */ iemOp_jnp_Jv,
6840 /* 0x8c */ iemOp_jl_Jv,
6841 /* 0x8d */ iemOp_jnl_Jv,
6842 /* 0x8e */ iemOp_jle_Jv,
6843 /* 0x8f */ iemOp_jnle_Jv,
6844 /* 0x90 */ iemOp_seto_Eb,
6845 /* 0x91 */ iemOp_setno_Eb,
6846 /* 0x92 */ iemOp_setc_Eb,
6847 /* 0x93 */ iemOp_setnc_Eb,
6848 /* 0x94 */ iemOp_sete_Eb,
6849 /* 0x95 */ iemOp_setne_Eb,
6850 /* 0x96 */ iemOp_setbe_Eb,
6851 /* 0x97 */ iemOp_setnbe_Eb,
6852 /* 0x98 */ iemOp_sets_Eb,
6853 /* 0x99 */ iemOp_setns_Eb,
6854 /* 0x9a */ iemOp_setp_Eb,
6855 /* 0x9b */ iemOp_setnp_Eb,
6856 /* 0x9c */ iemOp_setl_Eb,
6857 /* 0x9d */ iemOp_setnl_Eb,
6858 /* 0x9e */ iemOp_setle_Eb,
6859 /* 0x9f */ iemOp_setnle_Eb,
6860 /* 0xa0 */ iemOp_push_fs,
6861 /* 0xa1 */ iemOp_pop_fs,
6862 /* 0xa2 */ iemOp_cpuid,
6863 /* 0xa3 */ iemOp_bt_Ev_Gv,
6864 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6865 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6866 /* 0xa6 */ iemOp_Invalid,
6867 /* 0xa7 */ iemOp_Invalid,
6868 /* 0xa8 */ iemOp_push_gs,
6869 /* 0xa9 */ iemOp_pop_gs,
6870 /* 0xaa */ iemOp_rsm,
6871 /* 0xab */ iemOp_bts_Ev_Gv,
6872 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6873 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6874 /* 0xae */ iemOp_Grp15,
6875 /* 0xaf */ iemOp_imul_Gv_Ev,
6876 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6877 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6878 /* 0xb2 */ iemOp_lss_Gv_Mp,
6879 /* 0xb3 */ iemOp_btr_Ev_Gv,
6880 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6881 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6882 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6883 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6884 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6885 /* 0xb9 */ iemOp_Grp10,
6886 /* 0xba */ iemOp_Grp8,
6887 /* 0xbb */ iemOp_btc_Ev_Gv,
6888 /* 0xbc */ iemOp_bsf_Gv_Ev,
6889 /* 0xbd */ iemOp_bsr_Gv_Ev,
6890 /* 0xbe */ iemOp_movsx_Gv_Eb,
6891 /* 0xbf */ iemOp_movsx_Gv_Ew,
6892 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6893 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6894 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6895 /* 0xc3 */ iemOp_movnti_My_Gy,
6896 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6897 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6898 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6899 /* 0xc7 */ iemOp_Grp9,
6900 /* 0xc8 */ iemOp_bswap_rAX_r8,
6901 /* 0xc9 */ iemOp_bswap_rCX_r9,
6902 /* 0xca */ iemOp_bswap_rDX_r10,
6903 /* 0xcb */ iemOp_bswap_rBX_r11,
6904 /* 0xcc */ iemOp_bswap_rSP_r12,
6905 /* 0xcd */ iemOp_bswap_rBP_r13,
6906 /* 0xce */ iemOp_bswap_rSI_r14,
6907 /* 0xcf */ iemOp_bswap_rDI_r15,
6908 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6909 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6910 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6911 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6912 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6913 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6914 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6915 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6916 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6917 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6918 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6919 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6920 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6921 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6922 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6923 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6924 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6925 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6926 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6927 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6928 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6929 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6930 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6931 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6932 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6933 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6934 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6935 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6936 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6937 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6938 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6939 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6940 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6941 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6942 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6943 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6944 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6945 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6946 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6947 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6948 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6949 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6950 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6951 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6952 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6953 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6954 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6955 /* 0xff */ iemOp_Invalid
6956};
6957
6958/** @} */
6959
6960
6961/** @name One byte opcodes.
6962 *
6963 * @{
6964 */
6965
/** Opcode 0x00 - add Eb,Gb: byte ADD with r/m destination. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    /* The worker fetches ModR/M and dispatches register vs. memory forms. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
6972
6973
/** Opcode 0x01 - add Ev,Gv: word/dword/qword ADD with r/m destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
6980
6981
/** Opcode 0x02 - add Gb,Eb: byte ADD with register destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
6988
6989
/** Opcode 0x03 - add Gv,Ev: word/dword/qword ADD with register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
6996
6997
/** Opcode 0x04 - add AL,Ib: immediate byte ADD to AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
7004
7005
/** Opcode 0x05 - add rAX,Iz: immediate word/dword ADD to rAX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7012
7013
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7020
7021
/** Opcode 0x07 - pop es. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* Raises #UD in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment-register loads have side effects; defer to the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7030
7031
/** Opcode 0x08 - or Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7039
7040
/** Opcode 0x09 - or Ev,Gv. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC("or Ev,Gv ");
    /* AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
7048
7049
/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
7057
7058
/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
7066
7067
/** Opcode 0x0c - or AL,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
7075
7076
/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7084
7085
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7092
7093
/** Opcode 0x0f - escape byte; dispatches into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7102
/** Opcode 0x10 - adc Eb,Gb: byte add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
7109
7110
/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
7117
7118
/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
7125
7126
/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
7133
7134
/** Opcode 0x14 - adc AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
7141
7142
/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7149
7150
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
7157
7158
/** Opcode 0x17 - pop ss. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* Raises #UD in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7167
7168
/** Opcode 0x18 - sbb Eb,Gb: byte subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
7175
7176
/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
7183
7184
/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
7191
7192
/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
7199
7200
/** Opcode 0x1c - sbb AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
7207
7208
/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7215
7216
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
7223
7224
/** Opcode 0x1f - pop ds. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* Raises #UD in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7233
7234
/** Opcode 0x20 - and Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is architecturally undefined after AND. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
7242
7243
/** Opcode 0x21 - and Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
7251
7252
/** Opcode 0x22 - and Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
7260
7261
/** Opcode 0x23 - and Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
7269
7270
/** Opcode 0x24 - and AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
7278
7279
/** Opcode 0x25 - and rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7287
7288
/** Opcode 0x26 - ES segment override prefix. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the override and continue decoding with the next byte. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7299
7300
/** Opcode 0x27 - daa: decimal adjust AL after addition. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();       /* Raises #UD in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7310
7311
/** Opcode 0x28 - sub Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
7318
7319
/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
7326
7327
/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
7334
7335
/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
7342
7343
/** Opcode 0x2c - sub AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
7350
7351
/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7358
7359
/** Opcode 0x2e - CS segment override prefix (also branch-hint on some CPUs). */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7370
7371
/** Opcode 0x2f - das: decimal adjust AL after subtraction. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();       /* Raises #UD in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7381
7382
/** Opcode 0x30 - xor Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is architecturally undefined after XOR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
7390
7391
/** Opcode 0x31 - xor Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
7399
7400
/** Opcode 0x32 - xor Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
7408
7409
/** Opcode 0x33 - xor Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
7417
7418
/** Opcode 0x34 - xor AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
7426
7427
/** Opcode 0x35 - xor rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7435
7436
/** Opcode 0x36 - SS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7447
7448
/** Opcode 0x37 - aaa (ASCII adjust after addition); decode stub, implementation pending. */
FNIEMOP_STUB(iemOp_aaa);
7451
7452
/** Opcode 0x38 - cmp Eb,Gb: compare writes only EFLAGS. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
7460
7461
/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
7469
7470
/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
7477
7478
/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
7485
7486
/** Opcode 0x3c - cmp AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
7493
7494
/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7501
7502
/** Opcode 0x3e - DS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7513
7514
/** Opcode 0x3f - aas (ASCII adjust after subtraction); decode stub, implementation pending. */
FNIEMOP_STUB(iemOp_aas);
7517
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies the unary worker (pImpl) to general register iReg at the current
 * effective operand size, updating EFLAGS by reference.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit register writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached for valid enmEffOpSize values; keeps the compiler happy. */
    return VINF_SUCCESS;
}
7562
7563
/** Opcode 0x40 - inc eAX; in 64-bit mode the plain REX prefix. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
7582
7583
/** Opcode 0x41 - inc eCX; in 64-bit mode the REX.B prefix. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;    /* Extends base/opcode register encodings to r8-r15. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
7603
7604
/** Opcode 0x42 - inc eDX; in 64-bit mode the REX.X prefix. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;    /* Extends the SIB index register encoding. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
7624
7625
7626
/** Opcode 0x43 - inc eBX; in 64-bit mode the REX.BX prefix. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
7647
7648
/** Opcode 0x44 - inc eSP; in 64-bit mode the REX.R prefix. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;  /* Extends the ModR/M reg field encoding. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
7668
7669
/** Opcode 0x45 - inc eBP; in 64-bit mode the REX.RB prefix. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
7690
7691
/** Opcode 0x46 - inc eSI; in 64-bit mode the REX.RX prefix. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
7712
7713
/** Opcode 0x47 - inc eDI; in 64-bit mode the REX.RXB prefix. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7735
7736
/** Opcode 0x48 - dec eAX; in 64-bit mode the REX.W prefix. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
7756
7757
/** Opcode 0x49 - dec eCX; in 64-bit mode the REX.BW prefix. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
7778
7779
/** Opcode 0x4a - dec eDX; in 64-bit mode the REX.XW prefix. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
7800
7801
/** Opcode 0x4b - dec eBX; in 64-bit mode the REX.BXW prefix. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
7823
7824
/** Opcode 0x4c - dec eSP; in 64-bit mode the REX.RW prefix. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
7845
7846
/** Opcode 0x4d - dec eBP; in 64-bit mode the REX.RBW prefix. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
7868
7869
/** Opcode 0x4e - dec eSI; in 64-bit mode the REX.RXW prefix. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
7891
7892
/** Opcode 0x4f - dec eDI; in 64-bit mode the REX.RXBW prefix. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7915
7916
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register is extended by REX.B and the default operand
 * size is 64-bit (the 0x66 prefix selects 16-bit; 32-bit pushes don't exist).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7962
7963
/** Opcode 0x50 - push rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
7970
7971
/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
7978
7979
/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
7986
7987
/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
7994
7995
7996/** Opcode 0x54. */
7997FNIEMOP_DEF(iemOp_push_eSP)
7998{
7999 IEMOP_MNEMONIC("push rSP");
8000#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
8001 if (pIemCpu->uTargetCpu == IEMTARGETCPU_8086)
8002 {
8003 IEM_MC_BEGIN(0, 1);
8004 IEM_MC_LOCAL(uint16_t, u16Value);
8005 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8006 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8007 IEM_MC_PUSH_U16(u16Value);
8008 IEM_MC_ADVANCE_RIP();
8009 IEM_MC_END();
8010 }
8011#endif
8012 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8013}
8014
8015
/** Opcode 0x55 - push rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8022
8023
/** Opcode 0x56 - push rSI.
 * Delegates to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8030
8031
/** Opcode 0x57 - push rDI.
 * Delegates to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8038
8039
/**
 * Common 'pop register' helper for opcodes 0x58..0x5f.
 *
 * Pops a 16/32/64-bit value off the stack into the given general register.
 *
 * @param   iReg    The register index (X86_GREG_XXX); extended with REX.B
 *                  in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;                 /* REX.B extends the register index (r8..r15). */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;  /* POP reg defaults to 64-bit operand size in long mode; */
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }                                           /* 0x66 selects 16-bit, 32-bit is not encodable here. */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);   /* local *pointer*; pop writes through the register reference */
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8086
8087
/** Opcode 0x58 - pop rAX.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8094
8095
/** Opcode 0x59 - pop rCX.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8102
8103
/** Opcode 0x5a - pop rDX.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8110
8111
/** Opcode 0x5b - pop rBX.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8118
8119
/** Opcode 0x5c - pop rSP.
 *
 * Handled locally rather than via the common worker because popping into SP
 * must not re-add the stack-pointer increment on top of the popped value
 * (POP SP loads the value read from the old top of stack).  In 64-bit mode
 * a REX.B-extended encoding targets r12 and can use the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* actually r12 after REX.B extension */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;  /* default 64-bit operand size in long mode, */
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }                                           /* 0x66 gives 16-bit; 32-bit not encodable. */

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8167
8168
/** Opcode 0x5d - pop rBP.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8175
8176
/** Opcode 0x5e - pop rSI.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8183
8184
/** Opcode 0x5f - pop rDI.
 * Delegates to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8191
8192
/** Opcode 0x60 - pusha/pushad.
 * 186+, invalid in 64-bit mode; defers to a C implementation selected by
 * the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8204
8205
/** Opcode 0x61 - popa/popad.
 * 186+, invalid in 64-bit mode; defers to a C implementation selected by
 * the effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8217
8218
/** Opcode 0x62 - bound Gv,Ma (also the EVEX prefix byte on newer CPUs).
 * Not implemented yet; FNIEMOP_STUB presumably generates a failing stub.
 * The 186 minimum check below is parked until the real implementation lands. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//                IEMOP_HLP_MIN_186();
8222
8223
/** Opcode 0x63 - arpl Ew,Gw (non-64-bit modes only; 0x63 is movsxd in
 * 64-bit mode, see iemOp_movsxd_Gv_Ev).
 *
 * Adjusts the RPL field of the destination selector; 286+ and not valid
 * in real or V86 mode.  The actual RPL logic lives in iemAImpl_arpl. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate on the register in place. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK)); /* no REX here - not a 64-bit mode opcode */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map read-write, run the helper, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8273
8274
/** Opcode 0x63 - movsxd Gv,Ev (64-bit mode only; caller dispatches arpl
 * for the other modes).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source into the
         * 64-bit destination register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: fetch 32 bits and
         * sign-extend to 64.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8316
8317
/** Opcode 0x64 - FS segment override prefix.
 * Records the prefix, makes FS the effective segment, then decodes the
 * next opcode byte via the one-byte dispatch table. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8330
8331
/** Opcode 0x65 - GS segment override prefix.
 * Records the prefix, makes GS the effective segment, then decodes the
 * next opcode byte via the one-byte dispatch table. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8344
8345
/** Opcode 0x66 - operand-size override prefix.
 * Records the prefix, recalculates the effective operand size, then
 * decodes the next opcode byte via the one-byte dispatch table. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8358
8359
/** Opcode 0x67 - address-size override prefix.
 * Toggles the effective address mode relative to the default (16<->32,
 * and 64->32 in long mode), then decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break; /* 16-bit addressing is not reachable in long mode */
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8378
8379
/** Opcode 0x68 - push Iz (word/dword immediate).
 * 186+; default 64-bit operand size in long mode.  The 64-bit form pushes
 * a sign-extended 32-bit immediate. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is 32 bits, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8424
8425
/** Opcode 0x69 - imul Gv,Ev,Iz.
 *
 * Three-operand signed multiply: Gv = Ev * Iz.  186+.  Each operand-size
 * case fetches the source into a local, runs the two-operand imul helper
 * on it, and stores the product into the Greg destination.  SF/ZF/AF/PF
 * are undefined per the architecture (declared for the verifier below).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - immediate follows the modrm byte directly */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);      /* multiply in a local, then store to Gv */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - effective address first (hinting 2 trailing imm bytes), then immediate */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - 4 trailing immediate bytes after the effective address */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - immediate is 32 bits, sign-extended to 64 */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - 4 trailing immediate bytes, sign-extended to 64 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8585
8586
/** Opcode 0x6a - push Ib (sign-extended byte immediate).
 * 186+; default 64-bit operand size in long mode.  The signed 8-bit
 * immediate is implicitly sign-extended by the push macro's argument
 * conversion for each operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8613
8614
/** Opcode 0x6b - imul Gv,Ev,Ib.
 *
 * Three-operand signed multiply with a sign-extended byte immediate:
 * Gv = Ev * Ib.  186+.  Structure mirrors iemOp_imul_Gv_Ev_Iz; only the
 * immediate fetch/extension differs.  SF/ZF/AF/PF are undefined.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - (int8_t) cast sign-extends the immediate */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);      /* multiply in a local, then store to Gv */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - effective address hints 1 trailing immediate byte */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8768
8769
/** Opcode 0x6c - ins Yb,DX (byte string input from port DX).
 * 186+.  Dispatches to a C implementation keyed on the effective address
 * mode; REP/REPNZ prefixes select the repeating variants.  The 'false'
 * argument presumably selects non-I/O-permission-checked operation -
 * confirm against the iemCImpl_*ins* signatures. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* supplies the default: case */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8798
8799
/** Opcode 0x6d - ins Yv,DX (word/dword string input from port DX).
 * 186+.  Dispatches on effective operand size (64-bit falls back to the
 * 32-bit op variants - I/O is at most 32-bit wide) and address mode;
 * REP/REPNZ prefixes select the repeating variants.  The trailing break
 * statements are unreachable since every inner case returns. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit I/O variants */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* outer switch default */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8860
8861
/** Opcode 0x6e - outs DX,Yb (byte string output to port DX).
 * 186+.  Dispatches on effective address mode, passing the effective
 * source segment along; REP/REPNZ prefixes select the repeating
 * variants. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8890
8891
/** Opcode 0x6f - outs DX,Yv (word/dword string output to port DX).
 * 186+.  Dispatches on effective operand size (64-bit falls back to the
 * 32-bit op variants) and address mode, passing the effective source
 * segment along; REP/REPNZ prefixes select the repeating variants.  The
 * trailing break statements are unreachable since every inner case
 * returns. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit I/O variants */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* outer switch default */
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8952
8953
/** Opcode 0x70 - jo Jb: short jump if the overflow flag is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* OF set: take the jump */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* OF clear: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8971
8972
/** Opcode 0x71 - jno Jb: short jump if the overflow flag is clear.
 * Note the inverted arms relative to jo: the jump is in the else branch. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* OF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* OF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8990
/** Opcode 0x72 - jc/jb/jnae Jb: short jump if the carry flag is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF set: take the jump */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* CF clear: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9008
9009
/** Opcode 0x73 - jnc/jnb/jae Jb: short jump if the carry flag is clear. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();       /* CF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* CF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9027
9028
/** Opcode 0x74 - je/jz Jb: short jump if the zero flag is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF set: take the jump */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* ZF clear: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9046
9047
/** Opcode 0x75 - jne/jnz Jb: short jump if the zero flag is clear. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();       /* ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* ZF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9065
9066
/**
 * Opcode 0x76 - JBE/JNA rel8.
 *
 * Jump short if below or equal (unsigned): taken when CF or ZF is set.
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9084
9085
/**
 * Opcode 0x77 - JNBE/JA rel8.
 *
 * Jump short if above (unsigned): taken when both CF and ZF are clear.
 * Inverted coding: if either flag is set the branch is NOT taken.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9103
9104
/**
 * Opcode 0x78 - JS rel8.
 *
 * Jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9122
9123
/**
 * Opcode 0x79 - JNS rel8.
 *
 * Jump short if the sign flag (SF) is clear.  Inverted coding: SF set means
 * the branch is NOT taken.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9141
9142
/**
 * Opcode 0x7a - JP/JPE rel8.
 *
 * Jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9160
9161
/**
 * Opcode 0x7b - JNP/JPO rel8.
 *
 * Jump short if the parity flag (PF) is clear.  Inverted coding: PF set means
 * the branch is NOT taken.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9179
9180
/**
 * Opcode 0x7c - JL/JNGE rel8.
 *
 * Jump short if less (signed): taken when SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9198
9199
/**
 * Opcode 0x7d - JNL/JGE rel8.
 *
 * Jump short if greater or equal (signed): taken when SF == OF.  Inverted
 * coding: SF != OF means the branch is NOT taken.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9217
9218
/**
 * Opcode 0x7e - JLE/JNG rel8.
 *
 * Jump short if less or equal (signed): taken when ZF is set or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9236
9237
/**
 * Opcode 0x7f - JNLE/JG rel8.
 *
 * Jump short if greater (signed): taken when ZF is clear and SF == OF.
 * Inverted coding: if ZF is set or SF != OF the branch is NOT taken.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9255
9256
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The actual operation is selected by the /reg field of the ModR/M byte,
 * which indexes g_apIemImplGrp1.  The destination is a byte register or
 * memory operand, the source an 8-bit immediate.  For memory destinations
 * the locked worker is used when a LOCK prefix is present; CMP (which has
 * no pfnLockedU8) rejects LOCK and only needs read access.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: 8 entries at a 4-byte stride, indexed by /reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* One immediate byte still follows the ModR/M operand bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9315
9316
9317/** Opcode 0x81. */
9318FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9319{
9320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9321 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9322 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9323
9324 switch (pIemCpu->enmEffOpSize)
9325 {
9326 case IEMMODE_16BIT:
9327 {
9328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9329 {
9330 /* register target */
9331 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9332 IEMOP_HLP_NO_LOCK_PREFIX();
9333 IEM_MC_BEGIN(3, 0);
9334 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9335 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9336 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9337
9338 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9339 IEM_MC_REF_EFLAGS(pEFlags);
9340 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9341
9342 IEM_MC_ADVANCE_RIP();
9343 IEM_MC_END();
9344 }
9345 else
9346 {
9347 /* memory target */
9348 uint32_t fAccess;
9349 if (pImpl->pfnLockedU16)
9350 fAccess = IEM_ACCESS_DATA_RW;
9351 else
9352 { /* CMP, TEST */
9353 IEMOP_HLP_NO_LOCK_PREFIX();
9354 fAccess = IEM_ACCESS_DATA_R;
9355 }
9356 IEM_MC_BEGIN(3, 2);
9357 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9358 IEM_MC_ARG(uint16_t, u16Src, 1);
9359 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9361
9362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9363 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9364 IEM_MC_ASSIGN(u16Src, u16Imm);
9365 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9366 IEM_MC_FETCH_EFLAGS(EFlags);
9367 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9368 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9369 else
9370 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9371
9372 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9373 IEM_MC_COMMIT_EFLAGS(EFlags);
9374 IEM_MC_ADVANCE_RIP();
9375 IEM_MC_END();
9376 }
9377 break;
9378 }
9379
9380 case IEMMODE_32BIT:
9381 {
9382 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9383 {
9384 /* register target */
9385 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9386 IEMOP_HLP_NO_LOCK_PREFIX();
9387 IEM_MC_BEGIN(3, 0);
9388 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9389 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9390 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9391
9392 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9393 IEM_MC_REF_EFLAGS(pEFlags);
9394 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9395 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9396
9397 IEM_MC_ADVANCE_RIP();
9398 IEM_MC_END();
9399 }
9400 else
9401 {
9402 /* memory target */
9403 uint32_t fAccess;
9404 if (pImpl->pfnLockedU32)
9405 fAccess = IEM_ACCESS_DATA_RW;
9406 else
9407 { /* CMP, TEST */
9408 IEMOP_HLP_NO_LOCK_PREFIX();
9409 fAccess = IEM_ACCESS_DATA_R;
9410 }
9411 IEM_MC_BEGIN(3, 2);
9412 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9413 IEM_MC_ARG(uint32_t, u32Src, 1);
9414 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9416
9417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9418 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9419 IEM_MC_ASSIGN(u32Src, u32Imm);
9420 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9421 IEM_MC_FETCH_EFLAGS(EFlags);
9422 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9424 else
9425 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9426
9427 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9428 IEM_MC_COMMIT_EFLAGS(EFlags);
9429 IEM_MC_ADVANCE_RIP();
9430 IEM_MC_END();
9431 }
9432 break;
9433 }
9434
9435 case IEMMODE_64BIT:
9436 {
9437 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9438 {
9439 /* register target */
9440 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9441 IEMOP_HLP_NO_LOCK_PREFIX();
9442 IEM_MC_BEGIN(3, 0);
9443 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9444 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9445 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9446
9447 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9448 IEM_MC_REF_EFLAGS(pEFlags);
9449 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9450
9451 IEM_MC_ADVANCE_RIP();
9452 IEM_MC_END();
9453 }
9454 else
9455 {
9456 /* memory target */
9457 uint32_t fAccess;
9458 if (pImpl->pfnLockedU64)
9459 fAccess = IEM_ACCESS_DATA_RW;
9460 else
9461 { /* CMP */
9462 IEMOP_HLP_NO_LOCK_PREFIX();
9463 fAccess = IEM_ACCESS_DATA_R;
9464 }
9465 IEM_MC_BEGIN(3, 2);
9466 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9467 IEM_MC_ARG(uint64_t, u64Src, 1);
9468 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9470
9471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9472 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9473 IEM_MC_ASSIGN(u64Src, u64Imm);
9474 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9475 IEM_MC_FETCH_EFLAGS(EFlags);
9476 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9477 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9478 else
9479 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9480
9481 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9482 IEM_MC_COMMIT_EFLAGS(EFlags);
9483 IEM_MC_ADVANCE_RIP();
9484 IEM_MC_END();
9485 }
9486 break;
9487 }
9488 }
9489 return VINF_SUCCESS;
9490}
9491
9492
/**
 * Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib).
 *
 * Invalid in 64-bit mode; otherwise decoded exactly like 0x80.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9499
9500
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * The actual operation is selected by the /reg field of the ModR/M byte,
 * which indexes g_apIemImplGrp1.  The 8-bit immediate is sign-extended to
 * the effective operand size before the operation.  For memory destinations
 * the locked worker is used when a LOCK prefix is present; CMP (no locked
 * worker) rejects LOCK and only needs read access.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table: 8 entries at a 4-byte stride, indexed by /reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* (int8_t) cast sign-extends the immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                /* (int8_t) cast sign-extends the immediate to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                /* (int8_t) cast sign-extends the immediate to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* pfnLockedU16 is a proxy for all sizes: a group-1 op either has all
           locked workers or none (CMP). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte still follows (matters for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9664
9665
/**
 * Opcode 0x84 - TEST Eb,Gb.
 *
 * AND without writing the result; only EFLAGS are updated.  Shares the
 * generic byte r/m,reg binary-operator worker with the test implementation.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9674
9675
/**
 * Opcode 0x85 - TEST Ev,Gv.
 *
 * AND without writing the result; only EFLAGS are updated.  Shares the
 * generic word/dword/qword r/m,reg binary-operator worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9684
9685
/**
 * Opcode 0x86 - XCHG Eb,Gb.
 *
 * Exchanges a byte register with another byte register or a memory operand.
 * The register form is a plain two-temporary swap; the memory form maps the
 * byte read/write and calls the assembly xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9733
9734
/**
 * Opcode 0x87 - XCHG Ev,Gv.
 *
 * Exchanges a word/dword/qword register with another register or a memory
 * operand.  The register form is a two-temporary swap; the memory form maps
 * the destination read/write and calls the size-specific xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9856
9857
/**
 * Opcode 0x88 - MOV Eb,Gb.
 *
 * Stores a byte register into another byte register or into memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9896
9897
/**
 * Opcode 0x89 - MOV Ev,Gv.
 *
 * Stores a word/dword/qword register into another register or into memory,
 * per the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9984
9985
/**
 * Opcode 0x8a - MOV Gb,Eb.
 *
 * Loads a byte register from another byte register or from memory.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10022
10023
/**
 * Opcode 0x8b - MOV Gv,Ev.
 *
 * Loads a word/dword/qword register from another register or from memory,
 * per the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10110
10111
10112/** Opcode 0x63. */
10113FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10114{
10115 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10116 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10117 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10118 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10119 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10120}
10121
10122
10123/** Opcode 0x8c. */
10124FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10125{
10126 IEMOP_MNEMONIC("mov Ev,Sw");
10127
10128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10129 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10130
10131 /*
10132 * Check that the destination register exists. The REX.R prefix is ignored.
10133 */
10134 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10135 if ( iSegReg > X86_SREG_GS)
10136 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10137
10138 /*
10139 * If rm is denoting a register, no more instruction bytes.
10140 * In that case, the operand size is respected and the upper bits are
10141 * cleared (starting with some pentium).
10142 */
10143 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10144 {
10145 switch (pIemCpu->enmEffOpSize)
10146 {
10147 case IEMMODE_16BIT:
10148 IEM_MC_BEGIN(0, 1);
10149 IEM_MC_LOCAL(uint16_t, u16Value);
10150 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10151 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
10152 IEM_MC_ADVANCE_RIP();
10153 IEM_MC_END();
10154 break;
10155
10156 case IEMMODE_32BIT:
10157 IEM_MC_BEGIN(0, 1);
10158 IEM_MC_LOCAL(uint32_t, u32Value);
10159 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10160 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
10161 IEM_MC_ADVANCE_RIP();
10162 IEM_MC_END();
10163 break;
10164
10165 case IEMMODE_64BIT:
10166 IEM_MC_BEGIN(0, 1);
10167 IEM_MC_LOCAL(uint64_t, u64Value);
10168 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10169 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
10170 IEM_MC_ADVANCE_RIP();
10171 IEM_MC_END();
10172 break;
10173 }
10174 }
10175 else
10176 {
10177 /*
10178 * We're saving the register to memory. The access is word sized
10179 * regardless of operand size prefixes.
10180 */
10181#if 0 /* not necessary */
10182 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
10183#endif
10184 IEM_MC_BEGIN(0, 2);
10185 IEM_MC_LOCAL(uint16_t, u16Value);
10186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10188 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10189 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
10190 IEM_MC_ADVANCE_RIP();
10191 IEM_MC_END();
10192 }
10193 return VINF_SUCCESS;
10194}
10195
10196
10197
10198
10199/** Opcode 0x8d. */
10200FNIEMOP_DEF(iemOp_lea_Gv_M)
10201{
10202 IEMOP_MNEMONIC("lea Gv,M");
10203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10204 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10206 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10207
10208 switch (pIemCpu->enmEffOpSize)
10209 {
10210 case IEMMODE_16BIT:
10211 IEM_MC_BEGIN(0, 2);
10212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10213 IEM_MC_LOCAL(uint16_t, u16Cast);
10214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10215 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10216 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
10217 IEM_MC_ADVANCE_RIP();
10218 IEM_MC_END();
10219 return VINF_SUCCESS;
10220
10221 case IEMMODE_32BIT:
10222 IEM_MC_BEGIN(0, 2);
10223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10224 IEM_MC_LOCAL(uint32_t, u32Cast);
10225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10226 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10227 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
10228 IEM_MC_ADVANCE_RIP();
10229 IEM_MC_END();
10230 return VINF_SUCCESS;
10231
10232 case IEMMODE_64BIT:
10233 IEM_MC_BEGIN(0, 1);
10234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10236 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
10237 IEM_MC_ADVANCE_RIP();
10238 IEM_MC_END();
10239 return VINF_SUCCESS;
10240 }
10241 AssertFailedReturn(VERR_IEM_IPE_7);
10242}
10243
10244
10245/** Opcode 0x8e. */
10246FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10247{
10248 IEMOP_MNEMONIC("mov Sw,Ev");
10249
10250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10251 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10252
10253 /*
10254 * The practical operand size is 16-bit.
10255 */
10256#if 0 /* not necessary */
10257 pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
10258#endif
10259
10260 /*
10261 * Check that the destination register exists and can be used with this
10262 * instruction. The REX.R prefix is ignored.
10263 */
10264 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10265 if ( iSegReg == X86_SREG_CS
10266 || iSegReg > X86_SREG_GS)
10267 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10268
10269 /*
10270 * If rm is denoting a register, no more instruction bytes.
10271 */
10272 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10273 {
10274 IEM_MC_BEGIN(2, 0);
10275 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10276 IEM_MC_ARG(uint16_t, u16Value, 1);
10277 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10278 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10279 IEM_MC_END();
10280 }
10281 else
10282 {
10283 /*
10284 * We're loading the register from memory. The access is word sized
10285 * regardless of operand size prefixes.
10286 */
10287 IEM_MC_BEGIN(2, 1);
10288 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10289 IEM_MC_ARG(uint16_t, u16Value, 1);
10290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10292 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
10293 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10294 IEM_MC_END();
10295 }
10296 return VINF_SUCCESS;
10297}
10298
10299
10300/** Opcode 0x8f /0. */
10301FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10302{
10303 /* This bugger is rather annoying as it requires rSP to be updated before
10304 doing the effective address calculations. Will eventually require a
10305 split between the R/M+SIB decoding and the effective address
10306 calculation - which is something that is required for any attempt at
10307 reusing this code for a recompiler. It may also be good to have if we
10308 need to delay #UD exception caused by invalid lock prefixes.
10309
10310 For now, we'll do a mostly safe interpreter-only implementation here. */
10311 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
10312 * now until tests show it's checked.. */
10313 IEMOP_MNEMONIC("pop Ev");
10314 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10315
10316 /* Register access is relatively easy and can share code. */
10317 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10318 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10319
10320 /*
10321 * Memory target.
10322 *
10323 * Intel says that RSP is incremented before it's used in any effective
10324 * address calcuations. This means some serious extra annoyance here since
10325 * we decode and calculate the effective address in one step and like to
10326 * delay committing registers till everything is done.
10327 *
10328 * So, we'll decode and calculate the effective address twice. This will
10329 * require some recoding if turned into a recompiler.
10330 */
10331 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10332
10333#ifndef TST_IEM_CHECK_MC
10334 /* Calc effective address with modified ESP. */
10335 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
10336 RTGCPTR GCPtrEff;
10337 VBOXSTRICTRC rcStrict;
10338 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
10339 if (rcStrict != VINF_SUCCESS)
10340 return rcStrict;
10341 pIemCpu->offOpcode = offOpcodeSaved;
10342
10343 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
10344 uint64_t const RspSaved = pCtx->rsp;
10345 switch (pIemCpu->enmEffOpSize)
10346 {
10347 case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
10348 case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
10349 case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
10350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10351 }
10352 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
10353 Assert(rcStrict == VINF_SUCCESS);
10354 pCtx->rsp = RspSaved;
10355
10356 /* Perform the operation - this should be CImpl. */
10357 RTUINT64U TmpRsp;
10358 TmpRsp.u = pCtx->rsp;
10359 switch (pIemCpu->enmEffOpSize)
10360 {
10361 case IEMMODE_16BIT:
10362 {
10363 uint16_t u16Value;
10364 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
10365 if (rcStrict == VINF_SUCCESS)
10366 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
10367 break;
10368 }
10369
10370 case IEMMODE_32BIT:
10371 {
10372 uint32_t u32Value;
10373 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
10374 if (rcStrict == VINF_SUCCESS)
10375 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
10376 break;
10377 }
10378
10379 case IEMMODE_64BIT:
10380 {
10381 uint64_t u64Value;
10382 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
10383 if (rcStrict == VINF_SUCCESS)
10384 rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
10385 break;
10386 }
10387
10388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10389 }
10390 if (rcStrict == VINF_SUCCESS)
10391 {
10392 pCtx->rsp = TmpRsp.u;
10393 iemRegUpdateRipAndClearRF(pIemCpu);
10394 }
10395 return rcStrict;
10396
10397#else
10398 return VERR_IEM_IPE_2;
10399#endif
10400}
10401
10402
10403/** Opcode 0x8f. */
10404FNIEMOP_DEF(iemOp_Grp1A)
10405{
10406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10407 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10408 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10409
10410 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10411 /** @todo XOP decoding. */
10412 IEMOP_MNEMONIC("3-byte-xop");
10413 return IEMOP_RAISE_INVALID_OPCODE();
10414}
10415
10416
10417/**
10418 * Common 'xchg reg,rAX' helper.
10419 */
10420FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10421{
10422 IEMOP_HLP_NO_LOCK_PREFIX();
10423
10424 iReg |= pIemCpu->uRexB;
10425 switch (pIemCpu->enmEffOpSize)
10426 {
10427 case IEMMODE_16BIT:
10428 IEM_MC_BEGIN(0, 2);
10429 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10430 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10431 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10432 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10433 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10434 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10435 IEM_MC_ADVANCE_RIP();
10436 IEM_MC_END();
10437 return VINF_SUCCESS;
10438
10439 case IEMMODE_32BIT:
10440 IEM_MC_BEGIN(0, 2);
10441 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10442 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10443 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10444 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10445 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10446 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10447 IEM_MC_ADVANCE_RIP();
10448 IEM_MC_END();
10449 return VINF_SUCCESS;
10450
10451 case IEMMODE_64BIT:
10452 IEM_MC_BEGIN(0, 2);
10453 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10454 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10455 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10456 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10457 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10458 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10459 IEM_MC_ADVANCE_RIP();
10460 IEM_MC_END();
10461 return VINF_SUCCESS;
10462
10463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10464 }
10465}
10466
10467
10468/** Opcode 0x90. */
10469FNIEMOP_DEF(iemOp_nop)
10470{
10471 /* R8/R8D and RAX/EAX can be exchanged. */
10472 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10473 {
10474 IEMOP_MNEMONIC("xchg r8,rAX");
10475 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10476 }
10477
10478 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10479 IEMOP_MNEMONIC("pause");
10480 else
10481 IEMOP_MNEMONIC("nop");
10482 IEM_MC_BEGIN(0, 0);
10483 IEM_MC_ADVANCE_RIP();
10484 IEM_MC_END();
10485 return VINF_SUCCESS;
10486}
10487
10488
10489/** Opcode 0x91. */
10490FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
10491{
10492 IEMOP_MNEMONIC("xchg rCX,rAX");
10493 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
10494}
10495
10496
10497/** Opcode 0x92. */
10498FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
10499{
10500 IEMOP_MNEMONIC("xchg rDX,rAX");
10501 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
10502}
10503
10504
10505/** Opcode 0x93. */
10506FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
10507{
10508 IEMOP_MNEMONIC("xchg rBX,rAX");
10509 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
10510}
10511
10512
10513/** Opcode 0x94. */
10514FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10515{
10516 IEMOP_MNEMONIC("xchg rSX,rAX");
10517 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10518}
10519
10520
10521/** Opcode 0x95. */
10522FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
10523{
10524 IEMOP_MNEMONIC("xchg rBP,rAX");
10525 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
10526}
10527
10528
10529/** Opcode 0x96. */
10530FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
10531{
10532 IEMOP_MNEMONIC("xchg rSI,rAX");
10533 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
10534}
10535
10536
10537/** Opcode 0x97. */
10538FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
10539{
10540 IEMOP_MNEMONIC("xchg rDI,rAX");
10541 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
10542}
10543
10544
10545/** Opcode 0x98. */
10546FNIEMOP_DEF(iemOp_cbw)
10547{
10548 IEMOP_HLP_NO_LOCK_PREFIX();
10549 switch (pIemCpu->enmEffOpSize)
10550 {
10551 case IEMMODE_16BIT:
10552 IEMOP_MNEMONIC("cbw");
10553 IEM_MC_BEGIN(0, 1);
10554 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
10555 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
10556 } IEM_MC_ELSE() {
10557 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
10558 } IEM_MC_ENDIF();
10559 IEM_MC_ADVANCE_RIP();
10560 IEM_MC_END();
10561 return VINF_SUCCESS;
10562
10563 case IEMMODE_32BIT:
10564 IEMOP_MNEMONIC("cwde");
10565 IEM_MC_BEGIN(0, 1);
10566 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
10567 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
10568 } IEM_MC_ELSE() {
10569 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
10570 } IEM_MC_ENDIF();
10571 IEM_MC_ADVANCE_RIP();
10572 IEM_MC_END();
10573 return VINF_SUCCESS;
10574
10575 case IEMMODE_64BIT:
10576 IEMOP_MNEMONIC("cdqe");
10577 IEM_MC_BEGIN(0, 1);
10578 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
10579 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
10580 } IEM_MC_ELSE() {
10581 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
10582 } IEM_MC_ENDIF();
10583 IEM_MC_ADVANCE_RIP();
10584 IEM_MC_END();
10585 return VINF_SUCCESS;
10586
10587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10588 }
10589}
10590
10591
10592/** Opcode 0x99. */
10593FNIEMOP_DEF(iemOp_cwd)
10594{
10595 IEMOP_HLP_NO_LOCK_PREFIX();
10596 switch (pIemCpu->enmEffOpSize)
10597 {
10598 case IEMMODE_16BIT:
10599 IEMOP_MNEMONIC("cwd");
10600 IEM_MC_BEGIN(0, 1);
10601 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
10602 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
10603 } IEM_MC_ELSE() {
10604 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
10605 } IEM_MC_ENDIF();
10606 IEM_MC_ADVANCE_RIP();
10607 IEM_MC_END();
10608 return VINF_SUCCESS;
10609
10610 case IEMMODE_32BIT:
10611 IEMOP_MNEMONIC("cdq");
10612 IEM_MC_BEGIN(0, 1);
10613 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
10614 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
10615 } IEM_MC_ELSE() {
10616 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
10617 } IEM_MC_ENDIF();
10618 IEM_MC_ADVANCE_RIP();
10619 IEM_MC_END();
10620 return VINF_SUCCESS;
10621
10622 case IEMMODE_64BIT:
10623 IEMOP_MNEMONIC("cqo");
10624 IEM_MC_BEGIN(0, 1);
10625 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
10626 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
10627 } IEM_MC_ELSE() {
10628 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
10629 } IEM_MC_ENDIF();
10630 IEM_MC_ADVANCE_RIP();
10631 IEM_MC_END();
10632 return VINF_SUCCESS;
10633
10634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10635 }
10636}
10637
10638
10639/** Opcode 0x9a. */
10640FNIEMOP_DEF(iemOp_call_Ap)
10641{
10642 IEMOP_MNEMONIC("call Ap");
10643 IEMOP_HLP_NO_64BIT();
10644
10645 /* Decode the far pointer address and pass it on to the far call C implementation. */
10646 uint32_t offSeg;
10647 if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
10648 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10649 else
10650 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10651 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10653 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
10654}
10655
10656
10657/** Opcode 0x9b. (aka fwait) */
10658FNIEMOP_DEF(iemOp_wait)
10659{
10660 IEMOP_MNEMONIC("wait");
10661 IEMOP_HLP_NO_LOCK_PREFIX();
10662
10663 IEM_MC_BEGIN(0, 0);
10664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10666 IEM_MC_ADVANCE_RIP();
10667 IEM_MC_END();
10668 return VINF_SUCCESS;
10669}
10670
10671
10672/** Opcode 0x9c. */
10673FNIEMOP_DEF(iemOp_pushf_Fv)
10674{
10675 IEMOP_HLP_NO_LOCK_PREFIX();
10676 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10677 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10678}
10679
10680
10681/** Opcode 0x9d. */
10682FNIEMOP_DEF(iemOp_popf_Fv)
10683{
10684 IEMOP_HLP_NO_LOCK_PREFIX();
10685 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10686 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10687}
10688
10689
10690/** Opcode 0x9e. */
10691FNIEMOP_DEF(iemOp_sahf)
10692{
10693 IEMOP_MNEMONIC("sahf");
10694 IEMOP_HLP_NO_LOCK_PREFIX();
10695 if ( pIemCpu->enmCpuMode == IEMMODE_64BIT
10696 && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
10697 return IEMOP_RAISE_INVALID_OPCODE();
10698 IEM_MC_BEGIN(0, 2);
10699 IEM_MC_LOCAL(uint32_t, u32Flags);
10700 IEM_MC_LOCAL(uint32_t, EFlags);
10701 IEM_MC_FETCH_EFLAGS(EFlags);
10702 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
10703 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
10704 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
10705 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
10706 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
10707 IEM_MC_COMMIT_EFLAGS(EFlags);
10708 IEM_MC_ADVANCE_RIP();
10709 IEM_MC_END();
10710 return VINF_SUCCESS;
10711}
10712
10713
10714/** Opcode 0x9f. */
10715FNIEMOP_DEF(iemOp_lahf)
10716{
10717 IEMOP_MNEMONIC("lahf");
10718 IEMOP_HLP_NO_LOCK_PREFIX();
10719 if ( pIemCpu->enmCpuMode == IEMMODE_64BIT
10720 && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
10721 return IEMOP_RAISE_INVALID_OPCODE();
10722 IEM_MC_BEGIN(0, 1);
10723 IEM_MC_LOCAL(uint8_t, u8Flags);
10724 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
10725 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
10726 IEM_MC_ADVANCE_RIP();
10727 IEM_MC_END();
10728 return VINF_SUCCESS;
10729}
10730
10731
10732/**
10733 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10734 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10735 * prefixes. Will return on failures.
10736 * @param a_GCPtrMemOff The variable to store the offset in.
10737 */
10738#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10739 do \
10740 { \
10741 switch (pIemCpu->enmEffAddrMode) \
10742 { \
10743 case IEMMODE_16BIT: \
10744 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10745 break; \
10746 case IEMMODE_32BIT: \
10747 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10748 break; \
10749 case IEMMODE_64BIT: \
10750 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
10751 break; \
10752 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10753 } \
10754 IEMOP_HLP_NO_LOCK_PREFIX(); \
10755 } while (0)
10756
10757/** Opcode 0xa0. */
10758FNIEMOP_DEF(iemOp_mov_Al_Ob)
10759{
10760 /*
10761 * Get the offset and fend of lock prefixes.
10762 */
10763 RTGCPTR GCPtrMemOff;
10764 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10765
10766 /*
10767 * Fetch AL.
10768 */
10769 IEM_MC_BEGIN(0,1);
10770 IEM_MC_LOCAL(uint8_t, u8Tmp);
10771 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10772 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10773 IEM_MC_ADVANCE_RIP();
10774 IEM_MC_END();
10775 return VINF_SUCCESS;
10776}
10777
10778
10779/** Opcode 0xa1. */
10780FNIEMOP_DEF(iemOp_mov_rAX_Ov)
10781{
10782 /*
10783 * Get the offset and fend of lock prefixes.
10784 */
10785 IEMOP_MNEMONIC("mov rAX,Ov");
10786 RTGCPTR GCPtrMemOff;
10787 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10788
10789 /*
10790 * Fetch rAX.
10791 */
10792 switch (pIemCpu->enmEffOpSize)
10793 {
10794 case IEMMODE_16BIT:
10795 IEM_MC_BEGIN(0,1);
10796 IEM_MC_LOCAL(uint16_t, u16Tmp);
10797 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10798 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
10799 IEM_MC_ADVANCE_RIP();
10800 IEM_MC_END();
10801 return VINF_SUCCESS;
10802
10803 case IEMMODE_32BIT:
10804 IEM_MC_BEGIN(0,1);
10805 IEM_MC_LOCAL(uint32_t, u32Tmp);
10806 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10807 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
10808 IEM_MC_ADVANCE_RIP();
10809 IEM_MC_END();
10810 return VINF_SUCCESS;
10811
10812 case IEMMODE_64BIT:
10813 IEM_MC_BEGIN(0,1);
10814 IEM_MC_LOCAL(uint64_t, u64Tmp);
10815 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10816 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
10817 IEM_MC_ADVANCE_RIP();
10818 IEM_MC_END();
10819 return VINF_SUCCESS;
10820
10821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10822 }
10823}
10824
10825
10826/** Opcode 0xa2. */
10827FNIEMOP_DEF(iemOp_mov_Ob_AL)
10828{
10829 /*
10830 * Get the offset and fend of lock prefixes.
10831 */
10832 RTGCPTR GCPtrMemOff;
10833 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10834
10835 /*
10836 * Store AL.
10837 */
10838 IEM_MC_BEGIN(0,1);
10839 IEM_MC_LOCAL(uint8_t, u8Tmp);
10840 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10841 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10842 IEM_MC_ADVANCE_RIP();
10843 IEM_MC_END();
10844 return VINF_SUCCESS;
10845}
10846
10847
10848/** Opcode 0xa3. */
10849FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10850{
10851 /*
10852 * Get the offset and fend of lock prefixes.
10853 */
10854 RTGCPTR GCPtrMemOff;
10855 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10856
10857 /*
10858 * Store rAX.
10859 */
10860 switch (pIemCpu->enmEffOpSize)
10861 {
10862 case IEMMODE_16BIT:
10863 IEM_MC_BEGIN(0,1);
10864 IEM_MC_LOCAL(uint16_t, u16Tmp);
10865 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10866 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10867 IEM_MC_ADVANCE_RIP();
10868 IEM_MC_END();
10869 return VINF_SUCCESS;
10870
10871 case IEMMODE_32BIT:
10872 IEM_MC_BEGIN(0,1);
10873 IEM_MC_LOCAL(uint32_t, u32Tmp);
10874 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10875 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10876 IEM_MC_ADVANCE_RIP();
10877 IEM_MC_END();
10878 return VINF_SUCCESS;
10879
10880 case IEMMODE_64BIT:
10881 IEM_MC_BEGIN(0,1);
10882 IEM_MC_LOCAL(uint64_t, u64Tmp);
10883 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10884 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10885 IEM_MC_ADVANCE_RIP();
10886 IEM_MC_END();
10887 return VINF_SUCCESS;
10888
10889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10890 }
10891}
10892
10893/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
10894#define IEM_MOVS_CASE(ValBits, AddrBits) \
10895 IEM_MC_BEGIN(0, 2); \
10896 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
10897 IEM_MC_LOCAL(RTGCPTR, uAddr); \
10898 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
10899 IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
10900 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
10901 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
10902 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
10903 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
10904 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
10905 } IEM_MC_ELSE() { \
10906 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
10907 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
10908 } IEM_MC_ENDIF(); \
10909 IEM_MC_ADVANCE_RIP(); \
10910 IEM_MC_END();
10911
10912/** Opcode 0xa4. */
10913FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
10914{
10915 IEMOP_HLP_NO_LOCK_PREFIX();
10916
10917 /*
10918 * Use the C implementation if a repeat prefix is encountered.
10919 */
10920 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10921 {
10922 IEMOP_MNEMONIC("rep movsb Xb,Yb");
10923 switch (pIemCpu->enmEffAddrMode)
10924 {
10925 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
10926 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
10927 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
10928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10929 }
10930 }
10931 IEMOP_MNEMONIC("movsb Xb,Yb");
10932
10933 /*
10934 * Sharing case implementation with movs[wdq] below.
10935 */
10936 switch (pIemCpu->enmEffAddrMode)
10937 {
10938 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
10939 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
10940 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
10941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10942 }
10943 return VINF_SUCCESS;
10944}
10945
10946
10947/** Opcode 0xa5. */
10948FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
10949{
10950 IEMOP_HLP_NO_LOCK_PREFIX();
10951
10952 /*
10953 * Use the C implementation if a repeat prefix is encountered.
10954 */
10955 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10956 {
10957 IEMOP_MNEMONIC("rep movs Xv,Yv");
10958 switch (pIemCpu->enmEffOpSize)
10959 {
10960 case IEMMODE_16BIT:
10961 switch (pIemCpu->enmEffAddrMode)
10962 {
10963 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
10964 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
10965 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
10966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10967 }
10968 break;
10969 case IEMMODE_32BIT:
10970 switch (pIemCpu->enmEffAddrMode)
10971 {
10972 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
10973 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
10974 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
10975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10976 }
10977 case IEMMODE_64BIT:
10978 switch (pIemCpu->enmEffAddrMode)
10979 {
10980 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
10981 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
10982 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
10983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10984 }
10985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10986 }
10987 }
10988 IEMOP_MNEMONIC("movs Xv,Yv");
10989
10990 /*
10991 * Annoying double switch here.
10992 * Using ugly macro for implementing the cases, sharing it with movsb.
10993 */
10994 switch (pIemCpu->enmEffOpSize)
10995 {
10996 case IEMMODE_16BIT:
10997 switch (pIemCpu->enmEffAddrMode)
10998 {
10999 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11000 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11001 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11003 }
11004 break;
11005
11006 case IEMMODE_32BIT:
11007 switch (pIemCpu->enmEffAddrMode)
11008 {
11009 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11010 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11011 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11013 }
11014 break;
11015
11016 case IEMMODE_64BIT:
11017 switch (pIemCpu->enmEffAddrMode)
11018 {
11019 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11020 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11021 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11023 }
11024 break;
11025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11026 }
11027 return VINF_SUCCESS;
11028}
11029
11030#undef IEM_MOVS_CASE
11031
11032/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11033#define IEM_CMPS_CASE(ValBits, AddrBits) \
11034 IEM_MC_BEGIN(3, 3); \
11035 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11036 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11037 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11038 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11039 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11040 \
11041 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11042 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
11043 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11044 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11045 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11046 IEM_MC_REF_EFLAGS(pEFlags); \
11047 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11048 \
11049 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11050 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11051 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11052 } IEM_MC_ELSE() { \
11053 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11054 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11055 } IEM_MC_ENDIF(); \
11056 IEM_MC_ADVANCE_RIP(); \
11057 IEM_MC_END(); \
11058
11059/** Opcode 0xa6. */
11060FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11061{
11062 IEMOP_HLP_NO_LOCK_PREFIX();
11063
11064 /*
11065 * Use the C implementation if a repeat prefix is encountered.
11066 */
11067 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11068 {
11069 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11070 switch (pIemCpu->enmEffAddrMode)
11071 {
11072 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11073 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11074 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11076 }
11077 }
11078 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11079 {
11080 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11081 switch (pIemCpu->enmEffAddrMode)
11082 {
11083 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11084 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11085 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11087 }
11088 }
11089 IEMOP_MNEMONIC("cmps Xb,Yb");
11090
11091 /*
11092 * Sharing case implementation with cmps[wdq] below.
11093 */
11094 switch (pIemCpu->enmEffAddrMode)
11095 {
11096 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11097 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11098 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11100 }
11101 return VINF_SUCCESS;
11102
11103}
11104
11105
11106/** Opcode 0xa7. */
11107FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11108{
11109 IEMOP_HLP_NO_LOCK_PREFIX();
11110
11111 /*
11112 * Use the C implementation if a repeat prefix is encountered.
11113 */
11114 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11115 {
11116 IEMOP_MNEMONIC("repe cmps Xv,Yv");
11117 switch (pIemCpu->enmEffOpSize)
11118 {
11119 case IEMMODE_16BIT:
11120 switch (pIemCpu->enmEffAddrMode)
11121 {
11122 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
11123 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
11124 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
11125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11126 }
11127 break;
11128 case IEMMODE_32BIT:
11129 switch (pIemCpu->enmEffAddrMode)
11130 {
11131 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
11132 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
11133 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
11134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11135 }
11136 case IEMMODE_64BIT:
11137 switch (pIemCpu->enmEffAddrMode)
11138 {
11139 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11140 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
11141 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
11142 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11143 }
11144 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11145 }
11146 }
11147
11148 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11149 {
11150 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11151 switch (pIemCpu->enmEffOpSize)
11152 {
11153 case IEMMODE_16BIT:
11154 switch (pIemCpu->enmEffAddrMode)
11155 {
11156 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
11157 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
11158 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
11159 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11160 }
11161 break;
11162 case IEMMODE_32BIT:
11163 switch (pIemCpu->enmEffAddrMode)
11164 {
11165 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
11166 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
11167 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
11168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11169 }
11170 case IEMMODE_64BIT:
11171 switch (pIemCpu->enmEffAddrMode)
11172 {
11173 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11174 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
11175 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
11176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11177 }
11178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11179 }
11180 }
11181
11182 IEMOP_MNEMONIC("cmps Xv,Yv");
11183
11184 /*
11185 * Annoying double switch here.
11186 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11187 */
11188 switch (pIemCpu->enmEffOpSize)
11189 {
11190 case IEMMODE_16BIT:
11191 switch (pIemCpu->enmEffAddrMode)
11192 {
11193 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11194 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11195 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11197 }
11198 break;
11199
11200 case IEMMODE_32BIT:
11201 switch (pIemCpu->enmEffAddrMode)
11202 {
11203 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11204 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11205 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11207 }
11208 break;
11209
11210 case IEMMODE_64BIT:
11211 switch (pIemCpu->enmEffAddrMode)
11212 {
11213 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11214 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11215 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11217 }
11218 break;
11219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11220 }
11221 return VINF_SUCCESS;
11222
11223}
11224
11225#undef IEM_CMPS_CASE
11226
/**
 * Opcode 0xa8 - TEST AL, imm8.
 *
 * ANDs AL with the immediate without storing the result, updating EFLAGS
 * only.  AF is undefined for TEST, hence the verification exemption.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11234
11235
/**
 * Opcode 0xa9 - TEST eAX/rAX, imm16/32.
 *
 * Operand-size sensitive TEST against the accumulator; flags only, no
 * result stored.  AF is undefined for TEST, hence the verification
 * exemption.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11243
11244
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * single-iteration (no REP prefix) STOS cases.
 *
 * Stores xAX (at the given width) to ES:[xDI] and then advances or rewinds
 * xDI by ValBits/8 depending on EFLAGS.DF.  Note: STOS always uses ES, no
 * segment override applies.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL to ES:[xDI], advancing xDI per EFLAGS.DF.  REP forms (REPZ and
 * REPNZ behave identically for STOS) are deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11294
11295
11296/** Opcode 0xab. */
11297FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11298{
11299 IEMOP_HLP_NO_LOCK_PREFIX();
11300
11301 /*
11302 * Use the C implementation if a repeat prefix is encountered.
11303 */
11304 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11305 {
11306 IEMOP_MNEMONIC("rep stos Yv,rAX");
11307 switch (pIemCpu->enmEffOpSize)
11308 {
11309 case IEMMODE_16BIT:
11310 switch (pIemCpu->enmEffAddrMode)
11311 {
11312 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11313 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11314 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11316 }
11317 break;
11318 case IEMMODE_32BIT:
11319 switch (pIemCpu->enmEffAddrMode)
11320 {
11321 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11322 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11323 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11325 }
11326 case IEMMODE_64BIT:
11327 switch (pIemCpu->enmEffAddrMode)
11328 {
11329 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11330 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11331 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11333 }
11334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11335 }
11336 }
11337 IEMOP_MNEMONIC("stos Yv,rAX");
11338
11339 /*
11340 * Annoying double switch here.
11341 * Using ugly macro for implementing the cases, sharing it with stosb.
11342 */
11343 switch (pIemCpu->enmEffOpSize)
11344 {
11345 case IEMMODE_16BIT:
11346 switch (pIemCpu->enmEffAddrMode)
11347 {
11348 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11349 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11350 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11352 }
11353 break;
11354
11355 case IEMMODE_32BIT:
11356 switch (pIemCpu->enmEffAddrMode)
11357 {
11358 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11359 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11360 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11362 }
11363 break;
11364
11365 case IEMMODE_64BIT:
11366 switch (pIemCpu->enmEffAddrMode)
11367 {
11368 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11369 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11370 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11372 }
11373 break;
11374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11375 }
11376 return VINF_SUCCESS;
11377}
11378
11379#undef IEM_STOS_CASE
11380
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * single-iteration (no REP prefix) LODS cases.
 *
 * Loads a value from iEffSeg:[xSI] (segment override honored) into the
 * accumulator and then advances or rewinds xSI by ValBits/8 depending on
 * EFLAGS.DF.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11396
/**
 * Opcode 0xac - LODSB.
 *
 * Loads the byte at iEffSeg:[xSI] into AL, advancing xSI per EFLAGS.DF.
 * REP forms are deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11430
11431
11432/** Opcode 0xad. */
11433FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11434{
11435 IEMOP_HLP_NO_LOCK_PREFIX();
11436
11437 /*
11438 * Use the C implementation if a repeat prefix is encountered.
11439 */
11440 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11441 {
11442 IEMOP_MNEMONIC("rep lods rAX,Xv");
11443 switch (pIemCpu->enmEffOpSize)
11444 {
11445 case IEMMODE_16BIT:
11446 switch (pIemCpu->enmEffAddrMode)
11447 {
11448 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11449 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11450 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11452 }
11453 break;
11454 case IEMMODE_32BIT:
11455 switch (pIemCpu->enmEffAddrMode)
11456 {
11457 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11458 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11459 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11461 }
11462 case IEMMODE_64BIT:
11463 switch (pIemCpu->enmEffAddrMode)
11464 {
11465 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11466 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11467 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11469 }
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472 }
11473 IEMOP_MNEMONIC("lods rAX,Xv");
11474
11475 /*
11476 * Annoying double switch here.
11477 * Using ugly macro for implementing the cases, sharing it with lodsb.
11478 */
11479 switch (pIemCpu->enmEffOpSize)
11480 {
11481 case IEMMODE_16BIT:
11482 switch (pIemCpu->enmEffAddrMode)
11483 {
11484 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11485 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11486 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11488 }
11489 break;
11490
11491 case IEMMODE_32BIT:
11492 switch (pIemCpu->enmEffAddrMode)
11493 {
11494 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11495 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11496 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11497 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11498 }
11499 break;
11500
11501 case IEMMODE_64BIT:
11502 switch (pIemCpu->enmEffAddrMode)
11503 {
11504 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11505 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11506 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11508 }
11509 break;
11510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11511 }
11512 return VINF_SUCCESS;
11513}
11514
11515#undef IEM_LODS_CASE
11516
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * single-iteration (no REP prefix) SCAS cases.
 *
 * Compares the accumulator (xAX at the given width) with the value at
 * ES:[xDI] via the CMP assembly worker (EFLAGS only, accumulator is not
 * written), then advances or rewinds xDI by ValBits/8 depending on
 * EFLAGS.DF.  Note: SCAS always uses ES, no segment override applies.
 *
 * @param   ValBits     Operand width in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address width in bits (16, 32 or 64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11538
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL with the byte at ES:[xDI], updating EFLAGS and advancing
 * xDI per EFLAGS.DF.  REPE/REPNE forms are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11583
11584
11585/** Opcode 0xaf. */
11586FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11587{
11588 IEMOP_HLP_NO_LOCK_PREFIX();
11589
11590 /*
11591 * Use the C implementation if a repeat prefix is encountered.
11592 */
11593 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11594 {
11595 IEMOP_MNEMONIC("repe scas rAX,Xv");
11596 switch (pIemCpu->enmEffOpSize)
11597 {
11598 case IEMMODE_16BIT:
11599 switch (pIemCpu->enmEffAddrMode)
11600 {
11601 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11602 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11603 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11605 }
11606 break;
11607 case IEMMODE_32BIT:
11608 switch (pIemCpu->enmEffAddrMode)
11609 {
11610 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11611 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11612 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11614 }
11615 case IEMMODE_64BIT:
11616 switch (pIemCpu->enmEffAddrMode)
11617 {
11618 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11622 }
11623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11624 }
11625 }
11626 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11627 {
11628 IEMOP_MNEMONIC("repne scas rAX,Xv");
11629 switch (pIemCpu->enmEffOpSize)
11630 {
11631 case IEMMODE_16BIT:
11632 switch (pIemCpu->enmEffAddrMode)
11633 {
11634 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11635 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11636 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11638 }
11639 break;
11640 case IEMMODE_32BIT:
11641 switch (pIemCpu->enmEffAddrMode)
11642 {
11643 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11644 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11645 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11647 }
11648 case IEMMODE_64BIT:
11649 switch (pIemCpu->enmEffAddrMode)
11650 {
11651 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
11652 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11653 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11655 }
11656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11657 }
11658 }
11659 IEMOP_MNEMONIC("scas rAX,Xv");
11660
11661 /*
11662 * Annoying double switch here.
11663 * Using ugly macro for implementing the cases, sharing it with scasb.
11664 */
11665 switch (pIemCpu->enmEffOpSize)
11666 {
11667 case IEMMODE_16BIT:
11668 switch (pIemCpu->enmEffAddrMode)
11669 {
11670 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11671 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11672 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11674 }
11675 break;
11676
11677 case IEMMODE_32BIT:
11678 switch (pIemCpu->enmEffAddrMode)
11679 {
11680 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11681 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11682 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11684 }
11685 break;
11686
11687 case IEMMODE_64BIT:
11688 switch (pIemCpu->enmEffAddrMode)
11689 {
11690 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11691 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11692 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11694 }
11695 break;
11696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11697 }
11698 return VINF_SUCCESS;
11699}
11700
11701#undef IEM_SCAS_CASE
11702
11703/**
11704 * Common 'mov r8, imm8' helper.
11705 */
11706FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11707{
11708 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11709 IEMOP_HLP_NO_LOCK_PREFIX();
11710
11711 IEM_MC_BEGIN(0, 1);
11712 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11713 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11714 IEM_MC_ADVANCE_RIP();
11715 IEM_MC_END();
11716
11717 return VINF_SUCCESS;
11718}
11719
11720
/** Opcode 0xb0 - MOV AL, imm8 (REX.B selects R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11727
11728
/** Opcode 0xb1 - MOV CL, imm8 (REX.B selects R9L). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11735
11736
/** Opcode 0xb2 - MOV DL, imm8 (REX.B selects R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11743
11744
/** Opcode 0xb3 - MOV BL, imm8 (REX.B selects R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11751
11752
/** Opcode 0xb4 - MOV AH, imm8.
 * Register index 4 (xSP) is intentional: without a REX prefix the 8-bit
 * register encodings 4..7 name AH/CH/DH/BH; presumably the U8 register
 * accessor maps them accordingly, and with REX the same index selects
 * SPL/R12L via uRexB (NOTE(review): accessor behavior not visible here). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11759
11760
/** Opcode 0xb5 - MOV CH, imm8 (index 5 = CH without REX; BPL/R13L with). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11767
11768
/** Opcode 0xb6 - MOV DH, imm8 (index 6 = DH without REX; SIL/R14L with). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11775
11776
/** Opcode 0xb7 - MOV BH, imm8 (index 7 = BH without REX; DIL/R15L with). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11783
11784
11785/**
11786 * Common 'mov regX,immX' helper.
11787 */
11788FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11789{
11790 switch (pIemCpu->enmEffOpSize)
11791 {
11792 case IEMMODE_16BIT:
11793 {
11794 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11795 IEMOP_HLP_NO_LOCK_PREFIX();
11796
11797 IEM_MC_BEGIN(0, 1);
11798 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11799 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11800 IEM_MC_ADVANCE_RIP();
11801 IEM_MC_END();
11802 break;
11803 }
11804
11805 case IEMMODE_32BIT:
11806 {
11807 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11808 IEMOP_HLP_NO_LOCK_PREFIX();
11809
11810 IEM_MC_BEGIN(0, 1);
11811 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11812 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11813 IEM_MC_ADVANCE_RIP();
11814 IEM_MC_END();
11815 break;
11816 }
11817 case IEMMODE_64BIT:
11818 {
11819 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11820 IEMOP_HLP_NO_LOCK_PREFIX();
11821
11822 IEM_MC_BEGIN(0, 1);
11823 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11824 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11825 IEM_MC_ADVANCE_RIP();
11826 IEM_MC_END();
11827 break;
11828 }
11829 }
11830
11831 return VINF_SUCCESS;
11832}
11833
11834
11835/** Opcode 0xb8. */
11836FNIEMOP_DEF(iemOp_eAX_Iv)
11837{
11838 IEMOP_MNEMONIC("mov rAX,IV");
11839 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11840}
11841
11842
11843/** Opcode 0xb9. */
11844FNIEMOP_DEF(iemOp_eCX_Iv)
11845{
11846 IEMOP_MNEMONIC("mov rCX,IV");
11847 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11848}
11849
11850
11851/** Opcode 0xba. */
11852FNIEMOP_DEF(iemOp_eDX_Iv)
11853{
11854 IEMOP_MNEMONIC("mov rDX,IV");
11855 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11856}
11857
11858
11859/** Opcode 0xbb. */
11860FNIEMOP_DEF(iemOp_eBX_Iv)
11861{
11862 IEMOP_MNEMONIC("mov rBX,IV");
11863 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11864}
11865
11866
11867/** Opcode 0xbc. */
11868FNIEMOP_DEF(iemOp_eSP_Iv)
11869{
11870 IEMOP_MNEMONIC("mov rSP,IV");
11871 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11872}
11873
11874
11875/** Opcode 0xbd. */
11876FNIEMOP_DEF(iemOp_eBP_Iv)
11877{
11878 IEMOP_MNEMONIC("mov rBP,IV");
11879 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11880}
11881
11882
11883/** Opcode 0xbe. */
11884FNIEMOP_DEF(iemOp_eSI_Iv)
11885{
11886 IEMOP_MNEMONIC("mov rSI,IV");
11887 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11888}
11889
11890
11891/** Opcode 0xbf. */
11892FNIEMOP_DEF(iemOp_eDI_Iv)
11893{
11894 IEMOP_MNEMONIC("mov rDI,IV");
11895 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11896}
11897
11898
/**
 * Opcode 0xc0 - Group 2 shift/rotate Eb, imm8 (186+).
 *
 * ModR/M.reg selects the operation: 0=ROL, 1=ROR, 2=RCL, 3=RCR, 4=SHL,
 * 5=SHR, 7=SAR; /6 is an invalid encoding.  The shift count is the imm8
 * that follows the ModR/M (and any displacement) bytes.  OF and AF are
 * architecturally undefined for multi-bit shifts, hence the verification
 * exemption.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - note the imm8 count is fetched only after the effective
           address calculation has consumed any displacement bytes. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11958
11959
11960/** Opcode 0xc1. */
11961FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
11962{
11963 IEMOP_HLP_MIN_186();
11964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11965 PCIEMOPSHIFTSIZES pImpl;
11966 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11967 {
11968 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
11969 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
11970 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
11971 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
11972 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
11973 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
11974 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
11975 case 6: return IEMOP_RAISE_INVALID_OPCODE();
11976 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
11977 }
11978 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
11979
11980 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11981 {
11982 /* register */
11983 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
11984 IEMOP_HLP_NO_LOCK_PREFIX();
11985 switch (pIemCpu->enmEffOpSize)
11986 {
11987 case IEMMODE_16BIT:
11988 IEM_MC_BEGIN(3, 0);
11989 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11990 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11991 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11992 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11993 IEM_MC_REF_EFLAGS(pEFlags);
11994 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
11995 IEM_MC_ADVANCE_RIP();
11996 IEM_MC_END();
11997 return VINF_SUCCESS;
11998
11999 case IEMMODE_32BIT:
12000 IEM_MC_BEGIN(3, 0);
12001 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12002 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12003 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12004 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12005 IEM_MC_REF_EFLAGS(pEFlags);
12006 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12007 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12008 IEM_MC_ADVANCE_RIP();
12009 IEM_MC_END();
12010 return VINF_SUCCESS;
12011
12012 case IEMMODE_64BIT:
12013 IEM_MC_BEGIN(3, 0);
12014 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12015 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12016 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12017 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12018 IEM_MC_REF_EFLAGS(pEFlags);
12019 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12020 IEM_MC_ADVANCE_RIP();
12021 IEM_MC_END();
12022 return VINF_SUCCESS;
12023
12024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12025 }
12026 }
12027 else
12028 {
12029 /* memory */
12030 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12031 switch (pIemCpu->enmEffOpSize)
12032 {
12033 case IEMMODE_16BIT:
12034 IEM_MC_BEGIN(3, 2);
12035 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12039
12040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12042 IEM_MC_ASSIGN(cShiftArg, cShift);
12043 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12044 IEM_MC_FETCH_EFLAGS(EFlags);
12045 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12046
12047 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12048 IEM_MC_COMMIT_EFLAGS(EFlags);
12049 IEM_MC_ADVANCE_RIP();
12050 IEM_MC_END();
12051 return VINF_SUCCESS;
12052
12053 case IEMMODE_32BIT:
12054 IEM_MC_BEGIN(3, 2);
12055 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12056 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12057 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12059
12060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12061 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12062 IEM_MC_ASSIGN(cShiftArg, cShift);
12063 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12064 IEM_MC_FETCH_EFLAGS(EFlags);
12065 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12066
12067 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12068 IEM_MC_COMMIT_EFLAGS(EFlags);
12069 IEM_MC_ADVANCE_RIP();
12070 IEM_MC_END();
12071 return VINF_SUCCESS;
12072
12073 case IEMMODE_64BIT:
12074 IEM_MC_BEGIN(3, 2);
12075 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12076 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12077 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12079
12080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12081 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12082 IEM_MC_ASSIGN(cShiftArg, cShift);
12083 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12084 IEM_MC_FETCH_EFLAGS(EFlags);
12085 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12086
12087 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12088 IEM_MC_COMMIT_EFLAGS(EFlags);
12089 IEM_MC_ADVANCE_RIP();
12090 IEM_MC_END();
12091 return VINF_SUCCESS;
12092
12093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12094 }
12095 }
12096}
12097
12098
/** Opcode 0xc2 - retn Iw: near return, releasing Iw extra bytes from the stack. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    /* The immediate is the number of bytes to pop off the stack after the return address. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12108
12109
/** Opcode 0xc3 - retn: plain near return (pops nothing beyond the return address). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0 /* no extra bytes to pop */);
}
12118
12119
/** Opcode 0xc4 - les Gv,Mp in legacy modes; doubles as the 2-byte VEX prefix. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet; raise #UD. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Load ES and the general register from the far pointer in memory. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12140
12141
/** Opcode 0xc5 - lds Gv,Mp in legacy modes; doubles as the 3-byte VEX prefix. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            /* Load DS and the general register from the far pointer in memory. */
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX form requires protected mode. */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet; raise #UD. */
}
12179
12180
/** Opcode 0xc6 - group 11: only /0 (mov Eb,Ib) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte follows the addressing bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12212
12213
/** Opcode 0xc7 - group 11: only /0 (mov Ev,Iz) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is at most 32 bits; sign-extend it to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* Iz stays 4 bytes even with 64-bit operand size */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12294
12295
12296
12297
/** Opcode 0xc8 - enter Iw,Ib: allocate a stack frame of Iw bytes, nesting level Ib. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12309
12310
12311/** Opcode 0xc9. */
12312FNIEMOP_DEF(iemOp_leave)
12313{
12314 IEMOP_MNEMONIC("retn");
12315 IEMOP_HLP_MIN_186();
12316 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12317 IEMOP_HLP_NO_LOCK_PREFIX();
12318 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12319}
12320
12321
/** Opcode 0xca - retf Iw: far return, releasing Iw extra bytes from the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    /* The immediate is the number of bytes to pop after the far return address. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12331
12332
/** Opcode 0xcb - retf: plain far return. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0 /* no extra bytes to pop */);
}
12341
12342
/** Opcode 0xcc - int3: breakpoint trap (#BP). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* fIsBpInstr=true distinguishes the dedicated 0xcc encoding from "int 3" (0xcd 0x03). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12349
12350
/** Opcode 0xcd - int Ib: software interrupt with vector from the immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12358
12359
/** Opcode 0xce - into: raise #OF if the overflow flag is set; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT(); /* INTO is #UD in long mode. */

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    /* iemCImpl_int only dispatches the interrupt when EFLAGS.OF is set. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12373
12374
/** Opcode 0xcf - iret: return from interrupt, restoring flags (and possibly stack/CS). */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12382
12383
/** Opcode 0xd0 - group 2: rotate/shift Eb by a constant count of 1. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12439
12440
12441
/** Opcode 0xd1 - group 2: rotate/shift Ev by a constant count of 1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12571
12572
/** Opcode 0xd2 - group 2: rotate/shift Eb by the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12630
12631
/** Opcode 0xd3 - group 2: rotate/shift Ev by the count in CL. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The reg field of the ModR/M byte selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for some of these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in long mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12767
/** Opcode 0xd4 - aam Ib: ASCII adjust AX after multiply; the immediate is the divisor. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is #UD in long mode. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* a zero divisor raises #DE, just like DIV */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12779
12780
/** Opcode 0xd5 - aad Ib: ASCII adjust AX before divide; the immediate is the multiplier. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is #UD in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12790
12791
12792/** Opcode 0xd6. */
12793FNIEMOP_DEF(iemOp_salc)
12794{
12795 IEMOP_MNEMONIC("salc");
12796 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12797 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12799 IEMOP_HLP_NO_64BIT();
12800
12801 IEM_MC_BEGIN(0, 0);
12802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12803 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12804 } IEM_MC_ELSE() {
12805 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12806 } IEM_MC_ENDIF();
12807 IEM_MC_ADVANCE_RIP();
12808 IEM_MC_END();
12809 return VINF_SUCCESS;
12810}
12811
12812
/** Opcode 0xd7 - xlat: AL = [seg:(r/e)BX + AL], table lookup indexed by AL. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* The table base register width follows the effective address size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* zero-extended AL is the table index */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* zero-extended AL is the table index */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* zero-extended AL is the table index */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12859
12860
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes back into ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12891
12892
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both ST0 and STn must be occupied; only FSW is updated, no value stored. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register to mark */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12923
12924
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * The pop happens on both the success and the underflow paths (the *_THEN_POP
 * variants are used in both branches).
 *
 * @param   bRm         The ModR/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12955
12956
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i) - add ST(i) to ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12963
12964
/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i) - multiply ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12971
12972
/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i) - compare, updating only FSW (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12979
12980
/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i) - same compare as FCOM, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12987
12988
/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i) - ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12995
12996
/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i) - reversed operands: ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13003
13004
/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i) - ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13011
13012
/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i) - reversed operands: ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13019
13020
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * memory operand), and storing the result in ST0.
 *
 * Effective address calculation and the memory fetch happen before the ST0
 * emptiness check; an empty ST0 signals stack underflow instead of calling
 * the assembly worker.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13056
13057
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real - add a 32-bit real from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13064
13065
/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real - multiply ST(0) by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13072
13073
/** Opcode 0xd8 !11/2.  FCOM ST(0),m32real - compare ST(0) with a 32-bit real
 *  from memory, updating only FSW (no store, no pop).
 *  Open-coded because the compare helpers only cover the register forms. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant so FDP/FDS get recorded along with FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13106
13107
/** Opcode 0xd8 !11/3.  FCOMP ST(0),m32real - same as FCOM m32real but pops the
 *  stack afterwards (note the *_THEN_POP macros in both branches). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13140
13141
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real - ST(0) = ST(0) - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13148
13149
/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real - reversed operands: ST(0) = m32real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13156
13157
/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real - ST(0) = ST(0) / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13164
13165
/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real - reversed operands: ST(0) = m32real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13172
13173
/** Opcode 0xd8 - first x87 escape opcode.  Dispatches on the ModR/M byte:
 *  register forms (mod == 3) operate on ST(0)/ST(i); memory forms use a
 *  32-bit real operand.  Both switches fully cover reg values 0..7. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the offset of the opcode byte for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(0) op ST(i). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST(0) op m32real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13211
13212
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert a 32-bit real from memory to 80-bit and push it.
 * Pushing requires the register that will become the new top, ST(7) from the
 * current top's point of view, to be empty; otherwise stack overflow.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13245
13246
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST(0) to memory as a 32-bit real (no pop).
 * On stack underflow with the invalid-operation exception masked (FCW.IM),
 * a negative QNaN is written to memory instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed conditionally below. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13281
13282
/** Opcode 0xd9 !11/3
 * FSTP m32real - like FST m32real but pops the stack afterwards
 * (note the *_THEN_POP macros in both branches). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked underflow: write a negative QNaN, then signal underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13317
13318
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; the heavy lifting is done by
 * the C implementation (iemCImpl_fldenv).  The 14 vs 28 byte format is
 * selected by the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13335
13336
13337/** Opcode 0xd9 !11/5 */
13338FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13339{
13340 IEMOP_MNEMONIC("fldcw m2byte");
13341 IEM_MC_BEGIN(1, 1);
13342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13343 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13346 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13347 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13348 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13349 IEM_MC_END();
13350 return VINF_SUCCESS;
13351}
13352
13353
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment without checking for pending
 * exceptions; handled by the C implementation (iemCImpl_fnstenv). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13370
13371
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory.  Simple enough to be
 * done entirely inline (no assembly/C worker needed). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13388
13389
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except update the FPU opcode/instruction pointers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13407
13408
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a clean FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13434
13435
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchange ST(0) and ST(i); C1 is set via X86_FSW_C1 on success.
 * The underflow path is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(0) goes to ST(i), old ST(i) becomes the result in ST(0). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13464
13465
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST(0) to ST(i), then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: nothing to copy, just pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i) and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13508
13509
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * An empty ST0 signals a stack underflow on ST0 instead of calling the
 * assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13539
13540
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13547
13548
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13555
13556
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW
 * (no register contents are modified).
 *
 * An empty ST0 signals a stack underflow without nominating a destination
 * register (UINT8_MAX).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13585
13586
/** Opcode 0xd9 0xe4.  FTST - compare ST(0) with 0.0, setting only FSW flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13593
13594
/** Opcode 0xd9 0xe5.  FXAM - classify the value in ST(0) via C0-C3. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13601
13602
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Pushing requires the register that will become the new top, ST(7) from the
 * current top's point of view, to be empty; otherwise stack overflow is
 * signalled.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13630
13631
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13638
13639
/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13646
13647
/** Opcode 0xd9 0xea.  FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13654
/** Opcode 0xd9 0xeb.  FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13661
13662
/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13669
/** Opcode 0xd9 0xed.  FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13676
13677
/** Opcode 0xd9 0xee.  FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13684
13685
/** Opcode 0xd9 0xf0.  F2XM1 - replace ST(0) with 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13692
13693
13694/** Opcode 0xd9 0xf1. */
13695FNIEMOP_DEF(iemOp_fylx2)
13696{
13697 IEMOP_MNEMONIC("fylx2 st0");
13698 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13699}
13700
13701
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * An empty ST0 signals the combined push-underflow condition.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13731
13732
/** Opcode 0xd9 0xf2.  FPTAN - partial tangent: replaces ST(0) and pushes a
 *  second result (1.0 on the real CPU). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13739
13740
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: STn (selected via the r/m field of bRm) is the first
 * operand/destination and ST0 the second - the reverse of the st0_stN helpers.
 *
 * @param   bRm         The ModR/M byte (or a literal value whose low bits
 *                      select STn, e.g. 1 for ST1).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13772
13773
/** Opcode 0xd9 0xf3.  FPATAN - partial arctangent; result goes to ST(1) (note
 *  the literal 1 selecting STn) and the stack is popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13780
13781
/** Opcode 0xd9 0xf4.  FXTRACT - split ST(0) into exponent and significand;
 *  one result replaces ST(0), the other is pushed. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13788
13789
/** Opcode 0xd9 0xf5.  FPREM1 - IEEE partial remainder of ST(0)/ST(1),
 *  result in ST(0) (literal 1 selects ST(1) as the second operand). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13796
13797
/** Opcode 0xd9 0xf6.  FDECSTP - decrement the FPU stack TOP pointer only;
 *  no register contents or tags are changed. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13820
13821
/** Opcode 0xd9 0xf7.  FINCSTP - increment the FPU stack TOP pointer only;
 *  no register contents or tags are changed. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13844
13845
/** Opcode 0xd9 0xf8.  FPREM - partial remainder (truncating) of ST(0)/ST(1),
 *  result in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13852
13853
/** Opcode 0xd9 0xf9.  FYL2XP1 - ST(1) * log2(ST(0)+1), stored in ST(1), pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13860
13861
/** Opcode 0xd9 0xfa.  FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13868
13869
/** Opcode 0xd9 0xfb.  FSINCOS - sine replaces ST(0), cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13876
13877
/** Opcode 0xd9 0xfc.  FRNDINT - round ST(0) to integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13884
13885
/** Opcode 0xd9 0xfd.  FSCALE - scale ST(0) by powers of two taken from ST(1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13892
13893
/** Opcode 0xd9 0xfe.  FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13900
13901
/** Opcode 0xd9 0xff.  FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13908
13909
/** Used by iemOp_EscF1 for register-form modrm bytes 0xe0..0xff
 *  (index = modrm byte - 0xe0; invalid encodings point to iemOp_Invalid).
 *  NOTE(review): the 0xf1 entry's identifier 'iemOp_fylx2' looks like a typo
 *  for FYL2X; it does dispatch the fyl2x implementation. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13946
13947
/**
 * Opcode 0xd9 - escape group F1 decoder.
 *
 * Records the FPU opcode offset, fetches the modrm byte, and dispatches:
 * register forms (mod == 3) by modrm.reg, with reg 4..7 going through the
 * g_apfnEscF1_E0toFF table; memory forms by modrm.reg to the m32r /
 * environment / control-word handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the instruction started for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd9 0xd0 (FNOP) is defined in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 => opcode bytes 0xe0..0xff, table-dispatched. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13989
13990
/**
 * Opcode 0xda 11/0 - FCMOVB ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when CF is set.  If either register is empty the
 * FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14017
14018
/**
 * Opcode 0xda 11/1 - FCMOVE ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when ZF is set.  If either register is empty the
 * FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14045
14046
/**
 * Opcode 0xda 11/2 - FCMOVBE ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when CF or ZF is set.  If either register is empty
 * the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14073
14074
/**
 * Opcode 0xda 11/3 - FCMOVU ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when PF is set (unordered).  If either register is
 * empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14101
14102
14103/**
14104 * Common worker for FPU instructions working on ST0 and STn, only affecting
14105 * flags, and popping twice when done.
14106 *
14107 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14108 */
14109FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14110{
14111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14112
14113 IEM_MC_BEGIN(3, 1);
14114 IEM_MC_LOCAL(uint16_t, u16Fsw);
14115 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14116 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14117 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14118
14119 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14120 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14121 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14122 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14123 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14124 IEM_MC_ELSE()
14125 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14126 IEM_MC_ENDIF();
14127 IEM_MC_USED_FPU();
14128 IEM_MC_ADVANCE_RIP();
14129
14130 IEM_MC_END();
14131 return VINF_SUCCESS;
14132}
14133
14134
/**
 * Opcode 0xda 0xe9 - FUCOMPP.
 *
 * Unordered compare of ST(0) with ST(1), then pop twice; delegates to the
 * flags-only double-pop worker with iemAImpl_fucom_r80_by_r80.
 */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14141
14142
14143/**
14144 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14145 * the result in ST0.
14146 *
14147 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14148 */
14149FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14150{
14151 IEM_MC_BEGIN(3, 3);
14152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14153 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14154 IEM_MC_LOCAL(int32_t, i32Val2);
14155 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14157 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14158
14159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14161
14162 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14163 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14164 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14165
14166 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14167 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14168 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14169 IEM_MC_ELSE()
14170 IEM_MC_FPU_STACK_UNDERFLOW(0);
14171 IEM_MC_ENDIF();
14172 IEM_MC_USED_FPU();
14173 IEM_MC_ADVANCE_RIP();
14174
14175 IEM_MC_END();
14176 return VINF_SUCCESS;
14177}
14178
14179
/**
 * Opcode 0xda !11/0 - FIADD m32int.
 *
 * ST(0) += m32i via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14186
14187
/**
 * Opcode 0xda !11/1 - FIMUL m32int.
 *
 * ST(0) *= m32i via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14194
14195
/**
 * Opcode 0xda !11/2 - FICOM m32int.
 *
 * Compares ST(0) with a signed 32-bit integer memory operand, updating only
 * FSW (no result stored, no pop).
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14228
14229
/**
 * Opcode 0xda !11/3 - FICOMP m32int.
 *
 * Same as FICOM m32int (shares iemAImpl_ficom_r80_by_i32) but pops the
 * stack afterwards (THEN_POP variants of the FSW/underflow updates).
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14262
14263
/**
 * Opcode 0xda !11/4 - FISUB m32int.
 *
 * ST(0) -= m32i via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14270
14271
/**
 * Opcode 0xda !11/5 - FISUBR m32int.
 *
 * Reversed subtract (ST(0) = m32i - ST(0)) via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14278
14279
/**
 * Opcode 0xda !11/6 - FIDIV m32int.
 *
 * ST(0) /= m32i via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14286
14287
/**
 * Opcode 0xda !11/7 - FIDIVR m32int.
 *
 * Reversed divide (ST(0) = m32i / ST(0)) via the common ST0/m32i worker.
 */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14294
14295
/**
 * Opcode 0xda - escape group F2 decoder.
 *
 * Register forms (mod == 3) are the FCMOV variants plus FUCOMPP (only at
 * encoding 0xe9); memory forms are the m32int arithmetic/compare group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the instruction started for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14335
14336
/**
 * Opcode 0xdb !11/0 - FILD m32int.
 *
 * Loads a signed 32-bit integer from memory, converts it to r80 and pushes
 * it onto the FPU stack.  If the register that will become the new top
 * (ST(7) before the push) is not free, the push-overflow path is taken.
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14368
14369
/**
 * Opcode 0xdb !11/1 - FISTTP m32int.
 *
 * Stores ST(0) to memory as a signed 32-bit integer using truncation
 * (iemAImpl_fistt_r80_to_i32), then pops the stack.  On stack underflow
 * with IM masked, the integer-indefinite value (INT32_MIN) is stored.
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14404
14405
/**
 * Opcode 0xdb !11/2 - FIST m32int.
 *
 * Stores ST(0) to memory as a signed 32-bit integer using the current
 * rounding mode (iemAImpl_fist_r80_to_i32); no pop.  On stack underflow
 * with IM masked, the integer-indefinite value (INT32_MIN) is stored.
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14440
14441
14442/** Opcode 0xdb !11/3. */
14443FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14444{
14445 IEMOP_MNEMONIC("fisttp m32i");
14446 IEM_MC_BEGIN(3, 2);
14447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14448 IEM_MC_LOCAL(uint16_t, u16Fsw);
14449 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14450 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14451 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14452
14453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14455 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14456 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14457
14458 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14459 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14460 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14461 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14462 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14463 IEM_MC_ELSE()
14464 IEM_MC_IF_FCW_IM()
14465 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14466 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14467 IEM_MC_ENDIF();
14468 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14469 IEM_MC_ENDIF();
14470 IEM_MC_USED_FPU();
14471 IEM_MC_ADVANCE_RIP();
14472
14473 IEM_MC_END();
14474 return VINF_SUCCESS;
14475}
14476
14477
/**
 * Opcode 0xdb !11/5 - FLD m80real.
 *
 * Loads an 80-bit real from memory and pushes it onto the FPU stack (via
 * iemAImpl_fld_r80_from_r80).  If ST(7) is not free, the push-overflow
 * path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14509
14510
/**
 * Opcode 0xdb !11/7 - FSTP m80real.
 *
 * Stores ST(0) to memory as an 80-bit real (iemAImpl_fst_r80_to_r80), then
 * pops the stack.  On stack underflow with IM masked, a negative QNaN is
 * stored instead.
 */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14545
14546
/**
 * Opcode 0xdb 11/0 - FCMOVNB ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when CF is clear.  If either register is empty the
 * FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14573
14574
/**
 * Opcode 0xdb 11/1 - FCMOVNE ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when ZF is clear.  If either register is empty the
 * FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14601
14602
/**
 * Opcode 0xdb 11/2 - FCMOVNBE ST(0), ST(i).
 *
 * Copies ST(i) to ST(0) when both CF and ZF are clear.  If either register
 * is empty the FPU stack-underflow path is taken instead.
 */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14629
14630
14631/** Opcode 0xdb 11/3. */
14632FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14633{
14634 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14636
14637 IEM_MC_BEGIN(0, 1);
14638 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14639
14640 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14641 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14642
14643 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14644 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14645 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14646 IEM_MC_ENDIF();
14647 IEM_MC_UPDATE_FPU_OPCODE_IP();
14648 IEM_MC_ELSE()
14649 IEM_MC_FPU_STACK_UNDERFLOW(0);
14650 IEM_MC_ENDIF();
14651 IEM_MC_USED_FPU();
14652 IEM_MC_ADVANCE_RIP();
14653
14654 IEM_MC_END();
14655 return VINF_SUCCESS;
14656}
14657
14658
/**
 * Opcode 0xdb 0xe0 - FNENI.
 *
 * 8087 interrupt-enable instruction; on later FPUs it is treated as a
 * no-op here (only the device-not-available check is performed).
 */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14670
14671
/**
 * Opcode 0xdb 0xe1 - FNDISI.
 *
 * 8087 interrupt-disable instruction; on later FPUs it is treated as a
 * no-op here (only the device-not-available check is performed).
 */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14683
14684
/**
 * Opcode 0xdb 0xe2 - FNCLEX.
 *
 * Clears the FPU exception bits in FSW (IEM_MC_CLEAR_FSW_EX) without
 * checking for pending exceptions first.
 */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14698
14699
/**
 * Opcode 0xdb 0xe3 - FNINIT.
 *
 * Defers to the iemCImpl_finit worker with fCheckXcpts=false, i.e. the
 * no-wait form that does not check for pending FPU exceptions.
 */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14707
14708
/**
 * Opcode 0xdb 0xe4 - FNSETPM.
 *
 * 80287 "set protected mode on fpu" instruction; ignored here (only the
 * device-not-available check is performed).
 */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14720
14721
/**
 * Opcode 0xdb 0xe5 - FRSTPM.
 *
 * 80287XL "reset pm, back to real mode" instruction.  Newer CPUs raise
 * \#UD for this encoding, which is the active branch below; the ignore
 * variant is kept disabled under @c \#if 0.
 */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14737
14738
/**
 * Opcode 0xdb 11/5 - FUCOMI ST(0), ST(i).
 *
 * Unordered compare setting EFLAGS; defers to the shared
 * iemCImpl_fcomi_fucomi worker without popping (fPop=false).
 */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14745
14746
/**
 * Opcode 0xdb 11/6 - FCOMI ST(0), ST(i).
 *
 * Ordered compare setting EFLAGS; defers to the shared
 * iemCImpl_fcomi_fucomi worker without popping (fPop=false).
 */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14753
14754
/**
 * Opcode 0xdb - escape group F3 decoder.
 *
 * Register forms (mod == 3): FCMOVNcc, the 0xe0..0xe7 administrative group
 * (FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM), FUCOMI and FCOMI.  Memory
 * forms: m32int load/store group plus m80real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the instruction started for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The administrative 0xe0..0xe7 sub-group. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable - all inner cases return. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14804
14805
14806/**
14807 * Common worker for FPU instructions working on STn and ST0, and storing the
14808 * result in STn unless IE, DE or ZE was raised.
14809 *
14810 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14811 */
14812FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14813{
14814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14815
14816 IEM_MC_BEGIN(3, 1);
14817 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14818 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14819 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14820 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14821
14822 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14823 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14824
14825 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14826 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14827 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14828 IEM_MC_ELSE()
14829 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14830 IEM_MC_ENDIF();
14831 IEM_MC_USED_FPU();
14832 IEM_MC_ADVANCE_RIP();
14833
14834 IEM_MC_END();
14835 return VINF_SUCCESS;
14836}
14837
14838
/**
 * Opcode 0xdc 11/0 - FADD ST(i), ST(0).
 *
 * ST(i) += ST(0) via the common STn/ST0 worker.
 */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14845
14846
/**
 * Opcode 0xdc 11/1 - FMUL ST(i), ST(0).
 *
 * ST(i) *= ST(0) via the common STn/ST0 worker.
 */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14853
14854
/**
 * Opcode 0xdc 11/4 - FSUBR ST(i), ST(0).
 *
 * Reversed subtract stored in ST(i), via the common STn/ST0 worker with
 * iemAImpl_fsubr_r80_by_r80.
 */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14861
14862
/**
 * Opcode 0xdc 11/5 - FSUB ST(i), ST(0).
 *
 * Subtract stored in ST(i), via the common STn/ST0 worker with
 * iemAImpl_fsub_r80_by_r80.
 */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14869
14870
/**
 * Opcode 0xdc 11/6 - FDIVR ST(i), ST(0).
 *
 * Reversed divide stored in ST(i), via the common STn/ST0 worker with
 * iemAImpl_fdivr_r80_by_r80.
 */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14877
14878
/**
 * Opcode 0xdc 11/7 - FDIV ST(i), ST(0).
 *
 * Divide stored in ST(i), via the common STn/ST0 worker with
 * iemAImpl_fdiv_r80_by_r80.
 */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14885
14886
14887/**
14888 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14889 * memory operand, and storing the result in ST0.
14890 *
14891 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14892 */
14893FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14894{
14895 IEM_MC_BEGIN(3, 3);
14896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14897 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14898 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14899 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14900 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14901 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14902
14903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14906 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14907
14908 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14909 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14910 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14911 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14912 IEM_MC_ELSE()
14913 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14914 IEM_MC_ENDIF();
14915 IEM_MC_USED_FPU();
14916 IEM_MC_ADVANCE_RIP();
14917
14918 IEM_MC_END();
14919 return VINF_SUCCESS;
14920}
14921
14922
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real - ST(0) += m64, via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14929
14930
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real - ST(0) *= m64, via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14937
14938
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64real - compare ST(0) with the m64 operand; only FSW is
       updated, no stack register is written. */
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Underflow path uses UINT8_MAX since no stack register is the target. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14971
14972
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64real - same as FCOM m64real but pops the register stack
       afterwards (note the *_THEN_POP FSW updaters below). */
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15005
15006
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real - ST(0) -= m64, via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15013
15014
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real - ST(0) = m64 - ST(0), via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15021
15022
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real - ST(0) /= m64, via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15029
15030
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real - ST(0) = m64 / ST(0), via the common ST0/m64r worker. */
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15037
15038
/** Opcode 0xdc.
 * FPU escape group 4: register forms operate on ST(i) with ST(0) as the
 * second operand; memory forms take an m64 real operand. Dispatch is on
 * the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the FPU opcode location before consuming the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15075
15076
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    /* FLD m64real - convert the m64 value to 80-bit and push it. */
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Register 7 relative to the current top is the slot the push will
       occupy; if it's in use we have a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15108
15109
/** Opcode 0xdd !11/1 (comment fixed; the EscF5 dispatcher routes /1 here). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    /* FISTTP m64int - store ST(0) as a truncated 64-bit integer and pop. */
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit is conditional on the FSW
       the assembly worker returns. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with the invalid-op exception masked, store the
           integer indefinite value; then record the underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15144
15145
/** Opcode 0xdd !11/2 (comment fixed; the EscF5 dispatcher routes /2 here). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    /* FST m64real - store ST(0) to memory as a 64-bit real, no pop. */
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15180
15181
15182
15183
/** Opcode 0xdd !11/3 (comment fixed; the EscF5 dispatcher routes /3 here). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    /* FSTP m64real - same as FST m64real but pops the stack afterwards
       (note the *_THEN_POP FSW updaters). */
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15218
15219
/** Opcode 0xdd !11/4 (comment fixed; the EscF5 dispatcher routes /4 here). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    /* FRSTOR m94/108byte - restore the full FPU state; the heavy lifting
       is deferred to the C implementation (iemCImpl_frstor). */
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15236
15237
/** Opcode 0xdd !11/6 (comment fixed; the EscF5 dispatcher routes /6 here). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    /* FNSAVE m94/108byte - save the full FPU state; deferred to the C
       implementation (iemCImpl_fnsave). */
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15255
/** Opcode 0xdd !11/7 (comment fixed; the EscF5 dispatcher routes /7 here). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    /* FNSTSW m16 - store the FPU status word to memory; no FPU exception
       check (this is the no-wait form). */
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15279
15280
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    /* FFREE ST(i) - mark the register as empty in the tag word. */
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15302
15303
/** Opcode 0xdd 11/2 (comment fixed; the EscF5 dispatcher routes case 2 here). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    /* FST ST(i) - copy ST(0) to ST(i); FSW C-bits come from the zero FSW
       passed to IEM_MC_SET_FPU_RESULT. */
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15326
15327
15328/** Opcode 0xdd 11/3. */
15329FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15330{
15331 IEMOP_MNEMONIC("fcom st0,stN");
15332 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15333}
15334
15335
15336/** Opcode 0xdd 11/4. */
15337FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15338{
15339 IEMOP_MNEMONIC("fcomp st0,stN");
15340 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15341}
15342
15343
/** Opcode 0xdd.
 * FPU escape group 5: register forms are FFREE/FST/FSTP/FUCOM(P); memory
 * forms take m64 real/int operands plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the FPU opcode location before consuming the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15380
15381
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST(0) - add, store in ST(i), then pop. */
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15388
15389
/** Opcode 0xde 11/1 (comment fixed; the EscF6 dispatcher routes case 1 here). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP ST(i),ST(0) - multiply, store in ST(i), then pop. */
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15396
15397
15398/** Opcode 0xde 0xd9. */
15399FNIEMOP_DEF(iemOp_fcompp)
15400{
15401 IEMOP_MNEMONIC("fucompp st0,stN");
15402 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15403}
15404
15405
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), then pop. */
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15412
15413
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP ST(i),ST(0) - subtract, store in ST(i), then pop. */
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15420
15421
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), then pop. */
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15428
15429
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP ST(i),ST(0) - divide, store in ST(i), then pop. */
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15436
15437
15438/**
15439 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15440 * the result in ST0.
15441 *
15442 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15443 */
15444FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15445{
15446 IEM_MC_BEGIN(3, 3);
15447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15448 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15449 IEM_MC_LOCAL(int16_t, i16Val2);
15450 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15451 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15452 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15453
15454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15456
15457 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15458 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15459 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15460
15461 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15462 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15463 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15464 IEM_MC_ELSE()
15465 IEM_MC_FPU_STACK_UNDERFLOW(0);
15466 IEM_MC_ENDIF();
15467 IEM_MC_USED_FPU();
15468 IEM_MC_ADVANCE_RIP();
15469
15470 IEM_MC_END();
15471 return VINF_SUCCESS;
15472}
15473
15474
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int - ST(0) += (int16), via the common ST0/m16i worker. */
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15481
15482
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int - ST(0) *= (int16), via the common ST0/m16i worker. */
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15489
15490
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* FICOM m16int - compare ST(0) with the 16-bit integer operand; only
       FSW is updated. */
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15523
15524
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* FICOMP m16int - same as FICOM m16int but pops the stack afterwards
       (note the *_THEN_POP FSW updaters). */
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15557
15558
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int - ST(0) -= (int16), via the common ST0/m16i worker. */
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15565
15566
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int - ST(0) = (int16) - ST(0), via the common worker. */
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15573
15574
15575/** Opcode 0xde !11/6. */
15576FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15577{
15578 IEMOP_MNEMONIC("fiadd m16i");
15579 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15580}
15581
15582
15583/** Opcode 0xde !11/7. */
15584FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15585{
15586 IEMOP_MNEMONIC("fiadd m16i");
15587 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15588}
15589
15590
/** Opcode 0xde.
 * FPU escape group 6: register forms are the popping arithmetic ops
 * (FADDP..FDIVP, plus FCOMPP at 0xd9); memory forms take m16 integer
 * operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the FPU opcode location before consuming the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15629
15630
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    /* FFREEP ST(i) - free the register, then increment the stack top. */
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15652
15653
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    /* FNSTSW AX - copy the FPU status word to AX (no-wait form, no FPU
       exception check). */
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15669
15670
15671/** Opcode 0xdf 11/5. */
15672FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15673{
15674 IEMOP_MNEMONIC("fcomip st0,stN");
15675 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15676}
15677
15678
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    /* FCOMIP ST(0),ST(i) - compare setting EFLAGS, then pop; deferred to
       the common fcomi/fucomi C implementation. */
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15685
15686
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    /* FILD m16int - convert the 16-bit integer to 80-bit real and push. */
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 relative to the current top is the slot the push will
       occupy; if it's in use we have a stack overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15718
15719
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* FISTTP m16int - store ST(0) as a truncated 16-bit integer and pop. */
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15754
15755
15756/** Opcode 0xdf !11/2. */
15757FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15758{
15759 IEMOP_MNEMONIC("fistp m16i");
15760 IEM_MC_BEGIN(3, 2);
15761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15762 IEM_MC_LOCAL(uint16_t, u16Fsw);
15763 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15764 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15765 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15766
15767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15769 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15770 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15771
15772 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15773 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15774 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15775 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15776 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15777 IEM_MC_ELSE()
15778 IEM_MC_IF_FCW_IM()
15779 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15780 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15781 IEM_MC_ENDIF();
15782 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15783 IEM_MC_ENDIF();
15784 IEM_MC_USED_FPU();
15785 IEM_MC_ADVANCE_RIP();
15786
15787 IEM_MC_END();
15788 return VINF_SUCCESS;
15789}
15790
15791
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* FISTP m16int - store ST(0) as a rounded 16-bit integer, then pop
       (note the *_THEN_POP FSW updaters). */
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15826
15827
/** Opcode 0xdf !11/4 - fbld m80bcd.  Unimplemented; FNIEMOP_STUB_1 provides a stub body. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15830
15831
/**
 * Opcode 0xdf !11/5 - fild m64i.
 *
 * Loads a 64-bit signed integer from memory, converts it to 80-bit real and
 * pushes it onto the x87 stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source before touching the FPU stack so memory faults come first. */
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push targets ST(7) of the new top; overflow if that register is occupied. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15863
15864
/** Opcode 0xdf !11/6 - fbstp m80bcd.  Unimplemented; FNIEMOP_STUB_1 provides a stub body. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15867
15868
/**
 * Opcode 0xdf !11/7 - fistp m64i.
 *
 * Stores ST(0) to a 64-bit signed integer in memory and pops the x87 stack.
 * Structure mirrors iemOp_fistp_m16i, just with 64-bit destination.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination first so memory faults precede FPU stack handling. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15903
15904
/**
 * Opcode 0xdf - x87 escape group 7.
 *
 * Dispatches on the ModR/M reg field; register forms (mod == 3) and memory
 * forms have different instruction sets.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 is valid in /4: FNSTSW AX. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15942
15943
/**
 * Opcode 0xe0 - loopne Jb.
 *
 * Decrements rCX (width per effective address size) and takes the short
 * relative branch while rCX != 0 and ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the address size, not the operand size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15990
15991
/**
 * Opcode 0xe1 - loope Jb.
 *
 * Decrements rCX and branches while rCX != 0 and ZF is set.  Mirrors
 * iemOp_loopne_Jb with the ZF condition inverted.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Counter register width follows the effective address size. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16038
16039
/**
 * Opcode 0xe2 - loop Jb.
 *
 * Decrements rCX and branches while rCX != 0.  A 'loop $' (branch to its own
 * start, i.e. i8Imm equals minus the instruction length) would just spin
 * counting rCX down to zero, so that case is short-circuited by clearing rCX
 * and falling through.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the instruction length here; equality means jump-to-self. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Tight self-loop: skip the countdown, set CX to its final value. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16113
16114
/**
 * Opcode 0xe3 - jcxz/jecxz/jrcxz Jb.
 *
 * Branches when the counter register (width per effective address size) is
 * zero; does not modify the register.  Note the inverted IF structure:
 * "non-zero" is the fall-through case.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16158
16159
16160/** Opcode 0xe4 */
16161FNIEMOP_DEF(iemOp_in_AL_Ib)
16162{
16163 IEMOP_MNEMONIC("in eAX,Ib");
16164 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16165 IEMOP_HLP_NO_LOCK_PREFIX();
16166 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16167}
16168
16169
/**
 * Opcode 0xe5 - in eAX,Ib.
 *
 * Word/dword port input from an immediate port number; access width (2 or 4)
 * follows the effective operand size.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16178
16179
/**
 * Opcode 0xe6 - out Ib,AL.
 *
 * Byte-sized port output to an immediate port number (cbReg = 1).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16188
16189
/**
 * Opcode 0xe7 - out Ib,eAX.
 *
 * Word/dword port output to an immediate port number; width follows the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16198
16199
/**
 * Opcode 0xe8 - call Jv (near relative call).
 *
 * Fetches a relative displacement sized by the effective operand size (which
 * defaults to 64-bit in long mode) and defers to the per-size C helper.  The
 * 64-bit form uses a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16228
16229
/**
 * Opcode 0xe9 - jmp Jv (near relative jump).
 *
 * 16-bit form uses a 16-bit displacement; 32-bit and 64-bit forms share the
 * signed 32-bit displacement path (the immediate stays 32-bit in long mode).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16259
16260
/**
 * Opcode 0xea - jmp Ap (direct far jump, sel:offset immediate).
 *
 * Invalid in 64-bit mode.  The offset immediate is 16 or 32 bits depending on
 * the effective operand size.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far jump C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16277
16278
/**
 * Opcode 0xeb - jmp Jb (short relative jump, 8-bit displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16292
16293
/**
 * Opcode 0xec - in AL,DX.
 *
 * Byte-sized port input from the port in DX (cbReg = 1).
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16301
16302
/**
 * Opcode 0xed - in eAX,DX.
 *
 * Word/dword port input from the port in DX; width follows the effective
 * operand size.  NOTE(review): the function name is missing the 'in_' part
 * (iemOp_eAX_DX vs. siblings like iemOp_in_AL_DX); left as-is since the
 * opcode table elsewhere references this symbol.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16310
16311
/**
 * Opcode 0xee - out DX,AL.
 *
 * Byte-sized port output to the port in DX (cbReg = 1).
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16319
16320
/**
 * Opcode 0xef - out DX,eAX.
 *
 * Word/dword port output to the port in DX; width follows the effective
 * operand size.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16328
16329
/**
 * Opcode 0xf0 - lock prefix.
 *
 * Records the prefix in fPrefixes and recurses into the one-byte opcode table
 * for the instruction that follows.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16339
16340
/**
 * Opcode 0xf1 - int1 / icebp.
 *
 * Raises \#DB via the generic software interrupt path; fIsBpInstr is false so
 * it is not treated as INT3.
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16349
16350
/**
 * Opcode 0xf2 - repne/repnz prefix.
 *
 * Clears any earlier REPZ bit (the last of the two prefixes wins), sets
 * REPNZ, and recurses into the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16362
16363
/**
 * Opcode 0xf3 - rep/repe/repz prefix.
 *
 * Clears any earlier REPNZ bit (the last of the two prefixes wins), sets
 * REPZ, and recurses into the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16375
16376
/**
 * Opcode 0xf4 - hlt.
 *
 * Defers to the C implementation.  The disabled (&& 0) debug hack would drop
 * the target CPU to 286 restrictions for low CS selectors when dynamic CPU
 * targeting is compiled in.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC && 0
    if (   pIemCpu->uTargetCpu == IEMTARGETCPU_CURRENT
        && pIemCpu->CTX_SUFF(pCtx)->cs.Sel <= 1000)
    {
        pIemCpu->uTargetCpu = IEMTARGETCPU_286;
        LogAlways(("\niemOp_hlt: Enabled CPU restrictions!\n\n"));
    }
#endif
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16391
16392
/**
 * Opcode 0xf5 - cmc (complement carry flag).
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16404
16405
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands call the normal (unlocked) worker directly; memory
 * operands are mapped read-write and use the locked worker when a LOCK
 * prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is active. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16449
16450
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are handled here per effective operand size, mapped read-write, with the
 * locked worker selected when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16529
16530
/**
 * Opcode 0xf6 /0 - test Eb,Ib.
 *
 * TEST only reads its destination, so the memory form maps the operand
 * read-only and commits with IEM_ACCESS_DATA_R; only EFLAGS are written back.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm=1: the effective address calc must account for the trailing immediate byte. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16578
16579
/**
 * Opcode 0xf7 /0 - test Ev,Iv.
 *
 * Per-operand-size expansion of TEST with an immediate.  Like the byte form,
 * the memory operand is mapped read-only (TEST writes nothing back but
 * EFLAGS).  The 64-bit immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm=2: account for the trailing 16-bit immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm=4: account for the trailing 32-bit immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm=4: the 64-bit form still carries a 32-bit (sign-extended) immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16716
16717
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for mul/imul/div/idiv Eb.
 *
 * The byte-sized multiply/divide workers operate on AX and return non-zero on
 * a divide error, in which case \#DE is raised instead of advancing RIP.
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The byte-sized assembly worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 signals a divide error from the worker. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16772
16773
16774/** Opcode 0xf7 /4, /5, /6 and /7. */
16775FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
16776{
16777 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16778 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
16779
16780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16781 {
16782 /* register access */
16783 switch (pIemCpu->enmEffOpSize)
16784 {
16785 case IEMMODE_16BIT:
16786 {
16787 IEMOP_HLP_NO_LOCK_PREFIX();
16788 IEM_MC_BEGIN(4, 1);
16789 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16790 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16791 IEM_MC_ARG(uint16_t, u16Value, 2);
16792 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16793 IEM_MC_LOCAL(int32_t, rc);
16794
16795 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16796 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16797 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16798 IEM_MC_REF_EFLAGS(pEFlags);
16799 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16800 IEM_MC_IF_LOCAL_IS_Z(rc) {
16801 IEM_MC_ADVANCE_RIP();
16802 } IEM_MC_ELSE() {
16803 IEM_MC_RAISE_DIVIDE_ERROR();
16804 } IEM_MC_ENDIF();
16805
16806 IEM_MC_END();
16807 return VINF_SUCCESS;
16808 }
16809
16810 case IEMMODE_32BIT:
16811 {
16812 IEMOP_HLP_NO_LOCK_PREFIX();
16813 IEM_MC_BEGIN(4, 1);
16814 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16815 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16816 IEM_MC_ARG(uint32_t, u32Value, 2);
16817 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16818 IEM_MC_LOCAL(int32_t, rc);
16819
16820 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16821 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16822 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16823 IEM_MC_REF_EFLAGS(pEFlags);
16824 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16825 IEM_MC_IF_LOCAL_IS_Z(rc) {
16826 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16827 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16828 IEM_MC_ADVANCE_RIP();
16829 } IEM_MC_ELSE() {
16830 IEM_MC_RAISE_DIVIDE_ERROR();
16831 } IEM_MC_ENDIF();
16832
16833 IEM_MC_END();
16834 return VINF_SUCCESS;
16835 }
16836
16837 case IEMMODE_64BIT:
16838 {
16839 IEMOP_HLP_NO_LOCK_PREFIX();
16840 IEM_MC_BEGIN(4, 1);
16841 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16842 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16843 IEM_MC_ARG(uint64_t, u64Value, 2);
16844 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16845 IEM_MC_LOCAL(int32_t, rc);
16846
16847 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16848 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16849 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16850 IEM_MC_REF_EFLAGS(pEFlags);
16851 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16852 IEM_MC_IF_LOCAL_IS_Z(rc) {
16853 IEM_MC_ADVANCE_RIP();
16854 } IEM_MC_ELSE() {
16855 IEM_MC_RAISE_DIVIDE_ERROR();
16856 } IEM_MC_ENDIF();
16857
16858 IEM_MC_END();
16859 return VINF_SUCCESS;
16860 }
16861
16862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16863 }
16864 }
16865 else
16866 {
16867 /* memory access. */
16868 switch (pIemCpu->enmEffOpSize)
16869 {
16870 case IEMMODE_16BIT:
16871 {
16872 IEMOP_HLP_NO_LOCK_PREFIX();
16873 IEM_MC_BEGIN(4, 2);
16874 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16875 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16876 IEM_MC_ARG(uint16_t, u16Value, 2);
16877 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16878 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16879 IEM_MC_LOCAL(int32_t, rc);
16880
16881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16882 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
16883 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16884 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16885 IEM_MC_REF_EFLAGS(pEFlags);
16886 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16887 IEM_MC_IF_LOCAL_IS_Z(rc) {
16888 IEM_MC_ADVANCE_RIP();
16889 } IEM_MC_ELSE() {
16890 IEM_MC_RAISE_DIVIDE_ERROR();
16891 } IEM_MC_ENDIF();
16892
16893 IEM_MC_END();
16894 return VINF_SUCCESS;
16895 }
16896
16897 case IEMMODE_32BIT:
16898 {
16899 IEMOP_HLP_NO_LOCK_PREFIX();
16900 IEM_MC_BEGIN(4, 2);
16901 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16902 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16903 IEM_MC_ARG(uint32_t, u32Value, 2);
16904 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16906 IEM_MC_LOCAL(int32_t, rc);
16907
16908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16909 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
16910 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16911 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16912 IEM_MC_REF_EFLAGS(pEFlags);
16913 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16914 IEM_MC_IF_LOCAL_IS_Z(rc) {
16915 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16916 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16917 IEM_MC_ADVANCE_RIP();
16918 } IEM_MC_ELSE() {
16919 IEM_MC_RAISE_DIVIDE_ERROR();
16920 } IEM_MC_ENDIF();
16921
16922 IEM_MC_END();
16923 return VINF_SUCCESS;
16924 }
16925
16926 case IEMMODE_64BIT:
16927 {
16928 IEMOP_HLP_NO_LOCK_PREFIX();
16929 IEM_MC_BEGIN(4, 2);
16930 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16931 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16932 IEM_MC_ARG(uint64_t, u64Value, 2);
16933 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16935 IEM_MC_LOCAL(int32_t, rc);
16936
16937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16938 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
16939 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16940 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16941 IEM_MC_REF_EFLAGS(pEFlags);
16942 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16943 IEM_MC_IF_LOCAL_IS_Z(rc) {
16944 IEM_MC_ADVANCE_RIP();
16945 } IEM_MC_ELSE() {
16946 IEM_MC_RAISE_DIVIDE_ERROR();
16947 } IEM_MC_ENDIF();
16948
16949 IEM_MC_END();
16950 return VINF_SUCCESS;
16951 }
16952
16953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16954 }
16955 }
16956}
16957
/** Opcode 0xf6 - Group 3 with a byte operand (Eb).
 *
 * The actual instruction is selected by the reg field of the ModR/M byte:
 * /0 test, /1 invalid (\#UD), /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* SF, ZF, AF and PF are left undefined by mul; tell the verifier so. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All six arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16994
16995
/** Opcode 0xf7 - Group 3 with a word/dword/qword operand (Ev).
 *
 * Same layout as group 3 Eb (0xf6), selected by the reg field of ModR/M:
 * /0 test, /1 invalid (\#UD), /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are left undefined by mul; tell the verifier so. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All six arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17032
17033
/** Opcode 0xf8 - clc (clear the carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17045
17046
/** Opcode 0xf9 - stc (set the carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17058
17059
/** Opcode 0xfa - cli (clear interrupt flag).
 *  Deferred to the C implementation worker (privilege checks etc. live there). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17067
17068
/** Opcode 0xfb - sti (set interrupt flag).
 *  Deferred to the C implementation worker (privilege checks etc. live there). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17075
17076
/** Opcode 0xfc - cld (clear the direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17088
17089
/** Opcode 0xfd - std (set the direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17101
17102
17103/** Opcode 0xfe. */
17104FNIEMOP_DEF(iemOp_Grp4)
17105{
17106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17107 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17108 {
17109 case 0:
17110 IEMOP_MNEMONIC("inc Ev");
17111 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17112 case 1:
17113 IEMOP_MNEMONIC("dec Ev");
17114 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17115 default:
17116 IEMOP_MNEMONIC("grp4-ud");
17117 return IEMOP_RAISE_INVALID_OPCODE();
17118 }
17119}
17120
17121
17122/**
17123 * Opcode 0xff /2.
17124 * @param bRm The RM byte.
17125 */
17126FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17127{
17128 IEMOP_MNEMONIC("calln Ev");
17129 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17131
17132 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17133 {
17134 /* The new RIP is taken from a register. */
17135 switch (pIemCpu->enmEffOpSize)
17136 {
17137 case IEMMODE_16BIT:
17138 IEM_MC_BEGIN(1, 0);
17139 IEM_MC_ARG(uint16_t, u16Target, 0);
17140 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17141 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17142 IEM_MC_END()
17143 return VINF_SUCCESS;
17144
17145 case IEMMODE_32BIT:
17146 IEM_MC_BEGIN(1, 0);
17147 IEM_MC_ARG(uint32_t, u32Target, 0);
17148 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17149 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17150 IEM_MC_END()
17151 return VINF_SUCCESS;
17152
17153 case IEMMODE_64BIT:
17154 IEM_MC_BEGIN(1, 0);
17155 IEM_MC_ARG(uint64_t, u64Target, 0);
17156 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17157 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17158 IEM_MC_END()
17159 return VINF_SUCCESS;
17160
17161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17162 }
17163 }
17164 else
17165 {
17166 /* The new RIP is taken from a register. */
17167 switch (pIemCpu->enmEffOpSize)
17168 {
17169 case IEMMODE_16BIT:
17170 IEM_MC_BEGIN(1, 1);
17171 IEM_MC_ARG(uint16_t, u16Target, 0);
17172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17174 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17175 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17176 IEM_MC_END()
17177 return VINF_SUCCESS;
17178
17179 case IEMMODE_32BIT:
17180 IEM_MC_BEGIN(1, 1);
17181 IEM_MC_ARG(uint32_t, u32Target, 0);
17182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17184 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17185 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17186 IEM_MC_END()
17187 return VINF_SUCCESS;
17188
17189 case IEMMODE_64BIT:
17190 IEM_MC_BEGIN(1, 1);
17191 IEM_MC_ARG(uint64_t, u64Target, 0);
17192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17194 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17195 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17196 IEM_MC_END()
17197 return VINF_SUCCESS;
17198
17199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17200 }
17201 }
17202}
17203
17204typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17205
17206FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17207{
17208 /* Registers? How?? */
17209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17210 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17211
17212 /* Far pointer loaded from memory. */
17213 switch (pIemCpu->enmEffOpSize)
17214 {
17215 case IEMMODE_16BIT:
17216 IEM_MC_BEGIN(3, 1);
17217 IEM_MC_ARG(uint16_t, u16Sel, 0);
17218 IEM_MC_ARG(uint16_t, offSeg, 1);
17219 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17223 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17224 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17225 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17226 IEM_MC_END();
17227 return VINF_SUCCESS;
17228
17229 case IEMMODE_64BIT:
17230 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17231 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17232 * and call far qword [rsp] encodings. */
17233 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17234 {
17235 IEM_MC_BEGIN(3, 1);
17236 IEM_MC_ARG(uint16_t, u16Sel, 0);
17237 IEM_MC_ARG(uint64_t, offSeg, 1);
17238 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17242 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17243 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17244 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17245 IEM_MC_END();
17246 return VINF_SUCCESS;
17247 }
17248 /* AMD falls thru. */
17249
17250 case IEMMODE_32BIT:
17251 IEM_MC_BEGIN(3, 1);
17252 IEM_MC_ARG(uint16_t, u16Sel, 0);
17253 IEM_MC_ARG(uint32_t, offSeg, 1);
17254 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17258 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17259 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17260 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17261 IEM_MC_END();
17262 return VINF_SUCCESS;
17263
17264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17265 }
17266}
17267
17268
17269/**
17270 * Opcode 0xff /3.
17271 * @param bRm The RM byte.
17272 */
17273FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17274{
17275 IEMOP_MNEMONIC("callf Ep");
17276 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17277}
17278
17279
17280/**
17281 * Opcode 0xff /4.
17282 * @param bRm The RM byte.
17283 */
17284FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17285{
17286 IEMOP_MNEMONIC("jmpn Ev");
17287 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17288 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17289
17290 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17291 {
17292 /* The new RIP is taken from a register. */
17293 switch (pIemCpu->enmEffOpSize)
17294 {
17295 case IEMMODE_16BIT:
17296 IEM_MC_BEGIN(0, 1);
17297 IEM_MC_LOCAL(uint16_t, u16Target);
17298 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17299 IEM_MC_SET_RIP_U16(u16Target);
17300 IEM_MC_END()
17301 return VINF_SUCCESS;
17302
17303 case IEMMODE_32BIT:
17304 IEM_MC_BEGIN(0, 1);
17305 IEM_MC_LOCAL(uint32_t, u32Target);
17306 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17307 IEM_MC_SET_RIP_U32(u32Target);
17308 IEM_MC_END()
17309 return VINF_SUCCESS;
17310
17311 case IEMMODE_64BIT:
17312 IEM_MC_BEGIN(0, 1);
17313 IEM_MC_LOCAL(uint64_t, u64Target);
17314 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17315 IEM_MC_SET_RIP_U64(u64Target);
17316 IEM_MC_END()
17317 return VINF_SUCCESS;
17318
17319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17320 }
17321 }
17322 else
17323 {
17324 /* The new RIP is taken from a memory location. */
17325 switch (pIemCpu->enmEffOpSize)
17326 {
17327 case IEMMODE_16BIT:
17328 IEM_MC_BEGIN(0, 2);
17329 IEM_MC_LOCAL(uint16_t, u16Target);
17330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17332 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17333 IEM_MC_SET_RIP_U16(u16Target);
17334 IEM_MC_END()
17335 return VINF_SUCCESS;
17336
17337 case IEMMODE_32BIT:
17338 IEM_MC_BEGIN(0, 2);
17339 IEM_MC_LOCAL(uint32_t, u32Target);
17340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17342 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17343 IEM_MC_SET_RIP_U32(u32Target);
17344 IEM_MC_END()
17345 return VINF_SUCCESS;
17346
17347 case IEMMODE_64BIT:
17348 IEM_MC_BEGIN(0, 2);
17349 IEM_MC_LOCAL(uint64_t, u64Target);
17350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17352 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17353 IEM_MC_SET_RIP_U64(u64Target);
17354 IEM_MC_END()
17355 return VINF_SUCCESS;
17356
17357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17358 }
17359 }
17360}
17361
17362
17363/**
17364 * Opcode 0xff /5.
17365 * @param bRm The RM byte.
17366 */
17367FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17368{
17369 IEMOP_MNEMONIC("jmpf Ep");
17370 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17371}
17372
17373
17374/**
17375 * Opcode 0xff /6.
17376 * @param bRm The RM byte.
17377 */
17378FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17379{
17380 IEMOP_MNEMONIC("push Ev");
17381 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17382
17383 /* Registers are handled by a common worker. */
17384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17385 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17386
17387 /* Memory we do here. */
17388 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17389 switch (pIemCpu->enmEffOpSize)
17390 {
17391 case IEMMODE_16BIT:
17392 IEM_MC_BEGIN(0, 2);
17393 IEM_MC_LOCAL(uint16_t, u16Src);
17394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17396 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17397 IEM_MC_PUSH_U16(u16Src);
17398 IEM_MC_ADVANCE_RIP();
17399 IEM_MC_END();
17400 return VINF_SUCCESS;
17401
17402 case IEMMODE_32BIT:
17403 IEM_MC_BEGIN(0, 2);
17404 IEM_MC_LOCAL(uint32_t, u32Src);
17405 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17407 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17408 IEM_MC_PUSH_U32(u32Src);
17409 IEM_MC_ADVANCE_RIP();
17410 IEM_MC_END();
17411 return VINF_SUCCESS;
17412
17413 case IEMMODE_64BIT:
17414 IEM_MC_BEGIN(0, 2);
17415 IEM_MC_LOCAL(uint64_t, u64Src);
17416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17418 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17419 IEM_MC_PUSH_U64(u64Src);
17420 IEM_MC_ADVANCE_RIP();
17421 IEM_MC_END();
17422 return VINF_SUCCESS;
17423
17424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17425 }
17426}
17427
17428
/** Opcode 0xff - Group 5.
 *
 * Dispatches on the reg field of the ModR/M byte: /0 inc Ev, /1 dec Ev,
 * /2 calln Ev, /3 callf Ep, /4 jmpn Ev, /5 jmpf Ep, /6 push Ev,
 * /7 invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit reg field is exhaustively handled above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17457
17458
17459
/**
 * The one byte opcode dispatch table.
 *
 * Indexed directly by the first opcode byte.  Group opcodes (0x80-0x83,
 * 0xc0/0xc1, 0xd0-0xd3, 0xf6/0xf7, 0xfe, 0xff), the FPU escapes (0xd8-0xdf)
 * and the two-byte escape (0x0f) dispatch further on subsequent bytes.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17527
17528
17529/** @} */
17530
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette