VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60430

Last change on this file since 60430 was 60415, checked in by vboxsync, 9 years ago

IEM: Implemented main characteristics of 8086, 80186 and 80286.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.7 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60415 2016-04-11 08:51:07Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  The register form executes directly on the
 * guest register; the memory form maps the destination and only commits
 * EFLAGS after the memory write has been committed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only accepted with a memory destination (see else branch). */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* No locked worker means the op doesn't write its destination (CMP/TEST), so map read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * The effective operand size selects which of the pImpl workers (U16/U32/U64)
 * is invoked.  The memory form uses the locked worker when a LOCK prefix is
 * present and maps the destination read-only when no locked worker exists
 * (CMP/TEST).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK requires a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't modify its destination, so skip the 64-bit high-half clearing for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Note: pfnLockedU8 is used as the "has locked variant?" indicator for all sizes. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Since the destination is always a register, the memory form only needs a
 * plain read of the source operand - no mapping/commit dance is required.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* reg field is the destination here, r/m the source - reverse of the rm_r8 worker. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * The effective operand size selects the pImpl worker; the destination is
 * always a register, so the memory source is simply fetched (no map/commit).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the high dword in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate itself and applies the operation to AL in place.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate stays 32 bits and is sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't modify its destination, so skip the high dword clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Shared handler for invalid opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0. SLDT - store the LDTR selector to a register (operand
 *  size honoured) or to a 16-bit memory word. 286+, not in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit word, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1. STR - store the task register selector; mirrors the
 *  structure of sldt above. 286+, not in real/V86 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit word, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2. LLDT - load the LDTR from a 16-bit selector; the heavy
 *  lifting (privilege/descriptor checks) is done by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t,        u16Sel,          0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3. LTR - load the task register from a 16-bit selector;
 *  privilege and descriptor validation is handled by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t,        u16Sel,          0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4. VERR - verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00. Group 6 dispatcher - routes on the ModR/M reg field;
 *  /6 and /7 are undefined and raise \#UD. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
787
788
/** Opcode 0x0f 0x01 /0. SGDT - store the GDT register to memory; deferred to
 *  iemCImpl_sgdt with the effective segment, address and operand size. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x01 /0 (mod=3). VMCALL - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0 (mod=3). VMLAUNCH - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0 (mod=3). VMRESUME - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /0 (mod=3). VMXOFF - not implemented; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
838
839
/** Opcode 0x0f 0x01 /1. SIDT - store the IDT register to memory; same shape as
 *  sgdt, deferred to iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
857
858
/** Opcode 0x0f 0x01 /1 (mod=3). MONITOR - deferred to iemCImpl_monitor with
 *  the effective segment (the monitored address is taken from rAX there). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
866
867
/** Opcode 0x0f 0x01 /1 (mod=3). MWAIT - deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
875
876
877/** Opcode 0x0f 0x01 /2. */
878FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
879{
880 IEMOP_MNEMONIC("lgdt");
881 IEMOP_HLP_64BIT_OP_SIZE();
882 IEM_MC_BEGIN(3, 1);
883 IEM_MC_ARG(uint8_t, iEffSeg, 0);
884 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
885 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
888 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
889 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
890 IEM_MC_END();
891 return VINF_SUCCESS;
892}
893
894
/** Opcode 0x0f 0x01 0xd0. XGETBV - only valid when the guest CPU reports
 *  XSAVE/XRSTOR support; raises \#UD otherwise. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
906
907
/** Opcode 0x0f 0x01 0xd1. XSETBV - only valid when the guest CPU reports
 *  XSAVE/XRSTOR support; raises \#UD otherwise. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
919
920
921/** Opcode 0x0f 0x01 /3. */
922FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
923{
924 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
925 ? IEMMODE_64BIT
926 : pIemCpu->enmEffOpSize;
927 IEM_MC_BEGIN(3, 1);
928 IEM_MC_ARG(uint8_t, iEffSeg, 0);
929 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
930 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
933 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
934 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
935 IEM_MC_END();
936 return VINF_SUCCESS;
937}
938
939
/* AMD SVM instructions (0x0f 0x01 0xd8..0xdf): stubbed as undefined opcodes. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
963
/** Opcode 0x0f 0x01 /4. SMSW - store the machine status word (low part of CR0)
 *  to a register (operand size honoured) or a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
                if (pIemCpu->uTargetCpu == IEMTARGETCPU_286)
                    IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); /* Reserved bits observed all set on real hw. */
#endif
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
1022
1023
/** Opcode 0x0f 0x01 /6. LMSW - load machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286(); /* LMSW first appeared on the 286. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the 16-bit GPR and defer to the C implementation. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: 16-bit read regardless of operand-size prefixes. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1052
1053
/** Opcode 0x0f 0x01 /7 (memory form). INVLPG - invalidate TLB entry.
 *  (The register forms of /7 are dispatched to swapgs/rdtscp by iemOp_Grp7.) */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486(); /* INVLPG first appeared on the 486. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1067
1068
/** Opcode 0x0f 0x01 /7, mod=3, rm=0. SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT(); /* #UD outside long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1077
1078
/** Opcode 0x0f 0x01 /7, mod=3, rm=1. RDTSCP - not yet implemented. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1086
1087
/** Opcode 0x0f 0x01. Group 7 dispatcher: selects the handler from the ModR/M
 *  reg field; for several /n encodings the mod=3 (register) forms are distinct
 *  instructions selected by the rm field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            /* mod=3: VMX instructions; rm=0 and rm=5..7 are undefined here. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            /* mod=3: AMD SVM instruction family; all 8 rm values are assigned,
               so this inner switch always returns. */
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1164
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *  (Previous comment claimed 0x0f 0x00 /3, which is LTR - copy/paste error.)
 *  @param fIsLar   true for LAR, false for LSL; forwarded to the C impl. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* LAR/LSL are protected-mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit destinations share the 64-bit C implementation. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is always a 16-bit read. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1266
1267
1268
/** Opcode 0x0f 0x02. LAR - load access rights. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true); /* true = LAR */
}
1275
1276
/** Opcode 0x0f 0x03. LSL - load segment limit. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false); /* false = LSL */
}
1283
1284
/** Opcode 0x0f 0x05. SYSCALL; fully handled in the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1292
1293
/** Opcode 0x0f 0x06. CLTS - clear CR0.TS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1301
1302
/** Opcode 0x0f 0x07. SYSRET; fully handled in the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1310
1311
/** Opcode 0x0f 0x08. INVD - not yet implemented. */
FNIEMOP_STUB(iemOp_invd);
//    IEMOP_HLP_MIN_486();
1315
1316
/** Opcode 0x0f 0x09. WBINVD - decoded and privilege-checked, otherwise a NOP here. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486(); /* WBINVD first appeared on the 486. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /* #GP(0) unless ring-0, as on real hw. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1329
1330
/** Opcode 0x0f 0x0b. UD2 - not yet implemented. */
FNIEMOP_STUB(iemOp_ud2);
1333
/** Opcode 0x0f 0x0d. AMD 3DNow! prefetch group (Grp P); emulated as a NOP. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid; only memory operands are defined. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm against AMD manuals. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address (may fault) but perform no actual prefetch. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1373
1374
/** Opcode 0x0f 0x0e. FEMMS - not yet implemented. */
FNIEMOP_STUB(iemOp_femms);


/* AMD 3DNow! sub-opcodes (0x0f 0x0f imm8); all currently unimplemented stubs,
   dispatched from iemOp_3Dnow below. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1450
1451
/** Opcode 0x0f 0x0f. 3DNow! escape: the sub-opcode is the trailing imm8 byte. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1493
1494
/* SSE/SSE2/SSE3 move instructions 0x0f 0x10..0x17; unimplemented stubs. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1511
1512
/** Opcode 0x0f 0x18. Group 16 - PREFETCHh hints; decoded but emulated as a NOP. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address (may fault) but perform no actual prefetch. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1544
1545
/** Opcode 0x0f 0x19..0x1f. Multi-byte NOP; still decodes the ModR/M operand. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: calculate the address (may fault) but access nothing. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1568
1569
/** Opcode 0x0f 0x20. MOV Rd,Cd - read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1601
1602
/** Opcode 0x0f 0x21. MOV Rd,Dd - read a debug register into a GPR.
 *  NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX() while the 0x0f 0x23 sibling
 *  uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() - confirm this asymmetry is
 *  intentional. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE(); /* No DR8..DR15. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1616
1617
/** Opcode 0x0f 0x22. MOV Cd,Rd - write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1649
1650
/** Opcode 0x0f 0x23. MOV Dd,Rd - write a GPR into a debug register. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE(); /* No DR8..DR15. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1664
1665
/** Opcode 0x0f 0x24. MOV Rd,Td - test registers; raises #UD on modeled CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1674
1675
/** Opcode 0x0f 0x26. MOV Td,Rd - test registers; raises #UD on modeled CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1684
1685
/* SSE/SSE2 instructions 0x0f 0x28..0x2f; unimplemented stubs. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1702
1703
/** Opcode 0x0f 0x30. WRMSR; fully handled in the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1711
1712
/** Opcode 0x0f 0x31. RDTSC; fully handled in the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1720
1721
/** Opcode 0x0f 0x32. RDMSR; fully handled in the C implementation.
 *  (Previous comment said 0x0f 0x33, which is RDPMC.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1729
1730
/** Opcode 0x0f 0x33. RDPMC. (Previous comment said 0x0f 0x34, which is SYSENTER.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1745
1746/**
1747 * Implements a conditional move.
1748 *
1749 * Wish there was an obvious way to do this where we could share and reduce
1750 * code bloat.
1751 *
1752 * @param a_Cnd The conditional "microcode" operation.
1753 */
1754#define CMOV_X(a_Cnd) \
1755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1756 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1757 { \
1758 switch (pIemCpu->enmEffOpSize) \
1759 { \
1760 case IEMMODE_16BIT: \
1761 IEM_MC_BEGIN(0, 1); \
1762 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1763 a_Cnd { \
1764 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1765 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1766 } IEM_MC_ENDIF(); \
1767 IEM_MC_ADVANCE_RIP(); \
1768 IEM_MC_END(); \
1769 return VINF_SUCCESS; \
1770 \
1771 case IEMMODE_32BIT: \
1772 IEM_MC_BEGIN(0, 1); \
1773 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1774 a_Cnd { \
1775 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1776 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1777 } IEM_MC_ELSE() { \
1778 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1779 } IEM_MC_ENDIF(); \
1780 IEM_MC_ADVANCE_RIP(); \
1781 IEM_MC_END(); \
1782 return VINF_SUCCESS; \
1783 \
1784 case IEMMODE_64BIT: \
1785 IEM_MC_BEGIN(0, 1); \
1786 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1787 a_Cnd { \
1788 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1789 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1790 } IEM_MC_ENDIF(); \
1791 IEM_MC_ADVANCE_RIP(); \
1792 IEM_MC_END(); \
1793 return VINF_SUCCESS; \
1794 \
1795 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1796 } \
1797 } \
1798 else \
1799 { \
1800 switch (pIemCpu->enmEffOpSize) \
1801 { \
1802 case IEMMODE_16BIT: \
1803 IEM_MC_BEGIN(0, 2); \
1804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1805 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1807 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1808 a_Cnd { \
1809 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1810 } IEM_MC_ENDIF(); \
1811 IEM_MC_ADVANCE_RIP(); \
1812 IEM_MC_END(); \
1813 return VINF_SUCCESS; \
1814 \
1815 case IEMMODE_32BIT: \
1816 IEM_MC_BEGIN(0, 2); \
1817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1818 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1820 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1821 a_Cnd { \
1822 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1823 } IEM_MC_ELSE() { \
1824 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1825 } IEM_MC_ENDIF(); \
1826 IEM_MC_ADVANCE_RIP(); \
1827 IEM_MC_END(); \
1828 return VINF_SUCCESS; \
1829 \
1830 case IEMMODE_64BIT: \
1831 IEM_MC_BEGIN(0, 2); \
1832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1833 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1835 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1836 a_Cnd { \
1837 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1838 } IEM_MC_ENDIF(); \
1839 IEM_MC_ADVANCE_RIP(); \
1840 IEM_MC_END(); \
1841 return VINF_SUCCESS; \
1842 \
1843 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1844 } \
1845 } do {} while (0)
1846
1847
1848
/** Opcode 0x0f 0x40. CMOVO - move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. CMOVNO - move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. CMOVC/CMOVB - move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. CMOVNC/CMOVAE - move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
1879
1880
/** Opcode 0x0f 0x44. CMOVE/CMOVZ - move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. CMOVNE/CMOVNZ - move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. CMOVBE - move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. CMOVNBE/CMOVA - move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
1911
1912
/** Opcode 0x0f 0x48. CMOVS - move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. CMOVNS - move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. CMOVP - move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. CMOVNP - move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
1943
1944
/** Opcode 0x0f 0x4c. CMOVL - move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. CMOVNL/CMOVGE - move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. CMOVLE - move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. CMOVNLE/CMOVG - move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1977
/* SSE/SSE2 arithmetic and conversion instructions 0x0f 0x50..0x5f; unimplemented stubs. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2010
2011
2012/**
2013 * Common worker for SSE2 and MMX instructions on the forms:
2014 * pxxxx xmm1, xmm2/mem128
2015 * pxxxx mm1, mm2/mem32
2016 *
2017 * The 2nd operand is the first half of a register, which in the memory case
2018 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2019 * memory accessed for MMX.
2020 *
2021 * Exceptions type 4.
2022 */
2023FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2024{
2025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2026 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2027 {
2028 case IEM_OP_PRF_SIZE_OP: /* SSE */
2029 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2030 {
2031 /*
2032 * Register, register.
2033 */
2034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2035 IEM_MC_BEGIN(2, 0);
2036 IEM_MC_ARG(uint128_t *, pDst, 0);
2037 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2038 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2039 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2040 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2041 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2042 IEM_MC_ADVANCE_RIP();
2043 IEM_MC_END();
2044 }
2045 else
2046 {
2047 /*
2048 * Register, memory.
2049 */
2050 IEM_MC_BEGIN(2, 2);
2051 IEM_MC_ARG(uint128_t *, pDst, 0);
2052 IEM_MC_LOCAL(uint64_t, uSrc);
2053 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2055
2056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2059 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2060
2061 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2062 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2063
2064 IEM_MC_ADVANCE_RIP();
2065 IEM_MC_END();
2066 }
2067 return VINF_SUCCESS;
2068
2069 case 0: /* MMX */
2070 if (!pImpl->pfnU64)
2071 return IEMOP_RAISE_INVALID_OPCODE();
2072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2073 {
2074 /*
2075 * Register, register.
2076 */
2077 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2078 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2079 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2080 IEM_MC_BEGIN(2, 0);
2081 IEM_MC_ARG(uint64_t *, pDst, 0);
2082 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2083 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2084 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2085 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2086 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2087 IEM_MC_ADVANCE_RIP();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 /*
2093 * Register, memory.
2094 */
2095 IEM_MC_BEGIN(2, 2);
2096 IEM_MC_ARG(uint64_t *, pDst, 0);
2097 IEM_MC_LOCAL(uint32_t, uSrc);
2098 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2100
2101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2103 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2104 IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2105
2106 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2107 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2108
2109 IEM_MC_ADVANCE_RIP();
2110 IEM_MC_END();
2111 }
2112 return VINF_SUCCESS;
2113
2114 default:
2115 return IEMOP_RAISE_INVALID_OPCODE();
2116 }
2117}
2118
2119
/** Opcode 0x0f 0x60 - punpcklbw: interleave the low-order bytes of the two
 *  operands (Intel SDM PUNPCKLBW). */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2126
2127
/** Opcode 0x0f 0x61 - punpcklwd: interleave the low-order words of the two
 *  operands (Intel SDM PUNPCKLWD). */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2134
2135
/** Opcode 0x0f 0x62 - punpckldq: interleave the low-order dwords of the two
 *  operands (Intel SDM PUNPCKLDQ). */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2142
2143
/* 0x0f 0x63..0x67: MMX/SSE2 pack and packed-compare ops - not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2154
2155
2156/**
2157 * Common worker for SSE2 and MMX instructions on the forms:
2158 * pxxxx xmm1, xmm2/mem128
2159 * pxxxx mm1, mm2/mem64
2160 *
2161 * The 2nd operand is the second half of a register, which in the memory case
2162 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2163 * where it may read the full 128 bits or only the upper 64 bits.
2164 *
2165 * Exceptions type 4.
2166 */
2167FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2168{
2169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2170 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2171 {
2172 case IEM_OP_PRF_SIZE_OP: /* SSE */
2173 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2174 {
2175 /*
2176 * Register, register.
2177 */
2178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2179 IEM_MC_BEGIN(2, 0);
2180 IEM_MC_ARG(uint128_t *, pDst, 0);
2181 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2182 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2183 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2184 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2185 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2186 IEM_MC_ADVANCE_RIP();
2187 IEM_MC_END();
2188 }
2189 else
2190 {
2191 /*
2192 * Register, memory.
2193 */
2194 IEM_MC_BEGIN(2, 2);
2195 IEM_MC_ARG(uint128_t *, pDst, 0);
2196 IEM_MC_LOCAL(uint128_t, uSrc);
2197 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2199
2200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2202 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2203 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2204
2205 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2206 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2207
2208 IEM_MC_ADVANCE_RIP();
2209 IEM_MC_END();
2210 }
2211 return VINF_SUCCESS;
2212
2213 case 0: /* MMX */
2214 if (!pImpl->pfnU64)
2215 return IEMOP_RAISE_INVALID_OPCODE();
2216 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2217 {
2218 /*
2219 * Register, register.
2220 */
2221 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2222 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2224 IEM_MC_BEGIN(2, 0);
2225 IEM_MC_ARG(uint64_t *, pDst, 0);
2226 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2227 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2228 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2229 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2230 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2231 IEM_MC_ADVANCE_RIP();
2232 IEM_MC_END();
2233 }
2234 else
2235 {
2236 /*
2237 * Register, memory.
2238 */
2239 IEM_MC_BEGIN(2, 2);
2240 IEM_MC_ARG(uint64_t *, pDst, 0);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244
2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2248 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2249
2250 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2251 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2252
2253 IEM_MC_ADVANCE_RIP();
2254 IEM_MC_END();
2255 }
2256 return VINF_SUCCESS;
2257
2258 default:
2259 return IEMOP_RAISE_INVALID_OPCODE();
2260 }
2261}
2262
2263
/** Opcode 0x0f 0x68 - punpckhbw: interleave the high-order bytes of the two
 *  operands (Intel SDM PUNPCKHBW). */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2270
2271
/** Opcode 0x0f 0x69 - punpckhwd: interleave the high-order words of the two
 *  operands (Intel SDM PUNPCKHWD). */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2278
2279
/** Opcode 0x0f 0x6a - punpckhdq: interleave the high-order dwords of the two
 *  operands (Intel SDM PUNPCKHDQ). */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2286
/** Opcode 0x0f 0x6b - packssdw; not implemented yet.  ('packssdq' in the name
 *  looks like a typo for 'packssdw', but the name is referenced from the
 *  opcode table and kept as-is.) */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2289
2290
/** Opcode 0x0f 0x6c - punpcklqdq: interleave the low qwords (SSE2 only; the
 *  common worker's MMX path presumably sees a NULL pfnU64 in the table and
 *  raises \#UD for the no-prefix form - TODO confirm). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2297
2298
/** Opcode 0x0f 0x6d - punpckhqdq: interleave the high qwords (SSE2 only; the
 *  common worker's MMX path presumably sees a NULL pfnU64 in the table and
 *  raises \#UD for the no-prefix form - TODO confirm). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2305
2306
/** Opcode 0x0f 0x6e - movd/movq from a GPR or memory into an MMX or XMM
 *  register.  REX.W selects the 64-bit (movq) form, otherwise 32 bits are
 *  moved; the XMM destination is zero-extended to 128 bits, the MMX
 *  destination to 64 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 prefix selects the XMM form, no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2407
2408
/** Opcode 0x0f 0x6f - movq (MMX), movdqa (0x66), movdqu (0xf3): full register
 *  load from register or memory.  movdqa and movdqu differ only in whether a
 *  128-bit alignment check is applied to the memory operand. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the two SSE forms share all code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2502
2503
/** Opcode 0x0f 0x70.  The immediate here is evil!
 *  pshufw (MMX ext), pshufd (0x66), pshuflw (0xf2), pshufhw (0xf3) - a shuffle
 *  control byte follows the ModRM operand, which is why the immediate must be
 *  fetched only after the effective address bytes have been consumed.
 *  ('pshuflq' in the function name looks like a typo for 'pshuflw'; the name
 *  is referenced from the opcode table and kept as-is.) */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE forms share the decode; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The shuffle immediate comes after the displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The shuffle immediate comes after the displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2628
2629
/* Group 12 workers (0x0f 0x71): immediate-count word shifts on MMX (Nq) and
   XMM (Udq) registers - not implemented yet. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2647
2648
2649/** Opcode 0x0f 0x71. */
2650FNIEMOP_DEF(iemOp_Grp12)
2651{
2652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2653 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2654 return IEMOP_RAISE_INVALID_OPCODE();
2655 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2656 {
2657 case 0: case 1: case 3: case 5: case 7:
2658 return IEMOP_RAISE_INVALID_OPCODE();
2659 case 2:
2660 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2661 {
2662 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2663 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2664 default: return IEMOP_RAISE_INVALID_OPCODE();
2665 }
2666 case 4:
2667 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2668 {
2669 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2670 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2671 default: return IEMOP_RAISE_INVALID_OPCODE();
2672 }
2673 case 6:
2674 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2675 {
2676 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2677 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2678 default: return IEMOP_RAISE_INVALID_OPCODE();
2679 }
2680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2681 }
2682}
2683
2684
/* Group 13 workers (0x0f 0x72): immediate-count dword shifts on MMX (Nq) and
   XMM (Udq) registers - not implemented yet. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2702
2703
2704/** Opcode 0x0f 0x72. */
2705FNIEMOP_DEF(iemOp_Grp13)
2706{
2707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2708 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2709 return IEMOP_RAISE_INVALID_OPCODE();
2710 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2711 {
2712 case 0: case 1: case 3: case 5: case 7:
2713 return IEMOP_RAISE_INVALID_OPCODE();
2714 case 2:
2715 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2716 {
2717 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2718 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2719 default: return IEMOP_RAISE_INVALID_OPCODE();
2720 }
2721 case 4:
2722 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2723 {
2724 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2725 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2726 default: return IEMOP_RAISE_INVALID_OPCODE();
2727 }
2728 case 6:
2729 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2730 {
2731 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2732 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2733 default: return IEMOP_RAISE_INVALID_OPCODE();
2734 }
2735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2736 }
2737}
2738
2739
/* Group 14 workers (0x0f 0x73): immediate-count qword shifts (MMX/SSE) and
   byte-wise 128-bit shifts (SSE only) - not implemented yet. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2757
2758
2759/** Opcode 0x0f 0x73. */
2760FNIEMOP_DEF(iemOp_Grp14)
2761{
2762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2763 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2764 return IEMOP_RAISE_INVALID_OPCODE();
2765 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2766 {
2767 case 0: case 1: case 4: case 5:
2768 return IEMOP_RAISE_INVALID_OPCODE();
2769 case 2:
2770 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2771 {
2772 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2773 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2774 default: return IEMOP_RAISE_INVALID_OPCODE();
2775 }
2776 case 3:
2777 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2778 {
2779 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2780 default: return IEMOP_RAISE_INVALID_OPCODE();
2781 }
2782 case 6:
2783 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2784 {
2785 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2786 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2787 default: return IEMOP_RAISE_INVALID_OPCODE();
2788 }
2789 case 7:
2790 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2791 {
2792 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2793 default: return IEMOP_RAISE_INVALID_OPCODE();
2794 }
2795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2796 }
2797}
2798
2799
2800/**
2801 * Common worker for SSE2 and MMX instructions on the forms:
2802 * pxxx mm1, mm2/mem64
2803 * pxxx xmm1, xmm2/mem128
2804 *
2805 * Proper alignment of the 128-bit operand is enforced.
2806 * Exceptions type 4. SSE2 and MMX cpuid checks.
2807 */
2808FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2809{
2810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2811 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2812 {
2813 case IEM_OP_PRF_SIZE_OP: /* SSE */
2814 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2815 {
2816 /*
2817 * Register, register.
2818 */
2819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2820 IEM_MC_BEGIN(2, 0);
2821 IEM_MC_ARG(uint128_t *, pDst, 0);
2822 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2823 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2824 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2825 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2826 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2827 IEM_MC_ADVANCE_RIP();
2828 IEM_MC_END();
2829 }
2830 else
2831 {
2832 /*
2833 * Register, memory.
2834 */
2835 IEM_MC_BEGIN(2, 2);
2836 IEM_MC_ARG(uint128_t *, pDst, 0);
2837 IEM_MC_LOCAL(uint128_t, uSrc);
2838 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2840
2841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2843 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2844 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2845
2846 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2847 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2848
2849 IEM_MC_ADVANCE_RIP();
2850 IEM_MC_END();
2851 }
2852 return VINF_SUCCESS;
2853
2854 case 0: /* MMX */
2855 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2856 {
2857 /*
2858 * Register, register.
2859 */
2860 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2861 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2863 IEM_MC_BEGIN(2, 0);
2864 IEM_MC_ARG(uint64_t *, pDst, 0);
2865 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2866 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2867 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2868 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2869 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2870 IEM_MC_ADVANCE_RIP();
2871 IEM_MC_END();
2872 }
2873 else
2874 {
2875 /*
2876 * Register, memory.
2877 */
2878 IEM_MC_BEGIN(2, 2);
2879 IEM_MC_ARG(uint64_t *, pDst, 0);
2880 IEM_MC_LOCAL(uint64_t, uSrc);
2881 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2883
2884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2886 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2887 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2888
2889 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2890 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2891
2892 IEM_MC_ADVANCE_RIP();
2893 IEM_MC_END();
2894 }
2895 return VINF_SUCCESS;
2896
2897 default:
2898 return IEMOP_RAISE_INVALID_OPCODE();
2899 }
2900}
2901
2902
/** Opcode 0x0f 0x74 - pcmpeqb: per-byte equality compare, each result byte
 *  becomes all-ones on equality, all-zeros otherwise (Intel SDM PCMPEQB). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2909
2910
/** Opcode 0x0f 0x75 - pcmpeqw: per-word equality compare, each result word
 *  becomes all-ones on equality, all-zeros otherwise (Intel SDM PCMPEQW). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2917
2918
/** Opcode 0x0f 0x76 - pcmpeqd: per-dword equality compare, each result dword
 *  becomes all-ones on equality, all-zeros otherwise (Intel SDM PCMPEQD).
 *  ('pcmped' in the function name is a typo for 'pcmpeqd'; the name is
 *  referenced from the opcode table and kept as-is.) */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2925
2926
/** Opcode 0x0f 0x77 - emms; not implemented yet. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - presumably a deliberate \#UD stub (AMD group 17 /
 *  vmread territory) - TODO confirm FNIEMOP_UD_STUB semantics here. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite, likewise stubbed as \#UD. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps; not implemented yet. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps; not implemented yet. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2937
2938
/** Opcode 0x0f 0x7e.
 * MOVD/MOVQ Ed/q,Pd/q (MMX, no prefix) and MOVD/MOVQ Ed/q,Vd/q (SSE2, 0x66
 * prefix): store the low 32 or 64 bits (REX.W selects 64) of an MMX or XMM
 * register to a general register or memory.  Other prefix combinations
 * (F3/F2) raise \#UD here.
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the mandatory-prefix combination. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: move the low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: move the low dword (store zero-extends to 64-bit gregs). */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX - note: MMX register index takes no REX extension. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F3/F2 prefixed forms are not valid for this opcode. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3045
3046
/** Opcode 0x0f 0x7f.
 * MOVQ Qq,Pq (MMX, no prefix), MOVDQA Wdq,Vdq (SSE2, 0x66 prefix) and
 * MOVDQU Wdq,Vdq (SSE2, F3 prefix): store an MMX or XMM register to a
 * register or memory; the 0x66 form enforces 16-byte alignment on memory
 * stores, the F3 form does not.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms share all decoding. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2 prefixed form is not valid for this opcode. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3141
3142
3143
/** Opcode 0x0f 0x80.
 * JO rel16/32 - jump near if the overflow flag (OF) is set. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* The two-byte Jcc forms require a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3178
3179
/** Opcode 0x0f 0x81.
 * JNO rel16/32 - jump near if the overflow flag (OF) is clear (branches are
 * swapped relative to JO: the taken path sits in the ELSE arm). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3214
3215
/** Opcode 0x0f 0x82.
 * JC/JB/JNAE rel16/32 - jump near if the carry flag (CF) is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3250
3251
/** Opcode 0x0f 0x83.
 * JNC/JNB/JAE rel16/32 - jump near if the carry flag (CF) is clear. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3286
3287
/** Opcode 0x0f 0x84.
 * JE/JZ rel16/32 - jump near if the zero flag (ZF) is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3322
3323
/** Opcode 0x0f 0x85.
 * JNE/JNZ rel16/32 - jump near if the zero flag (ZF) is clear. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3358
3359
/** Opcode 0x0f 0x86.
 * JBE/JNA rel16/32 - jump near if CF or ZF is set (unsigned below-or-equal). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3394
3395
/** Opcode 0x0f 0x87.
 * JNBE/JA rel16/32 - jump near if both CF and ZF are clear (unsigned above). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3430
3431
/** Opcode 0x0f 0x88.
 * JS rel16/32 - jump near if the sign flag (SF) is set. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3466
3467
/** Opcode 0x0f 0x89.
 * JNS rel16/32 - jump near if the sign flag (SF) is clear. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3502
3503
/** Opcode 0x0f 0x8a.
 * JP/JPE rel16/32 - jump near if the parity flag (PF) is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3538
3539
3540/** Opcode 0x0f 0x8b. */
3541FNIEMOP_DEF(iemOp_jnp_Jv)
3542{
3543 IEMOP_MNEMONIC("jo Jv");
3544 IEMOP_HLP_MIN_386();
3545 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3546 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3547 {
3548 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3549 IEMOP_HLP_NO_LOCK_PREFIX();
3550
3551 IEM_MC_BEGIN(0, 0);
3552 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3553 IEM_MC_ADVANCE_RIP();
3554 } IEM_MC_ELSE() {
3555 IEM_MC_REL_JMP_S16(i16Imm);
3556 } IEM_MC_ENDIF();
3557 IEM_MC_END();
3558 }
3559 else
3560 {
3561 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3562 IEMOP_HLP_NO_LOCK_PREFIX();
3563
3564 IEM_MC_BEGIN(0, 0);
3565 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3566 IEM_MC_ADVANCE_RIP();
3567 } IEM_MC_ELSE() {
3568 IEM_MC_REL_JMP_S32(i32Imm);
3569 } IEM_MC_ENDIF();
3570 IEM_MC_END();
3571 }
3572 return VINF_SUCCESS;
3573}
3574
3575
/** Opcode 0x0f 0x8c.
 * JL/JNGE rel16/32 - jump near if SF != OF (signed less-than). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3610
3611
/** Opcode 0x0f 0x8d.
 * JNL/JGE rel16/32 - jump near if SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3646
3647
/** Opcode 0x0f 0x8e.
 * JLE/JNG rel16/32 - jump near if ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3682
3683
/** Opcode 0x0f 0x8f.
 * JNLE/JG rel16/32 - jump near if ZF is clear and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3718
3719
/** Opcode 0x0f 0x90.
 * SETO Eb - set the byte operand to 1 if the overflow flag (OF) is set,
 * otherwise 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3759
3760
/** Opcode 0x0f 0x91.
 * SETNO Eb - set the byte operand to 1 if the overflow flag (OF) is clear,
 * otherwise 0 (stores are swapped relative to SETO). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3800
3801
/** Opcode 0x0f 0x92.
 * SETC/SETB/SETNAE Eb - set the byte operand to 1 if the carry flag (CF) is
 * set, otherwise 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3841
3842
/** Opcode 0x0f 0x93.
 * SETNC/SETNB/SETAE Eb - set the byte operand to 1 if the carry flag (CF) is
 * clear, otherwise 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3882
3883
/** Opcode 0x0f 0x94.
 * SETE/SETZ Eb - set the byte operand to 1 if the zero flag (ZF) is set,
 * otherwise 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3923
3924
/** Opcode 0x0f 0x95.
 * SETNE/SETNZ Eb - set the byte operand to 1 if the zero flag (ZF) is clear,
 * otherwise 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3964
3965
/** Opcode 0x0f 0x96.
 * SETBE/SETNA Eb - set the byte operand to 1 if CF or ZF is set (unsigned
 * below-or-equal), otherwise 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4005
4006
/** Opcode 0x0f 0x97.
 * SETNBE/SETA Eb - set the byte operand to 1 if both CF and ZF are clear
 * (unsigned above), otherwise 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4046
4047
/** Opcode 0x0f 0x98.
 * SETS Eb - set the byte operand to 1 if the sign flag (SF) is set,
 * otherwise 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4087
4088
/** Opcode 0x0f 0x99.
 * SETNS Eb - set the byte operand to 1 if the sign flag (SF) is clear,
 * otherwise 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4128
4129
4130/** Opcode 0x0f 0x9a. */
4131FNIEMOP_DEF(iemOp_setp_Eb)
4132{
4133 IEMOP_MNEMONIC("setnp Eb");
4134 IEMOP_HLP_MIN_386();
4135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4136 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4137
4138 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4139 * any way. AMD says it's "unused", whatever that means. We're
4140 * ignoring for now. */
4141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4142 {
4143 /* register target */
4144 IEM_MC_BEGIN(0, 0);
4145 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4146 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4147 } IEM_MC_ELSE() {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4149 } IEM_MC_ENDIF();
4150 IEM_MC_ADVANCE_RIP();
4151 IEM_MC_END();
4152 }
4153 else
4154 {
4155 /* memory target */
4156 IEM_MC_BEGIN(0, 1);
4157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4159 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4160 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4161 } IEM_MC_ELSE() {
4162 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4163 } IEM_MC_ENDIF();
4164 IEM_MC_ADVANCE_RIP();
4165 IEM_MC_END();
4166 }
4167 return VINF_SUCCESS;
4168}
4169
4170
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP r/m8: destination byte := 1 if PF is clear, 0 if PF is set. */
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4210
4211
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL r/m8: destination byte := 1 if SF != OF (signed less), else 0. */
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4251
4252
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE r/m8: destination byte := 1 if SF == OF (signed not-less), else 0. */
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4292
4293
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE r/m8: destination byte := 1 if ZF set or SF != OF (signed less-or-equal), else 0. */
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4333
4334
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG r/m8: destination byte := 1 if ZF clear and SF == OF (signed greater), else 0. */
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4374
4375
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the selector of the given segment register using the current
 * effective operand size.  ES/CS/SS/DS pushes are invalid in 64-bit mode
 * (hence the X86_SREG_FS check), while FS/GS pushes remain valid and use
 * the 64-bit default operand size there.
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): uses the sreg-specific 32-bit push variant rather
               than IEM_MC_PUSH_U32 - presumably to model CPU-specific handling
               of the upper word; confirm against the MC implementation. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4418
4419
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - delegates to the common segment-register push helper. */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4428
4429
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - segment loads have side effects, so defer to the C implementation. */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4438
4439
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - deferred to the C implementation, which consults the guest CPUID tables. */
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4448
4449
4450/**
4451 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4452 * iemOp_bts_Ev_Gv.
4453 */
4454FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4455{
4456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4457 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4458
4459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4460 {
4461 /* register destination. */
4462 IEMOP_HLP_NO_LOCK_PREFIX();
4463 switch (pIemCpu->enmEffOpSize)
4464 {
4465 case IEMMODE_16BIT:
4466 IEM_MC_BEGIN(3, 0);
4467 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4468 IEM_MC_ARG(uint16_t, u16Src, 1);
4469 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4470
4471 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4472 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4473 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4474 IEM_MC_REF_EFLAGS(pEFlags);
4475 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4476
4477 IEM_MC_ADVANCE_RIP();
4478 IEM_MC_END();
4479 return VINF_SUCCESS;
4480
4481 case IEMMODE_32BIT:
4482 IEM_MC_BEGIN(3, 0);
4483 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4484 IEM_MC_ARG(uint32_t, u32Src, 1);
4485 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4486
4487 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4488 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4489 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4490 IEM_MC_REF_EFLAGS(pEFlags);
4491 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4492
4493 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4494 IEM_MC_ADVANCE_RIP();
4495 IEM_MC_END();
4496 return VINF_SUCCESS;
4497
4498 case IEMMODE_64BIT:
4499 IEM_MC_BEGIN(3, 0);
4500 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4501 IEM_MC_ARG(uint64_t, u64Src, 1);
4502 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4503
4504 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4505 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4506 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4507 IEM_MC_REF_EFLAGS(pEFlags);
4508 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4509
4510 IEM_MC_ADVANCE_RIP();
4511 IEM_MC_END();
4512 return VINF_SUCCESS;
4513
4514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4515 }
4516 }
4517 else
4518 {
4519 /* memory destination. */
4520
4521 uint32_t fAccess;
4522 if (pImpl->pfnLockedU16)
4523 fAccess = IEM_ACCESS_DATA_RW;
4524 else /* BT */
4525 {
4526 IEMOP_HLP_NO_LOCK_PREFIX();
4527 fAccess = IEM_ACCESS_DATA_R;
4528 }
4529
4530 NOREF(fAccess);
4531
4532 /** @todo test negative bit offsets! */
4533 switch (pIemCpu->enmEffOpSize)
4534 {
4535 case IEMMODE_16BIT:
4536 IEM_MC_BEGIN(3, 2);
4537 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4538 IEM_MC_ARG(uint16_t, u16Src, 1);
4539 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4541 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4542
4543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4544 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4545 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4546 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4547 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4548 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4549 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4550 IEM_MC_FETCH_EFLAGS(EFlags);
4551
4552 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4553 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4554 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4555 else
4556 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4557 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4558
4559 IEM_MC_COMMIT_EFLAGS(EFlags);
4560 IEM_MC_ADVANCE_RIP();
4561 IEM_MC_END();
4562 return VINF_SUCCESS;
4563
4564 case IEMMODE_32BIT:
4565 IEM_MC_BEGIN(3, 2);
4566 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4567 IEM_MC_ARG(uint32_t, u32Src, 1);
4568 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4570 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4571
4572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4573 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4574 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4575 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4576 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4577 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4578 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4579 IEM_MC_FETCH_EFLAGS(EFlags);
4580
4581 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4582 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4583 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4584 else
4585 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4586 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4587
4588 IEM_MC_COMMIT_EFLAGS(EFlags);
4589 IEM_MC_ADVANCE_RIP();
4590 IEM_MC_END();
4591 return VINF_SUCCESS;
4592
4593 case IEMMODE_64BIT:
4594 IEM_MC_BEGIN(3, 2);
4595 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4596 IEM_MC_ARG(uint64_t, u64Src, 1);
4597 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4599 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4600
4601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4602 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4603 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4604 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4605 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4606 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4607 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4608 IEM_MC_FETCH_EFLAGS(EFlags);
4609
4610 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4611 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4613 else
4614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4615 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4616
4617 IEM_MC_COMMIT_EFLAGS(EFlags);
4618 IEM_MC_ADVANCE_RIP();
4619 IEM_MC_END();
4620 return VINF_SUCCESS;
4621
4622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4623 }
4624 }
4625}
4626
4627
4628/** Opcode 0x0f 0xa3. */
4629FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4630{
4631 IEMOP_MNEMONIC("bt Gv,Gv");
4632 IEMOP_HLP_MIN_386();
4633 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4634}
4635
4636
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate byte count.  Note the decode
 * order in the memory case: the effective address must be computed before
 * the trailing immediate is fetched (hence the cbImm argument of 1 to
 * IEM_MC_CALC_RM_EFF_ADDR).
 *
 * @param   pImpl   The shld/shrd implementation function table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* The CPU leaves AF and OF undefined for these shifts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: immediate follows the ModR/M byte directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing immediate */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4781
4782
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift where the count comes from CL rather than an
 * immediate byte (no trailing immediate, hence cbImm 0 in the address
 * calculation below).
 *
 * @param   pImpl   The shld/shrd implementation function table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* The CPU leaves AF and OF undefined for these shifts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4926
4927
4928
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - immediate-count variant via the common worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4936
4937
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - CL-count variant via the common worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4945
4946
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - delegates to the common segment-register push helper. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4955
4956
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - segment loads have side effects, so defer to the C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4965
4966
/** Opcode 0x0f 0xaa. RSM - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
4970
4971
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set, via the common bit-op worker. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4979
4980
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - immediate-count variant via the common worker. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4987}
4988
4989
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - CL-count variant via the common worker. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4997
4998
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512: save x87/MMX/SSE state; #UD unless the guest CPU reports FXSR. */
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address is decoded before the lock-prefix check completes decoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5017
5018
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512: restore x87/MMX/SSE state; #UD unless the guest CPU reports FXSR. */
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address is decoded before the lock-prefix check completes decoding. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5037
5038
/* Remaining group 15 memory forms: ldmxcsr/stmxcsr/clflush are unimplemented
   stubs; the xsave family is decoded as invalid opcode for now. */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5056
5057
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE: #UD unless the guest CPU reports SSE2; emulated with a real
       LFENCE when the host has SSE2, otherwise with an alternative fence. */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5075
5076
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE: #UD unless the guest CPU reports SSE2; emulated with a real
       MFENCE when the host has SSE2, otherwise with an alternative fence. */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5094
5095
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): SFENCE was introduced with SSE (Pentium III), not SSE2;
       gating it on fSse2 like lfence/mfence may be stricter than real
       hardware - confirm against the SDM / CPU behavior. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Emit the real sfence on SSE2-capable hosts; otherwise fall back to an
       alternative fence helper providing at least the required ordering. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5113
5114
/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm); /* stub raising \#UD (FSGSBASE not implemented) */

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm); /* stub raising \#UD (FSGSBASE not implemented) */

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm); /* stub raising \#UD (FSGSBASE not implemented) */

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm); /* stub raising \#UD (FSGSBASE not implemented) */
5126
5127
5128/** Opcode 0x0f 0xae. */
5129FNIEMOP_DEF(iemOp_Grp15)
5130{
5131 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
5132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5133 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5134 {
5135 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5136 {
5137 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5138 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5139 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5140 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5141 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5142 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5143 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5144 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5146 }
5147 }
5148 else
5149 {
5150 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5151 {
5152 case 0:
5153 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5154 {
5155 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5156 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5157 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5158 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5159 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5160 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5161 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5162 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5164 }
5165 break;
5166
5167 case IEM_OP_PRF_REPZ:
5168 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5169 {
5170 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5171 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5172 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5173 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5174 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5175 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5176 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5177 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5179 }
5180 break;
5181
5182 default:
5183 return IEMOP_RAISE_INVALID_OPCODE();
5184 }
5185 }
5186}
5187
5188
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Two-operand IMUL is 386+. */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5197
5198
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG was introduced with the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: compare AL with r/m8, exchange with r8 on match. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        /* The assembly helper updates AL/EFLAGS itself via the references. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read/write and commit EFLAGS and AL
           only after the compare-exchange helper has run. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* AL receives the old value on mismatch. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5257
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG was introduced with the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: one variant per effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper halves in long mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts cannot pass a 64-bit value by register; use a reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, run the helper, then commit memory,
           EFLAGS, and the accumulator in that order. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts cannot pass a 64-bit value by register; use a reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5451
5452
/**
 * Common worker for LDS/LES/LSS/LFS/LGS style far-pointer loads: fetches a
 * sel:offset pair from memory and loads it into iSegReg plus the Gv register.
 * The selector is located after the offset, so offset width varies with the
 * effective operand size while the selector is always 16 bits.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5514
5515
5516/** Opcode 0x0f 0xb2. */
5517FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5518{
5519 IEMOP_MNEMONIC("lss Gv,Mp");
5520 IEMOP_HLP_MIN_386();
5521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5522 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5523 return IEMOP_RAISE_INVALID_OPCODE();
5524 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5525}
5526
5527
5528/** Opcode 0x0f 0xb3. */
5529FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5530{
5531 IEMOP_MNEMONIC("btr Ev,Gv");
5532 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5533}
5534
5535
5536/** Opcode 0x0f 0xb4. */
5537FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5538{
5539 IEMOP_MNEMONIC("lfs Gv,Mp");
5540 IEMOP_HLP_MIN_386();
5541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5543 return IEMOP_RAISE_INVALID_OPCODE();
5544 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5545}
5546
5547
5548/** Opcode 0x0f 0xb5. */
5549FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5550{
5551 IEMOP_MNEMONIC("lgs Gv,Mp");
5552 IEMOP_HLP_MIN_386();
5553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5554 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5555 return IEMOP_RAISE_INVALID_OPCODE();
5556 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5557}
5558
5559
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX is 386+. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Zero-extend the source byte register into the destination. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5650
5651
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX is 386+. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Word source: only the 32/64-bit destination width matters here. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5718
5719
/** Opcode 0x0f 0xb8. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe); /* stub - not implemented yet */
5722
5723
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1): reserved encoding that always raises #UD. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5730
5731
5732/** Opcode 0x0f 0xba. */
5733FNIEMOP_DEF(iemOp_Grp8)
5734{
5735 IEMOP_HLP_MIN_386();
5736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5737 PCIEMOPBINSIZES pImpl;
5738 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5739 {
5740 case 0: case 1: case 2: case 3:
5741 return IEMOP_RAISE_INVALID_OPCODE();
5742 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
5743 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5744 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5745 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5747 }
5748 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5749
5750 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5751 {
5752 /* register destination. */
5753 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5754 IEMOP_HLP_NO_LOCK_PREFIX();
5755
5756 switch (pIemCpu->enmEffOpSize)
5757 {
5758 case IEMMODE_16BIT:
5759 IEM_MC_BEGIN(3, 0);
5760 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5761 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5762 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5763
5764 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5765 IEM_MC_REF_EFLAGS(pEFlags);
5766 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5767
5768 IEM_MC_ADVANCE_RIP();
5769 IEM_MC_END();
5770 return VINF_SUCCESS;
5771
5772 case IEMMODE_32BIT:
5773 IEM_MC_BEGIN(3, 0);
5774 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5775 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5776 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5777
5778 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5779 IEM_MC_REF_EFLAGS(pEFlags);
5780 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5781
5782 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5783 IEM_MC_ADVANCE_RIP();
5784 IEM_MC_END();
5785 return VINF_SUCCESS;
5786
5787 case IEMMODE_64BIT:
5788 IEM_MC_BEGIN(3, 0);
5789 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5790 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5791 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5792
5793 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5794 IEM_MC_REF_EFLAGS(pEFlags);
5795 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5796
5797 IEM_MC_ADVANCE_RIP();
5798 IEM_MC_END();
5799 return VINF_SUCCESS;
5800
5801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5802 }
5803 }
5804 else
5805 {
5806 /* memory destination. */
5807
5808 uint32_t fAccess;
5809 if (pImpl->pfnLockedU16)
5810 fAccess = IEM_ACCESS_DATA_RW;
5811 else /* BT */
5812 {
5813 IEMOP_HLP_NO_LOCK_PREFIX();
5814 fAccess = IEM_ACCESS_DATA_R;
5815 }
5816
5817 /** @todo test negative bit offsets! */
5818 switch (pIemCpu->enmEffOpSize)
5819 {
5820 case IEMMODE_16BIT:
5821 IEM_MC_BEGIN(3, 1);
5822 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5823 IEM_MC_ARG(uint16_t, u16Src, 1);
5824 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5826
5827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5828 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5829 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
5830 IEM_MC_FETCH_EFLAGS(EFlags);
5831 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5832 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5833 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5834 else
5835 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5837
5838 IEM_MC_COMMIT_EFLAGS(EFlags);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 case IEMMODE_32BIT:
5844 IEM_MC_BEGIN(3, 1);
5845 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5846 IEM_MC_ARG(uint32_t, u32Src, 1);
5847 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5849
5850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5851 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5852 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
5853 IEM_MC_FETCH_EFLAGS(EFlags);
5854 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5855 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5856 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5857 else
5858 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5859 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5860
5861 IEM_MC_COMMIT_EFLAGS(EFlags);
5862 IEM_MC_ADVANCE_RIP();
5863 IEM_MC_END();
5864 return VINF_SUCCESS;
5865
5866 case IEMMODE_64BIT:
5867 IEM_MC_BEGIN(3, 1);
5868 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5869 IEM_MC_ARG(uint64_t, u64Src, 1);
5870 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5872
5873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5874 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5875 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
5876 IEM_MC_FETCH_EFLAGS(EFlags);
5877 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5878 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5879 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5880 else
5881 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5882 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5883
5884 IEM_MC_COMMIT_EFLAGS(EFlags);
5885 IEM_MC_ADVANCE_RIP();
5886 IEM_MC_END();
5887 return VINF_SUCCESS;
5888
5889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5890 }
5891 }
5892
5893}
5894
5895
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTC is 386+. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5903
5904
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSF is 386+. */
    /* Only ZF is defined after BSF; the other status flags are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5913
5914
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSR is 386+. */
    /* Only ZF is defined after BSR; the other status flags are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5923
5924
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVSX is 386+. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Sign-extend the source byte register into the destination. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6015
6016
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a 16-bit register/memory operand into a
       32-bit or 64-bit general register (386+). */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: only two destination widths matter - with a
           16-bit source, 16- and 32-bit effective operand sizes both end
           up as a 32-bit store (the 16-bit case is folded in here). */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6083
6084
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange-and-add bytes (486+).  The assembly worker
       (iemAImpl_xadd_u8[_locked]) swaps *pu8Dst and *pu8Reg and stores the
       sum in *pu8Dst, updating EFLAGS. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The register operand is worked on via a local copy (u8RegCopy) so
           the swapped-out destination value can be written back to the
           register only after the memory commit succeeds. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6143
6144
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange-and-add for 16/32/64-bit operands (486+).
       Same structure as the byte variant, switched on the effective
       operand size. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes zero the upper halves of both 64-bit
                   registers involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As in the byte variant: operate on a local copy of the register
           so it is only written back after the memory commit. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6297
/* SSE/MMX opcodes 0x0f 0xc2..0xc6 - decoding stubs, not implemented yet. */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6312
6313
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the 64-bit memory operand; on
       match store ECX:EBX there, otherwise load the memory value into
       EDX:EAX.  The assembly worker gets the register pairs via local
       RTUINT64U copies. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Only write EAX/EDX back when the compare failed (ZF clear). */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6358
6359
/* Group 9 sub-encodings not implemented yet; these stubs raise #UD. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6377
6378
6379/** Opcode 0x0f 0xc7. */
6380FNIEMOP_DEF(iemOp_Grp9)
6381{
6382 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6384 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6385 {
6386 case 0: case 2: case 3: case 4: case 5:
6387 return IEMOP_RAISE_INVALID_OPCODE();
6388 case 1:
6389 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6390 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6391 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6392 return IEMOP_RAISE_INVALID_OPCODE();
6393 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6394 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6395 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6396 case 6:
6397 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6398 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6399 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6400 {
6401 case 0:
6402 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6403 case IEM_OP_PRF_SIZE_OP:
6404 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6405 case IEM_OP_PRF_REPZ:
6406 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6407 default:
6408 return IEMOP_RAISE_INVALID_OPCODE();
6409 }
6410 case 7:
6411 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6412 {
6413 case 0:
6414 case IEM_OP_PRF_REPZ:
6415 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6416 default:
6417 return IEMOP_RAISE_INVALID_OPCODE();
6418 }
6419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6420 }
6421}
6422
6423
6424/**
6425 * Common 'bswap register' helper.
6426 */
6427FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6428{
6429 IEMOP_HLP_NO_LOCK_PREFIX();
6430 switch (pIemCpu->enmEffOpSize)
6431 {
6432 case IEMMODE_16BIT:
6433 IEM_MC_BEGIN(1, 0);
6434 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6435 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6436 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6437 IEM_MC_ADVANCE_RIP();
6438 IEM_MC_END();
6439 return VINF_SUCCESS;
6440
6441 case IEMMODE_32BIT:
6442 IEM_MC_BEGIN(1, 0);
6443 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6444 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6445 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6446 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6447 IEM_MC_ADVANCE_RIP();
6448 IEM_MC_END();
6449 return VINF_SUCCESS;
6450
6451 case IEMMODE_64BIT:
6452 IEM_MC_BEGIN(1, 0);
6453 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6454 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6455 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6456 IEM_MC_ADVANCE_RIP();
6457 IEM_MC_END();
6458 return VINF_SUCCESS;
6459
6460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6461 }
6462}
6463
6464
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP was introduced with the 80486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6475
6476
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* Defers to the common bswap worker; REX.B selects r9. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6484
6485
6486/** Opcode 0x0f 0xca. */
6487FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6488{
6489 IEMOP_MNEMONIC("bswap rDX/r9");
6490 IEMOP_HLP_MIN_486();
6491 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6492}
6493
6494
6495/** Opcode 0x0f 0xcb. */
6496FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6497{
6498 IEMOP_MNEMONIC("bswap rBX/r9");
6499 IEMOP_HLP_MIN_486();
6500 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6501}
6502
6503
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* Defers to the common bswap worker; REX.B selects r12. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6511
6512
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* Defers to the common bswap worker; REX.B selects r13. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6520
6521
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* Defers to the common bswap worker; REX.B selects r14. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6529
6530
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* Defers to the common bswap worker; REX.B selects r15. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6538
6539
6540
/* SSE/MMX opcodes 0x0f 0xd0..0xd6 - decoding stubs, not implemented yet. */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5.  (The identifier says 'pmulq' but 0xd5 is pmullw.) */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6555
6556
6557/** Opcode 0x0f 0xd7. */
6558FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6559{
6560 /* Docs says register only. */
6561 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6562 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6563 return IEMOP_RAISE_INVALID_OPCODE();
6564
6565 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6566 /** @todo testcase: Check that the instruction implicitly clears the high
6567 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6568 * and opcode modifications are made to work with the whole width (not
6569 * just 128). */
6570 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6571 {
6572 case IEM_OP_PRF_SIZE_OP: /* SSE */
6573 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6574 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6575 IEM_MC_BEGIN(2, 0);
6576 IEM_MC_ARG(uint64_t *, pDst, 0);
6577 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6578 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6579 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6580 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6581 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6582 IEM_MC_ADVANCE_RIP();
6583 IEM_MC_END();
6584 return VINF_SUCCESS;
6585
6586 case 0: /* MMX */
6587 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6588 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6589 IEM_MC_BEGIN(2, 0);
6590 IEM_MC_ARG(uint64_t *, pDst, 0);
6591 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6592 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6593 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6594 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6595 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6596 IEM_MC_ADVANCE_RIP();
6597 IEM_MC_END();
6598 return VINF_SUCCESS;
6599
6600 default:
6601 return IEMOP_RAISE_INVALID_OPCODE();
6602 }
6603}
6604
6605
/* SSE/MMX opcodes 0x0f 0xd8..0xee - decoding stubs, not implemented yet. */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6652
6653
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR (MMX and SSE2 forms): defers to the generic full,full->full
       MMX/SSE2 worker with the pxor implementation table. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6660
6661
/* SSE/MMX opcodes 0x0f 0xf0..0xfe - decoding stubs, not implemented yet.
   (//NEXT marks the one queued up for implementation.) */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6692
6693
6694const PFNIEMOP g_apfnTwoByteMap[256] =
6695{
6696 /* 0x00 */ iemOp_Grp6,
6697 /* 0x01 */ iemOp_Grp7,
6698 /* 0x02 */ iemOp_lar_Gv_Ew,
6699 /* 0x03 */ iemOp_lsl_Gv_Ew,
6700 /* 0x04 */ iemOp_Invalid,
6701 /* 0x05 */ iemOp_syscall,
6702 /* 0x06 */ iemOp_clts,
6703 /* 0x07 */ iemOp_sysret,
6704 /* 0x08 */ iemOp_invd,
6705 /* 0x09 */ iemOp_wbinvd,
6706 /* 0x0a */ iemOp_Invalid,
6707 /* 0x0b */ iemOp_ud2,
6708 /* 0x0c */ iemOp_Invalid,
6709 /* 0x0d */ iemOp_nop_Ev_GrpP,
6710 /* 0x0e */ iemOp_femms,
6711 /* 0x0f */ iemOp_3Dnow,
6712 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6713 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6714 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6715 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6716 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6717 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6718 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6719 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6720 /* 0x18 */ iemOp_prefetch_Grp16,
6721 /* 0x19 */ iemOp_nop_Ev,
6722 /* 0x1a */ iemOp_nop_Ev,
6723 /* 0x1b */ iemOp_nop_Ev,
6724 /* 0x1c */ iemOp_nop_Ev,
6725 /* 0x1d */ iemOp_nop_Ev,
6726 /* 0x1e */ iemOp_nop_Ev,
6727 /* 0x1f */ iemOp_nop_Ev,
6728 /* 0x20 */ iemOp_mov_Rd_Cd,
6729 /* 0x21 */ iemOp_mov_Rd_Dd,
6730 /* 0x22 */ iemOp_mov_Cd_Rd,
6731 /* 0x23 */ iemOp_mov_Dd_Rd,
6732 /* 0x24 */ iemOp_mov_Rd_Td,
6733 /* 0x25 */ iemOp_Invalid,
6734 /* 0x26 */ iemOp_mov_Td_Rd,
6735 /* 0x27 */ iemOp_Invalid,
6736 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6737 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6738 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6739 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6740 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6741 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6742 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6743 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6744 /* 0x30 */ iemOp_wrmsr,
6745 /* 0x31 */ iemOp_rdtsc,
6746 /* 0x32 */ iemOp_rdmsr,
6747 /* 0x33 */ iemOp_rdpmc,
6748 /* 0x34 */ iemOp_sysenter,
6749 /* 0x35 */ iemOp_sysexit,
6750 /* 0x36 */ iemOp_Invalid,
6751 /* 0x37 */ iemOp_getsec,
6752 /* 0x38 */ iemOp_3byte_Esc_A4,
6753 /* 0x39 */ iemOp_Invalid,
6754 /* 0x3a */ iemOp_3byte_Esc_A5,
6755 /* 0x3b */ iemOp_Invalid,
6756 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6757 /* 0x3d */ iemOp_Invalid,
6758 /* 0x3e */ iemOp_Invalid,
6759 /* 0x3f */ iemOp_Invalid,
6760 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6761 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6762 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6763 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6764 /* 0x44 */ iemOp_cmove_Gv_Ev,
6765 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6766 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6767 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6768 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6769 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6770 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6771 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6772 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6773 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6774 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6775 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6776 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6777 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6778 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6779 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6780 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6781 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6782 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6783 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6784 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6785 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6786 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6787 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6788 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6789 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6790 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6791 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6792 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6793 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6794 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6795 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6796 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6797 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6798 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6799 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6800 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6801 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6802 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6803 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6804 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6805 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6806 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6807 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6808 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6809 /* 0x71 */ iemOp_Grp12,
6810 /* 0x72 */ iemOp_Grp13,
6811 /* 0x73 */ iemOp_Grp14,
6812 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6813 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6814 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6815 /* 0x77 */ iemOp_emms,
6816 /* 0x78 */ iemOp_vmread_AmdGrp17,
6817 /* 0x79 */ iemOp_vmwrite,
6818 /* 0x7a */ iemOp_Invalid,
6819 /* 0x7b */ iemOp_Invalid,
6820 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6821 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6822 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6823 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6824 /* 0x80 */ iemOp_jo_Jv,
6825 /* 0x81 */ iemOp_jno_Jv,
6826 /* 0x82 */ iemOp_jc_Jv,
6827 /* 0x83 */ iemOp_jnc_Jv,
6828 /* 0x84 */ iemOp_je_Jv,
6829 /* 0x85 */ iemOp_jne_Jv,
6830 /* 0x86 */ iemOp_jbe_Jv,
6831 /* 0x87 */ iemOp_jnbe_Jv,
6832 /* 0x88 */ iemOp_js_Jv,
6833 /* 0x89 */ iemOp_jns_Jv,
6834 /* 0x8a */ iemOp_jp_Jv,
6835 /* 0x8b */ iemOp_jnp_Jv,
6836 /* 0x8c */ iemOp_jl_Jv,
6837 /* 0x8d */ iemOp_jnl_Jv,
6838 /* 0x8e */ iemOp_jle_Jv,
6839 /* 0x8f */ iemOp_jnle_Jv,
6840 /* 0x90 */ iemOp_seto_Eb,
6841 /* 0x91 */ iemOp_setno_Eb,
6842 /* 0x92 */ iemOp_setc_Eb,
6843 /* 0x93 */ iemOp_setnc_Eb,
6844 /* 0x94 */ iemOp_sete_Eb,
6845 /* 0x95 */ iemOp_setne_Eb,
6846 /* 0x96 */ iemOp_setbe_Eb,
6847 /* 0x97 */ iemOp_setnbe_Eb,
6848 /* 0x98 */ iemOp_sets_Eb,
6849 /* 0x99 */ iemOp_setns_Eb,
6850 /* 0x9a */ iemOp_setp_Eb,
6851 /* 0x9b */ iemOp_setnp_Eb,
6852 /* 0x9c */ iemOp_setl_Eb,
6853 /* 0x9d */ iemOp_setnl_Eb,
6854 /* 0x9e */ iemOp_setle_Eb,
6855 /* 0x9f */ iemOp_setnle_Eb,
6856 /* 0xa0 */ iemOp_push_fs,
6857 /* 0xa1 */ iemOp_pop_fs,
6858 /* 0xa2 */ iemOp_cpuid,
6859 /* 0xa3 */ iemOp_bt_Ev_Gv,
6860 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6861 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6862 /* 0xa6 */ iemOp_Invalid,
6863 /* 0xa7 */ iemOp_Invalid,
6864 /* 0xa8 */ iemOp_push_gs,
6865 /* 0xa9 */ iemOp_pop_gs,
6866 /* 0xaa */ iemOp_rsm,
6867 /* 0xab */ iemOp_bts_Ev_Gv,
6868 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6869 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6870 /* 0xae */ iemOp_Grp15,
6871 /* 0xaf */ iemOp_imul_Gv_Ev,
6872 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6873 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6874 /* 0xb2 */ iemOp_lss_Gv_Mp,
6875 /* 0xb3 */ iemOp_btr_Ev_Gv,
6876 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6877 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6878 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6879 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6880 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6881 /* 0xb9 */ iemOp_Grp10,
6882 /* 0xba */ iemOp_Grp8,
 /* 0xbb */ iemOp_btc_Ev_Gv,
6884 /* 0xbc */ iemOp_bsf_Gv_Ev,
6885 /* 0xbd */ iemOp_bsr_Gv_Ev,
6886 /* 0xbe */ iemOp_movsx_Gv_Eb,
6887 /* 0xbf */ iemOp_movsx_Gv_Ew,
6888 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6889 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6890 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6891 /* 0xc3 */ iemOp_movnti_My_Gy,
6892 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6893 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6894 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6895 /* 0xc7 */ iemOp_Grp9,
6896 /* 0xc8 */ iemOp_bswap_rAX_r8,
6897 /* 0xc9 */ iemOp_bswap_rCX_r9,
6898 /* 0xca */ iemOp_bswap_rDX_r10,
6899 /* 0xcb */ iemOp_bswap_rBX_r11,
6900 /* 0xcc */ iemOp_bswap_rSP_r12,
6901 /* 0xcd */ iemOp_bswap_rBP_r13,
6902 /* 0xce */ iemOp_bswap_rSI_r14,
6903 /* 0xcf */ iemOp_bswap_rDI_r15,
6904 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6905 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6906 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6907 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6908 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6909 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6910 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6911 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6912 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6913 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6914 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6915 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6916 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6917 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6918 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6919 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6920 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6921 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6922 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6923 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6924 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6925 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6926 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6927 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6928 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6929 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6930 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6931 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6932 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6933 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6934 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6935 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6936 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6937 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6938 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6939 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6940 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6941 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6942 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6943 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6944 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6945 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6946 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6947 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6948 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6949 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6950 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6951 /* 0xff */ iemOp_Invalid
6952};
6953
6954/** @} */
6955
6956
6957/** @name One byte opcodes.
6958 *
6959 * @{
6960 */
6961
/** Opcode 0x00 - add Eb,Gb (byte ADD, mod r/m encoded destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv (word/dword/qword ADD, mod r/m encoded destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb (byte ADD, register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev (word/dword/qword ADD, register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add AL,Ib (immediate byte into AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz (immediate word/dword into AX/EAX/RAX). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7008
7009
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* 0x07 is #UD in long mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7026
7027
/** Opcode 0x08 - or Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7035
7036
7037/** Opcode 0x09. */
7038FNIEMOP_DEF(iemOp_or_Ev_Gv)
7039{
7040 IEMOP_MNEMONIC("or Ev,Gv ");
7041 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7042 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7043}
7044
7045
/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or AL,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7080
7081
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - escape byte: fetch the second opcode byte and dispatch
 *  through the two-byte opcode table. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();    /* Two-byte opcodes require a 286 or later. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7098
/** Opcode 0x10 - adc Eb,Gb (add with carry, mod r/m destination). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7145
7146
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();     /* 0x17 is #UD in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7163
7164
/** Opcode 0x18 - sbb Eb,Gb (subtract with borrow, mod r/m destination). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7211
7212
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();     /* 0x1f is #UD in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7229
7230
/** Opcode 0x20 - and Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7283
7284
/** Opcode 0x26 - ES segment override prefix: record the prefix, then fetch
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es"); /* A prefix after REX cancels the REX bits. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - daa (decimal adjust AL after addition; invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7306
7307
/** Opcode 0x28 - sub Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7354
7355
/** Opcode 0x2e - CS segment override prefix: record the prefix, then fetch
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs"); /* A prefix after REX cancels the REX bits. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - das (decimal adjust AL after subtraction; invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7377
7378
/** Opcode 0x30 - xor Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7431
7432
/** Opcode 0x36 - SS segment override prefix: record the prefix, then fetch
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss"); /* A prefix after REX cancels the REX bits. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - aaa (ASCII adjust after addition); stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
7447
7448
/** Opcode 0x38 - cmp Eb,Gb (compare; destination is not written). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7497
7498
/** Opcode 0x3e - DS segment override prefix: record the prefix, then fetch
 *  and dispatch the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds"); /* A prefix after REX cancels the REX bits. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - aas (ASCII adjust after subtraction); stubbed, not yet implemented. */
FNIEMOP_STUB(iemOp_aas);
7513
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Worker function table for the unary operation; only the
 *                  normal (non-locked) workers are used since the target is
 *                  a register.
 * @param   iReg    General register index (REX extension already applied by
 *                  the caller where relevant).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit GPR writes zero the upper half in 64-bit mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable: enmEffOpSize is always one of the three cases above. */
    return VINF_SUCCESS;
}
7558
7559
/** Opcode 0x40 - REX prefix in 64-bit mode, otherwise 'inc eAX'. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - REX.B prefix in 64-bit mode, otherwise 'inc eCX'. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;    /* Extends the r/m / base / opcode-reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - REX.X prefix in 64-bit mode, otherwise 'inc eDX'. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;    /* Extends the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - REX.BX prefix in 64-bit mode, otherwise 'inc eBX'. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - REX.R prefix in 64-bit mode, otherwise 'inc eSP'. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;      /* Extends the ModRM reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - REX.RB prefix in 64-bit mode, otherwise 'inc eBP'. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - REX.RX prefix in 64-bit mode, otherwise 'inc eSI'. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - REX.RBX prefix in 64-bit mode, otherwise 'inc eDI'. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7731
7732
/** Opcode 0x48 - REX.W prefix in 64-bit mode, otherwise 'dec eAX'. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - REX.BW prefix in 64-bit mode, otherwise 'dec eCX'. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - REX.XW prefix in 64-bit mode, otherwise 'dec eDX'. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - REX.BXW prefix in 64-bit mode, otherwise 'dec eBX'. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - REX.RW prefix in 64-bit mode, otherwise 'dec eSP'. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - REX.RBW prefix in 64-bit mode, otherwise 'dec eBP'. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - REX.RXW prefix in 64-bit mode, otherwise 'dec eSI'. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - REX.RBXW prefix in 64-bit mode, otherwise 'dec eDI'. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7911
7912
/**
 * Common 'push register' helper.
 *
 * @param   iReg    General register index; in 64-bit mode the REX.B extension
 *                  is applied here.
 *
 * In 64-bit mode the default operand size is 64 bits and a 66h prefix
 * selects 16 bits - there is no 32-bit push in long mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7958
7959
/** Opcode 0x50 - push rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
7990
7991
7992/** Opcode 0x54. */
7993FNIEMOP_DEF(iemOp_push_eSP)
7994{
7995 IEMOP_MNEMONIC("push rSP");
7996#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
7997 if (pIemCpu->uTargetCpu == IEMTARGETCPU_8086)
7998 {
7999 IEM_MC_BEGIN(0, 1);
8000 IEM_MC_LOCAL(uint16_t, u16Value);
8001 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8002 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8003 IEM_MC_PUSH_U16(u16Value);
8004 IEM_MC_ADVANCE_RIP();
8005 IEM_MC_END();
8006 }
8007#endif
8008 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8009}
8010
8011
/** Opcode 0x55 - push eBP/rBP.
 * Defers all operand-size and mode handling to the common push worker. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8018
8019
/** Opcode 0x56 - push eSI/rSI.
 * Defers all operand-size and mode handling to the common push worker. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8026
8027
/** Opcode 0x57 - push eDI/rDI.
 * Defers all operand-size and mode handling to the common push worker. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8034
8035
8036/**
8037 * Common 'pop register' helper.
8038 */
8039FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8040{
8041 IEMOP_HLP_NO_LOCK_PREFIX();
8042 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8043 {
8044 iReg |= pIemCpu->uRexB;
8045 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8046 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8047 }
8048
8049 switch (pIemCpu->enmEffOpSize)
8050 {
8051 case IEMMODE_16BIT:
8052 IEM_MC_BEGIN(0, 1);
8053 IEM_MC_LOCAL(uint16_t, *pu16Dst);
8054 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8055 IEM_MC_POP_U16(pu16Dst);
8056 IEM_MC_ADVANCE_RIP();
8057 IEM_MC_END();
8058 break;
8059
8060 case IEMMODE_32BIT:
8061 IEM_MC_BEGIN(0, 1);
8062 IEM_MC_LOCAL(uint32_t, *pu32Dst);
8063 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8064 IEM_MC_POP_U32(pu32Dst);
8065 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8066 IEM_MC_ADVANCE_RIP();
8067 IEM_MC_END();
8068 break;
8069
8070 case IEMMODE_64BIT:
8071 IEM_MC_BEGIN(0, 1);
8072 IEM_MC_LOCAL(uint64_t, *pu64Dst);
8073 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8074 IEM_MC_POP_U64(pu64Dst);
8075 IEM_MC_ADVANCE_RIP();
8076 IEM_MC_END();
8077 break;
8078 }
8079
8080 return VINF_SUCCESS;
8081}
8082
8083
/** Opcode 0x58 - pop eAX/rAX.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8090
8091
/** Opcode 0x59 - pop eCX/rCX.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8098
8099
/** Opcode 0x5a - pop eDX/rDX.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8106
8107
/** Opcode 0x5b - pop eBX/rBX.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8114
8115
/** Opcode 0x5c - pop eSP/rSP.
 *
 * POP SP needs special handling: the common worker references the register
 * and pops into it, but since the destination *is* the stack pointer the
 * value must be read first and stored afterwards (pop into a local, then
 * store to xSP).  In 64-bit mode without REX.B the special path is taken;
 * with REX.B the destination is R12 and the common worker suffices.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8163
8164
/** Opcode 0x5d - pop eBP/rBP.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8171
8172
/** Opcode 0x5e - pop eSI/rSI.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8179
8180
/** Opcode 0x5f - pop eDI/rDI.
 * Defers to the common pop worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8187
8188
/** Opcode 0x60 - pusha/pushad.
 * 186+ instruction, invalid in 64-bit mode; the whole operation is handled
 * by a C implementation, selected by effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8200
8201
/** Opcode 0x61 - popa/popad.
 * 186+ instruction, invalid in 64-bit mode; the whole operation is handled
 * by a C implementation, selected by effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8213
8214
/** Opcode 0x62 - bound Gv,Ma (186+); not implemented yet, stubbed.
 * The 186 minimum-CPU check below must be enabled when this is implemented. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8218
8219
/** Opcode 0x63 - arpl Ew,Gw (non-64-bit modes only).
 *
 * 286+ instruction, invalid in real and V8086 mode.  Both the register and
 * memory forms call the iemAImpl_arpl assembly helper, which updates the
 * destination selector and EFLAGS.  In the memory form the destination is
 * mapped read-write and committed after the call.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK)); /* no REX: not available in 64-bit mode */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8269
8270
/** Opcode 0x63 - movsxd Gv,Ev (64-bit mode).
 * @note This is a weird one. It works like a regular move instruction if
 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register
         * into the 64-bit destination register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: fetch 32 bits, sign-extend
         * to 64 and store in the destination register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8312
8313
/** Opcode 0x64 - FS segment override prefix (386+).
 * Records the prefix, sets the effective segment and decodes the next byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8326
8327
/** Opcode 0x65 - GS segment override prefix (386+).
 * Records the prefix, sets the effective segment and decodes the next byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8340
8341
/** Opcode 0x66 - operand-size override prefix (386+).
 * Records the prefix, recalculates the effective operand size and decodes
 * the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8354
8355
/** Opcode 0x67 - address-size override prefix (386+).
 * Records the prefix, toggles the effective address mode relative to the
 * default one, and decodes the next byte.  Note that in 64-bit mode the
 * prefix selects 32-bit addressing (16-bit is not reachable). */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8374
8375
/** Opcode 0x68 - push Iz (word/dword immediate, 186+).
 * In 64-bit mode the default operand size is 64-bit and the immediate is a
 * sign-extended dword. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit push still only has a 32-bit immediate, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8420
8421
/** Opcode 0x69 - imul Gv,Ev,Iz (three-operand signed multiply, 186+).
 *
 * Gv = Ev * Iz.  SF, ZF, AF and PF are marked undefined for verification.
 * Each size variant multiplies into a local copy via iemAImpl_imul_two_uXX
 * and stores the result to the destination register afterwards.  For the
 * memory forms the immediate size is passed to the effective-address
 * calculation so RIP-relative addressing accounts for the bytes still to
 * be fetched.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the trailing immediate */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the trailing immediate */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the trailing (dword) immediate */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8581
8582
/** Opcode 0x6a - push Ib (sign-extended byte immediate, 186+).
 * The signed byte is implicitly sign-extended to the effective operand size
 * by the push macros. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8609
8610
/** Opcode 0x6b - imul Gv,Ev,Ib (three-operand signed multiply, 186+).
 *
 * Gv = Ev * Ib, the byte immediate being sign-extended to the operand size.
 * SF, ZF, AF and PF are marked undefined for verification.  Same structure
 * as the 0x69 (Iz) form, only the immediate fetch differs.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing immediate */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing immediate */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing immediate */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8764
8765
/** Opcode 0x6c - ins Yb,DX (byte string input, 186+).
 * Selects between the plain and REP C implementations by prefix, then by
 * effective address mode.  Both REPZ and REPNZ are treated as REP here. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8794
8795
/** Opcode 0x6d - ins Yv,DX (word/dword string input, 186+).
 * Dispatches by REP prefix, then operand size, then address mode.  The
 * 64-bit operand size shares the 32-bit workers (op32 cases below). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8856
8857
/** Opcode 0x6e - outs DX,Yb (byte string output, 186+).
 * Selects between the plain and REP C implementations by prefix, then by
 * effective address mode; the effective segment is passed along since
 * OUTS honours segment overrides on the source. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8886
8887
/** Opcode 0x6f - outs DX,Yv (word/dword string output, 186+).
 * Dispatches by REP prefix, then operand size, then address mode; passes
 * the effective segment since OUTS honours segment overrides.  64-bit
 * operand size shares the 32-bit workers. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit uses the 32-bit workers */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8948
8949
/** Opcode 0x70 - jo Jb: jump short if OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8967
8968
/** Opcode 0x71 - jno Jb: jump short if OF is clear (inverted branch order). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8986
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9004
9005
/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if CF is clear. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9023
9024
/** Opcode 0x74 - je/jz Jb: jump short if ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9042
9043
/** Opcode 0x75 - jne/jnz Jb: jump short if ZF is clear. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9061
9062
/**
 * Opcode 0x76.
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1, unsigned).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9080
9081
/**
 * Opcode 0x77.
 * JNBE/JA rel8 - jump short if above (CF=0 and ZF=0, unsigned).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Inverted branch: if either CF or ZF is set we fall through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9099
9100
/**
 * Opcode 0x78.
 * JS rel8 - jump short if the sign flag is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9118
9119
/**
 * Opcode 0x79.
 * JNS rel8 - jump short if the sign flag is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Inverted branch: when SF is set we fall through, otherwise take the jump. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9137
9138
/**
 * Opcode 0x7a.
 * JP/JPE rel8 - jump short if the parity flag is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9156
9157
/**
 * Opcode 0x7b.
 * JNP/JPO rel8 - jump short if the parity flag is clear.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Inverted branch: when PF is set we fall through, otherwise take the jump. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9175
9176
/**
 * Opcode 0x7c.
 * JL/JNGE rel8 - jump short if less (SF != OF, signed comparison).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9194
9195
/**
 * Opcode 0x7d.
 * JNL/JGE rel8 - jump short if greater or equal (SF == OF, signed comparison).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Inverted branch: SF != OF (i.e. "less") falls through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9213
9214
/**
 * Opcode 0x7e.
 * JLE/JNG rel8 - jump short if less or equal (ZF=1 or SF != OF, signed).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9232
9233
/**
 * Opcode 0x7f.
 * JNLE/JG rel8 - jump short if greater (ZF=0 and SF == OF, signed).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Inverted branch: "less or equal" (ZF=1 or SF != OF) falls through. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9251
9252
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The ModRM reg field selects which of the eight operations to perform; the
 * implementation table g_apIemImplGrp1 is indexed by the same field.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked out of a packed string; each entry is 4 bytes long. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* A NULL locked worker identifies CMP (read-only destination, LOCK not allowed). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' tells the EA calculation one immediate byte follows the ModRM bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9311
9312
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * The ModRM reg field selects the operation.  The immediate is operand sized:
 * imm16 for 16-bit, imm32 for 32-bit, and imm32 sign-extended to 64-bit for
 * 64-bit operand size (note the IEM_OPCODE_GET_NEXT_S32_SX_U64 below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked out of a packed string; each entry is 4 bytes long. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* A NULL locked worker identifies CMP (read-only, LOCK not allowed). */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '2' = size in bytes of the immediate that follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Writing a 32-bit GPR zeroes the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* 64-bit operand size still uses a 32-bit immediate, sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes on the wire, even though it's sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9487
9488
/**
 * Opcode 0x82.
 * Undocumented alias of opcode 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9495
9496
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * The 8-bit immediate is sign-extended to the effective operand size before
 * the operation (note the (int8_t) casts below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked out of a packed string; each entry is 4 bytes long. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Writing a 32-bit GPR zeroes the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* A NULL locked worker identifies CMP (read-only destination, LOCK not allowed). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG(uint16_t,        u16Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '1' = one immediate byte follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG(uint32_t,        u32Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG(uint64_t,        u64Src,                     1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9660
9661
/**
 * Opcode 0x84.
 * TEST Eb,Gb - AND without storing the result; reuses the generic byte
 * binary-operator decoder with the test implementation table.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9670
9671
/**
 * Opcode 0x85.
 * TEST Ev,Gv - AND without storing the result; reuses the generic
 * operand-sized binary-operator decoder with the test implementation table.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9680
9681
/**
 * Opcode 0x86.
 * XCHG Eb,Gb - exchange a byte register with r/m8.
 *
 * Note that the memory form does not reject the LOCK prefix: XCHG with a
 * memory operand is implicitly locked on real CPUs.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register-register form: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9729
9730
/**
 * Opcode 0x87.
 * XCHG Ev,Gv - exchange an operand-sized register with r/m.
 *
 * Note that the memory forms do not reject the LOCK prefix: XCHG with a
 * memory operand is implicitly locked on real CPUs.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* The 32-bit stores implicitly zero the high halves of both registers. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Zero the upper half of the destination register after the 32-bit write. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9852
9853
/**
 * Opcode 0x88.
 * MOV Eb,Gb - store a byte register to r/m8.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9892
9893
/**
 * Opcode 0x89.
 * MOV Ev,Gv - store an operand-sized register to r/m.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* The 32-bit store implicitly zeroes the upper half of the destination. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9980
9981
/**
 * Opcode 0x8a.
 * MOV Gb,Eb - load a byte register from r/m8.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10018
10019
/**
 * Opcode 0x8b.
 * MOV Gv,Ev - load an operand-sized register from r/m.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* The 32-bit store implicitly zeroes the upper half of the destination. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10106
10107
/** Opcode 0x63 - arpl Ew,Gw (legacy modes) / movsxd Gv,Ev (64-bit mode).
 * Outside 64-bit mode this byte is ARPL.  In 64-bit mode it is MOVSXD; with
 * a 16/32-bit effective operand size it degenerates to a plain MOV Gv,Ev. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
10117
10118
/** Opcode 0x8c - mov Ev,Sw.
 * Stores a segment register into a general register or a word-sized memory
 * location. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the specified segment register (the source here) exists.
     * The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10191
10192
10193
10194
/** Opcode 0x8d - lea Gv,M.
 * Computes the effective address of the memory operand and stores it in the
 * destination register, truncated to the effective operand size.  A register
 * form (mod == 3) is not encodable and raises \#UD. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10239
10240
/** Opcode 0x8e - mov Sw,Ev.
 * Loads a segment register from a general register or a word-sized memory
 * location.  Loading CS, or a register beyond GS, raises \#UD.  The actual
 * selector load (descriptor checks etc.) is done by iemCImpl_load_SReg. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10294
10295
/** Opcode 0x8f /0 - pop Ev.
 * Pops the top of the stack into a general register or memory operand.
 * Per Intel, RSP is incremented before the effective address is calculated,
 * which is why the address is decoded twice below. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: decode the address bytes so the opcode stream position is known. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: redo the calculation with rSP temporarily bumped past the popped item. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* rSP is only committed (below) once both the pop and the store succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10397
10398
/** Opcode 0x8f - group 1A.
 * Only /0 (pop Ev) is defined; AMD reuses /1 thru /7 as the XOP prefix,
 * which is not decoded yet and currently raises \#UD. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC("3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
10411
10412
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the general register selected by the low opcode bits (plus
 * REX.B, OR'ed in below) with rAX/eAX/ax according to the effective
 * operand size.
 *
 * @param   iReg    The low three bits of the register index (0x90..0x97).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10462
10463
10464/** Opcode 0x90. */
10465FNIEMOP_DEF(iemOp_nop)
10466{
10467 /* R8/R8D and RAX/EAX can be exchanged. */
10468 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10469 {
10470 IEMOP_MNEMONIC("xchg r8,rAX");
10471 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10472 }
10473
10474 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10475 IEMOP_MNEMONIC("pause");
10476 else
10477 IEMOP_MNEMONIC("nop");
10478 IEM_MC_BEGIN(0, 0);
10479 IEM_MC_ADVANCE_RIP();
10480 IEM_MC_END();
10481 return VINF_SUCCESS;
10482}
10483
10484
/** Opcode 0x91 - xchg rCX,rAX (rCX/r9 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10491
10492
/** Opcode 0x92 - xchg rDX,rAX (rDX/r10 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10499
10500
/** Opcode 0x93 - xchg rBX,rAX (rBX/r11 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10507
10508
10509/** Opcode 0x94. */
10510FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10511{
10512 IEMOP_MNEMONIC("xchg rSX,rAX");
10513 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10514}
10515
10516
/** Opcode 0x95 - xchg rBP,rAX (rBP/r13 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10523
10524
/** Opcode 0x96 - xchg rSI,rAX (rSI/r14 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10531
10532
/** Opcode 0x97 - xchg rDI,rAX (rDI/r15 with REX.B). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10539
10540
/** Opcode 0x98 - cbw / cwde / cdqe.
 * Sign extends AL into AX, AX into EAX, or EAX into RAX, depending on the
 * effective operand size.  Implemented by testing the sign bit and OR'ing /
 * AND'ing in the extension mask. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10586
10587
/** Opcode 0x99 - cwd / cdq / cqo.
 * Sign extends AX/EAX/RAX into DX/EDX/RDX by storing either all-ones or
 * zero into rDX depending on the sign bit of rAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10633
10634
/** Opcode 0x9a - call Ap (far call with immediate selector:offset).
 * Invalid in 64-bit mode.  Decodes the far pointer (16- or 32-bit offset
 * followed by a 16-bit selector) and defers to the far-call C implementation. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10651
10652
/** Opcode 0x9b - wait (aka fwait).
 * Checks for pending x87 exceptions (and \#NM conditions) without executing
 * any FPU operation. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10666
10667
/** Opcode 0x9c - pushf Fv.
 * Pushes the flags register; the mode/IOPL checks live in iemCImpl_pushf. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10675
10676
/** Opcode 0x9d - popf Fv.
 * Pops into the flags register; the mode/IOPL checks live in iemCImpl_popf. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10684
10685
/** Opcode 0x9e - sahf.
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to 1).  In 64-bit mode the instruction is only valid when the CPU
 * advertises the LAHF/SAHF feature. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits, replace the low byte */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 is always set in EFLAGS */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10708
10709
/** Opcode 0x9f - lahf.
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode the instruction is
 * only valid when the CPU advertises the LAHF/SAHF feature. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10726
10727
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset is 2, 4 or 8 bytes wide depending on the effective
 * address size, and is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff     The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10752
10753/** Opcode 0xa0. */
10754FNIEMOP_DEF(iemOp_mov_Al_Ob)
10755{
10756 /*
10757 * Get the offset and fend of lock prefixes.
10758 */
10759 RTGCPTR GCPtrMemOff;
10760 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10761
10762 /*
10763 * Fetch AL.
10764 */
10765 IEM_MC_BEGIN(0,1);
10766 IEM_MC_LOCAL(uint8_t, u8Tmp);
10767 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10768 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10769 IEM_MC_ADVANCE_RIP();
10770 IEM_MC_END();
10771 return VINF_SUCCESS;
10772}
10773
10774
/** Opcode 0xa1 - mov rAX,Ov.
 * Loads AX/EAX/RAX from the immediate memory offset (moffs), width chosen
 * by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10820
10821
10822/** Opcode 0xa2. */
10823FNIEMOP_DEF(iemOp_mov_Ob_AL)
10824{
10825 /*
10826 * Get the offset and fend of lock prefixes.
10827 */
10828 RTGCPTR GCPtrMemOff;
10829 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10830
10831 /*
10832 * Store AL.
10833 */
10834 IEM_MC_BEGIN(0,1);
10835 IEM_MC_LOCAL(uint8_t, u8Tmp);
10836 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10837 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10838 IEM_MC_ADVANCE_RIP();
10839 IEM_MC_END();
10840 return VINF_SUCCESS;
10841}
10842
10843
10844/** Opcode 0xa3. */
10845FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10846{
10847 /*
10848 * Get the offset and fend of lock prefixes.
10849 */
10850 RTGCPTR GCPtrMemOff;
10851 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10852
10853 /*
10854 * Store rAX.
10855 */
10856 switch (pIemCpu->enmEffOpSize)
10857 {
10858 case IEMMODE_16BIT:
10859 IEM_MC_BEGIN(0,1);
10860 IEM_MC_LOCAL(uint16_t, u16Tmp);
10861 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10862 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10863 IEM_MC_ADVANCE_RIP();
10864 IEM_MC_END();
10865 return VINF_SUCCESS;
10866
10867 case IEMMODE_32BIT:
10868 IEM_MC_BEGIN(0,1);
10869 IEM_MC_LOCAL(uint32_t, u32Tmp);
10870 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10871 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10872 IEM_MC_ADVANCE_RIP();
10873 IEM_MC_END();
10874 return VINF_SUCCESS;
10875
10876 case IEMMODE_64BIT:
10877 IEM_MC_BEGIN(0,1);
10878 IEM_MC_LOCAL(uint64_t, u64Tmp);
10879 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10880 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10881 IEM_MC_ADVANCE_RIP();
10882 IEM_MC_END();
10883 return VINF_SUCCESS;
10884
10885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10886 }
10887}
10888
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one MOVS iteration: load ValBits bits from [xSI] in the effective
 * segment, store to ES:[xDI], then advance or retreat both index registers
 * by ValBits/8 depending on EFLAGS.DF.  AddrBits selects the address size. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10907
/** Opcode 0xa4 - movsb Xb,Yb.
 * Byte string move.  With a REP/REPNE prefix the whole loop is deferred to
 * the C implementation; otherwise a single iteration is emitted via
 * IEM_MOVS_CASE. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10941
10942
/** Opcode 0xa5 - movsw/movsd/movsq Xv,Yv.
 * Word/dword/qword string move.  With a REP/REPNE prefix the loop is
 * deferred to the C implementation selected by operand and address size;
 * otherwise a single iteration is emitted via IEM_MOVS_CASE.  A 64-bit
 * operand size with a 16-bit address size is not encodable. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable; every nested case returns */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed; every nested case returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11025
11026#undef IEM_MOVS_CASE
11027
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one CMPS iteration: load ValBits bits from [xSI] in the effective
 * segment and from ES:[xDI], run the cmp worker to set EFLAGS, then advance
 * or retreat both index registers by ValBits/8 depending on EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11055/** Opcode 0xa6. */
11056FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11057{
11058 IEMOP_HLP_NO_LOCK_PREFIX();
11059
11060 /*
11061 * Use the C implementation if a repeat prefix is encountered.
11062 */
11063 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11064 {
11065 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11066 switch (pIemCpu->enmEffAddrMode)
11067 {
11068 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11069 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11070 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11072 }
11073 }
11074 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11075 {
11076 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11077 switch (pIemCpu->enmEffAddrMode)
11078 {
11079 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11080 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11081 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11083 }
11084 }
11085 IEMOP_MNEMONIC("cmps Xb,Yb");
11086
11087 /*
11088 * Sharing case implementation with cmps[wdq] below.
11089 */
11090 switch (pIemCpu->enmEffAddrMode)
11091 {
11092 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11093 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11094 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11096 }
11097 return VINF_SUCCESS;
11098
11099}
11100
11101
11102/** Opcode 0xa7. */
11103FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11104{
11105 IEMOP_HLP_NO_LOCK_PREFIX();
11106
11107 /*
11108 * Use the C implementation if a repeat prefix is encountered.
11109 */
11110 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11111 {
11112 IEMOP_MNEMONIC("repe cmps Xv,Yv");
11113 switch (pIemCpu->enmEffOpSize)
11114 {
11115 case IEMMODE_16BIT:
11116 switch (pIemCpu->enmEffAddrMode)
11117 {
11118 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
11119 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
11120 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
11121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11122 }
11123 break;
11124 case IEMMODE_32BIT:
11125 switch (pIemCpu->enmEffAddrMode)
11126 {
11127 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
11128 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
11129 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
11130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11131 }
11132 case IEMMODE_64BIT:
11133 switch (pIemCpu->enmEffAddrMode)
11134 {
11135 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11136 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
11137 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
11138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11139 }
11140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11141 }
11142 }
11143
11144 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11145 {
11146 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11147 switch (pIemCpu->enmEffOpSize)
11148 {
11149 case IEMMODE_16BIT:
11150 switch (pIemCpu->enmEffAddrMode)
11151 {
11152 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
11153 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
11154 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
11155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11156 }
11157 break;
11158 case IEMMODE_32BIT:
11159 switch (pIemCpu->enmEffAddrMode)
11160 {
11161 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
11162 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
11163 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
11164 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11165 }
11166 case IEMMODE_64BIT:
11167 switch (pIemCpu->enmEffAddrMode)
11168 {
11169 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11170 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
11171 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
11172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11173 }
11174 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11175 }
11176 }
11177
11178 IEMOP_MNEMONIC("cmps Xv,Yv");
11179
11180 /*
11181 * Annoying double switch here.
11182 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11183 */
11184 switch (pIemCpu->enmEffOpSize)
11185 {
11186 case IEMMODE_16BIT:
11187 switch (pIemCpu->enmEffAddrMode)
11188 {
11189 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11190 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11191 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11193 }
11194 break;
11195
11196 case IEMMODE_32BIT:
11197 switch (pIemCpu->enmEffAddrMode)
11198 {
11199 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11200 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11201 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11203 }
11204 break;
11205
11206 case IEMMODE_64BIT:
11207 switch (pIemCpu->enmEffAddrMode)
11208 {
11209 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11210 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11211 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11213 }
11214 break;
11215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11216 }
11217 return VINF_SUCCESS;
11218
11219}
11220
11221#undef IEM_CMPS_CASE
11222
/** Opcode 0xa8. TEST AL, imm8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11230
11231
/** Opcode 0xa9. TEST rAX, immz (16/32-bit immediate, sign-extended for 64-bit). */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11239
11240
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for one single
 *  (non-REP) STOS iteration: stores the low ValBits of xAX to ES:[xDI] and
 *  steps xDI by ValBits/8, backwards when EFLAGS.DF is set. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11256
/** Opcode 0xaa. STOSB - store AL to ES:[xDI]. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Both REPZ and REPNZ are treated as plain REP here.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11290
11291
11292/** Opcode 0xab. */
11293FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11294{
11295 IEMOP_HLP_NO_LOCK_PREFIX();
11296
11297 /*
11298 * Use the C implementation if a repeat prefix is encountered.
11299 */
11300 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11301 {
11302 IEMOP_MNEMONIC("rep stos Yv,rAX");
11303 switch (pIemCpu->enmEffOpSize)
11304 {
11305 case IEMMODE_16BIT:
11306 switch (pIemCpu->enmEffAddrMode)
11307 {
11308 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11309 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11310 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11312 }
11313 break;
11314 case IEMMODE_32BIT:
11315 switch (pIemCpu->enmEffAddrMode)
11316 {
11317 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11318 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11319 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11321 }
11322 case IEMMODE_64BIT:
11323 switch (pIemCpu->enmEffAddrMode)
11324 {
11325 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11326 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11327 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11329 }
11330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11331 }
11332 }
11333 IEMOP_MNEMONIC("stos Yv,rAX");
11334
11335 /*
11336 * Annoying double switch here.
11337 * Using ugly macro for implementing the cases, sharing it with stosb.
11338 */
11339 switch (pIemCpu->enmEffOpSize)
11340 {
11341 case IEMMODE_16BIT:
11342 switch (pIemCpu->enmEffAddrMode)
11343 {
11344 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11345 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11346 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11348 }
11349 break;
11350
11351 case IEMMODE_32BIT:
11352 switch (pIemCpu->enmEffAddrMode)
11353 {
11354 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11355 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11356 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11358 }
11359 break;
11360
11361 case IEMMODE_64BIT:
11362 switch (pIemCpu->enmEffAddrMode)
11363 {
11364 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11365 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11366 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11368 }
11369 break;
11370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11371 }
11372 return VINF_SUCCESS;
11373}
11374
11375#undef IEM_STOS_CASE
11376
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for one single
 *  (non-REP) LODS iteration: loads ValBits bits from iEffSeg:[xSI] into xAX
 *  and steps xSI by ValBits/8, backwards when EFLAGS.DF is set. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11392
/** Opcode 0xac. LODSB - load AL from [xSI]. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Both REPZ and REPNZ are treated as plain REP here.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11426
11427
11428/** Opcode 0xad. */
11429FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11430{
11431 IEMOP_HLP_NO_LOCK_PREFIX();
11432
11433 /*
11434 * Use the C implementation if a repeat prefix is encountered.
11435 */
11436 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11437 {
11438 IEMOP_MNEMONIC("rep lods rAX,Xv");
11439 switch (pIemCpu->enmEffOpSize)
11440 {
11441 case IEMMODE_16BIT:
11442 switch (pIemCpu->enmEffAddrMode)
11443 {
11444 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11445 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11446 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11448 }
11449 break;
11450 case IEMMODE_32BIT:
11451 switch (pIemCpu->enmEffAddrMode)
11452 {
11453 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11454 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11455 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11457 }
11458 case IEMMODE_64BIT:
11459 switch (pIemCpu->enmEffAddrMode)
11460 {
11461 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11462 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11463 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11465 }
11466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11467 }
11468 }
11469 IEMOP_MNEMONIC("lods rAX,Xv");
11470
11471 /*
11472 * Annoying double switch here.
11473 * Using ugly macro for implementing the cases, sharing it with lodsb.
11474 */
11475 switch (pIemCpu->enmEffOpSize)
11476 {
11477 case IEMMODE_16BIT:
11478 switch (pIemCpu->enmEffAddrMode)
11479 {
11480 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11481 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11482 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11483 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11484 }
11485 break;
11486
11487 case IEMMODE_32BIT:
11488 switch (pIemCpu->enmEffAddrMode)
11489 {
11490 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11491 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11492 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11494 }
11495 break;
11496
11497 case IEMMODE_64BIT:
11498 switch (pIemCpu->enmEffAddrMode)
11499 {
11500 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11501 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11502 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11504 }
11505 break;
11506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11507 }
11508 return VINF_SUCCESS;
11509}
11510
11511#undef IEM_LODS_CASE
11512
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for one single
 *  (non-REP) SCAS iteration: compares the low ValBits of xAX with the value
 *  at ES:[xDI] via iemAImpl_cmp_uNN (updating EFLAGS) and steps xDI by
 *  ValBits/8, backwards when EFLAGS.DF is set. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11534
/** Opcode 0xae. SCASB - compare AL with the byte at ES:[xDI]. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11579
11580
11581/** Opcode 0xaf. */
11582FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11583{
11584 IEMOP_HLP_NO_LOCK_PREFIX();
11585
11586 /*
11587 * Use the C implementation if a repeat prefix is encountered.
11588 */
11589 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11590 {
11591 IEMOP_MNEMONIC("repe scas rAX,Xv");
11592 switch (pIemCpu->enmEffOpSize)
11593 {
11594 case IEMMODE_16BIT:
11595 switch (pIemCpu->enmEffAddrMode)
11596 {
11597 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11601 }
11602 break;
11603 case IEMMODE_32BIT:
11604 switch (pIemCpu->enmEffAddrMode)
11605 {
11606 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11607 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11608 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11610 }
11611 case IEMMODE_64BIT:
11612 switch (pIemCpu->enmEffAddrMode)
11613 {
11614 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11615 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11616 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11618 }
11619 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11620 }
11621 }
11622 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11623 {
11624 IEMOP_MNEMONIC("repne scas rAX,Xv");
11625 switch (pIemCpu->enmEffOpSize)
11626 {
11627 case IEMMODE_16BIT:
11628 switch (pIemCpu->enmEffAddrMode)
11629 {
11630 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11631 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11632 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11634 }
11635 break;
11636 case IEMMODE_32BIT:
11637 switch (pIemCpu->enmEffAddrMode)
11638 {
11639 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11640 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11641 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11643 }
11644 case IEMMODE_64BIT:
11645 switch (pIemCpu->enmEffAddrMode)
11646 {
11647 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
11648 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11649 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11651 }
11652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11653 }
11654 }
11655 IEMOP_MNEMONIC("scas rAX,Xv");
11656
11657 /*
11658 * Annoying double switch here.
11659 * Using ugly macro for implementing the cases, sharing it with scasb.
11660 */
11661 switch (pIemCpu->enmEffOpSize)
11662 {
11663 case IEMMODE_16BIT:
11664 switch (pIemCpu->enmEffAddrMode)
11665 {
11666 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11667 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11668 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11670 }
11671 break;
11672
11673 case IEMMODE_32BIT:
11674 switch (pIemCpu->enmEffAddrMode)
11675 {
11676 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11677 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11678 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11680 }
11681 break;
11682
11683 case IEMMODE_64BIT:
11684 switch (pIemCpu->enmEffAddrMode)
11685 {
11686 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11687 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11688 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11690 }
11691 break;
11692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11693 }
11694 return VINF_SUCCESS;
11695}
11696
11697#undef IEM_SCAS_CASE
11698
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the 8-bit immediate and stores it in the given register.
 *
 * @param   iReg    The 8-bit register index (callers pass X86_GREG_xXX
 *                  OR'ed with pIemCpu->uRexB; indices 4-7 refer to
 *                  AH/CH/DH/BH when no REX prefix is present - see callers).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11715
11716
/** Opcode 0xb0. MOV AL/R8L, imm8. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11723
11724
/** Opcode 0xb1. MOV CL/R9L, imm8. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11731
11732
/** Opcode 0xb2. MOV DL/R10L, imm8. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11739
11740
/** Opcode 0xb3. MOV BL/R11L, imm8. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11747
11748
/** Opcode 0xb4. MOV AH, imm8.
 *  Register index 4 (xSP) denotes AH without a REX prefix, SPL with one. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11755
11756
/** Opcode 0xb5. MOV CH, imm8.
 *  Register index 5 (xBP) denotes CH without a REX prefix, BPL with one. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11763
11764
/** Opcode 0xb6. MOV DH, imm8.
 *  Register index 6 (xSI) denotes DH without a REX prefix, SIL with one. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11771
11772
/** Opcode 0xb7. MOV BH, imm8.
 *  Register index 7 (xDI) denotes BH without a REX prefix, DIL with one. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11779
11780
11781/**
11782 * Common 'mov regX,immX' helper.
11783 */
11784FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11785{
11786 switch (pIemCpu->enmEffOpSize)
11787 {
11788 case IEMMODE_16BIT:
11789 {
11790 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11791 IEMOP_HLP_NO_LOCK_PREFIX();
11792
11793 IEM_MC_BEGIN(0, 1);
11794 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11795 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11796 IEM_MC_ADVANCE_RIP();
11797 IEM_MC_END();
11798 break;
11799 }
11800
11801 case IEMMODE_32BIT:
11802 {
11803 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11804 IEMOP_HLP_NO_LOCK_PREFIX();
11805
11806 IEM_MC_BEGIN(0, 1);
11807 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11808 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11809 IEM_MC_ADVANCE_RIP();
11810 IEM_MC_END();
11811 break;
11812 }
11813 case IEMMODE_64BIT:
11814 {
11815 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11816 IEMOP_HLP_NO_LOCK_PREFIX();
11817
11818 IEM_MC_BEGIN(0, 1);
11819 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11820 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11821 IEM_MC_ADVANCE_RIP();
11822 IEM_MC_END();
11823 break;
11824 }
11825 }
11826
11827 return VINF_SUCCESS;
11828}
11829
11830
11831/** Opcode 0xb8. */
11832FNIEMOP_DEF(iemOp_eAX_Iv)
11833{
11834 IEMOP_MNEMONIC("mov rAX,IV");
11835 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11836}
11837
11838
11839/** Opcode 0xb9. */
11840FNIEMOP_DEF(iemOp_eCX_Iv)
11841{
11842 IEMOP_MNEMONIC("mov rCX,IV");
11843 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11844}
11845
11846
11847/** Opcode 0xba. */
11848FNIEMOP_DEF(iemOp_eDX_Iv)
11849{
11850 IEMOP_MNEMONIC("mov rDX,IV");
11851 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11852}
11853
11854
11855/** Opcode 0xbb. */
11856FNIEMOP_DEF(iemOp_eBX_Iv)
11857{
11858 IEMOP_MNEMONIC("mov rBX,IV");
11859 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11860}
11861
11862
11863/** Opcode 0xbc. */
11864FNIEMOP_DEF(iemOp_eSP_Iv)
11865{
11866 IEMOP_MNEMONIC("mov rSP,IV");
11867 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11868}
11869
11870
11871/** Opcode 0xbd. */
11872FNIEMOP_DEF(iemOp_eBP_Iv)
11873{
11874 IEMOP_MNEMONIC("mov rBP,IV");
11875 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11876}
11877
11878
11879/** Opcode 0xbe. */
11880FNIEMOP_DEF(iemOp_eSI_Iv)
11881{
11882 IEMOP_MNEMONIC("mov rSI,IV");
11883 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11884}
11885
11886
11887/** Opcode 0xbf. */
11888FNIEMOP_DEF(iemOp_eDI_Iv)
11889{
11890 IEMOP_MNEMONIC("mov rDI,IV");
11891 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11892}
11893
11894
/** Opcode 0xc0. Group 2: ROL/ROR/RCL/RCR/SHL/SHR/SAR Eb, imm8. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();        /* The shift-by-imm8 forms require at least an 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation; /6 is undefined. */
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are (partially) undefined for shifts/rotates; skip in verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The imm8 follows the ModR/M + displacement, so the effective address
           must be calculated (consuming the displacement) before fetching it. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11954
11955
11956/** Opcode 0xc1. */
11957FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
11958{
11959 IEMOP_HLP_MIN_186();
11960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11961 PCIEMOPSHIFTSIZES pImpl;
11962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11963 {
11964 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
11965 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
11966 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
11967 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
11968 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
11969 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
11970 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
11971 case 6: return IEMOP_RAISE_INVALID_OPCODE();
11972 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
11973 }
11974 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
11975
11976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11977 {
11978 /* register */
11979 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
11980 IEMOP_HLP_NO_LOCK_PREFIX();
11981 switch (pIemCpu->enmEffOpSize)
11982 {
11983 case IEMMODE_16BIT:
11984 IEM_MC_BEGIN(3, 0);
11985 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11986 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11987 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11988 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11989 IEM_MC_REF_EFLAGS(pEFlags);
11990 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
11991 IEM_MC_ADVANCE_RIP();
11992 IEM_MC_END();
11993 return VINF_SUCCESS;
11994
11995 case IEMMODE_32BIT:
11996 IEM_MC_BEGIN(3, 0);
11997 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11998 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
11999 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12000 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12001 IEM_MC_REF_EFLAGS(pEFlags);
12002 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12003 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12004 IEM_MC_ADVANCE_RIP();
12005 IEM_MC_END();
12006 return VINF_SUCCESS;
12007
12008 case IEMMODE_64BIT:
12009 IEM_MC_BEGIN(3, 0);
12010 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12011 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12012 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12013 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12014 IEM_MC_REF_EFLAGS(pEFlags);
12015 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12016 IEM_MC_ADVANCE_RIP();
12017 IEM_MC_END();
12018 return VINF_SUCCESS;
12019
12020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12021 }
12022 }
12023 else
12024 {
12025 /* memory */
12026 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12027 switch (pIemCpu->enmEffOpSize)
12028 {
12029 case IEMMODE_16BIT:
12030 IEM_MC_BEGIN(3, 2);
12031 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12032 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12033 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12035
12036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12037 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12038 IEM_MC_ASSIGN(cShiftArg, cShift);
12039 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12040 IEM_MC_FETCH_EFLAGS(EFlags);
12041 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12042
12043 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12044 IEM_MC_COMMIT_EFLAGS(EFlags);
12045 IEM_MC_ADVANCE_RIP();
12046 IEM_MC_END();
12047 return VINF_SUCCESS;
12048
12049 case IEMMODE_32BIT:
12050 IEM_MC_BEGIN(3, 2);
12051 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12052 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12053 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12055
12056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12057 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12058 IEM_MC_ASSIGN(cShiftArg, cShift);
12059 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12060 IEM_MC_FETCH_EFLAGS(EFlags);
12061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12062
12063 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12064 IEM_MC_COMMIT_EFLAGS(EFlags);
12065 IEM_MC_ADVANCE_RIP();
12066 IEM_MC_END();
12067 return VINF_SUCCESS;
12068
12069 case IEMMODE_64BIT:
12070 IEM_MC_BEGIN(3, 2);
12071 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12072 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12073 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12075
12076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12077 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12078 IEM_MC_ASSIGN(cShiftArg, cShift);
12079 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12080 IEM_MC_FETCH_EFLAGS(EFlags);
12081 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12082
12083 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12084 IEM_MC_COMMIT_EFLAGS(EFlags);
12085 IEM_MC_ADVANCE_RIP();
12086 IEM_MC_END();
12087 return VINF_SUCCESS;
12088
12089 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12090 }
12091 }
12092}
12093
12094
/** Opcode 0xc2 - retn Iw: near return, popping an extra Iw bytes of arguments. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    /* Fetch the 16-bit stack-adjustment immediate before the prefix checks. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12104
12105
/** Opcode 0xc3 - retn: plain near return. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Same worker as 0xc2, just with zero extra bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12114
12115
/**
 * Opcode 0xc4 - LES Gv,Mp in legacy/compatibility mode with a memory operand;
 * otherwise the 2-byte VEX prefix form (not implemented yet, raises \#UD).
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Load ES and the destination general register from the far pointer. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12136
12137
/**
 * Opcode 0xc5 - LDS Gv,Mp in legacy/compatibility mode with a memory operand;
 * otherwise the 3-byte VEX prefix form (not implemented yet, raises \#UD).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* Memory operand: this really is LDS. */
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX requires protected mode. */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the escaped opcode byte. */
    uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet. */
}
12175
12176
/** Opcode 0xc6 - group 11; only /0 (mov Eb,Ib) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows the addressing bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12208
12209
/** Opcode 0xc7 - group 11; only /0 (mov Ev,Iz) is defined. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* The 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still only 4 immediate bytes in 64-bit mode */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12290
12291
12292
12293
/** Opcode 0xc8 - ENTER Iw,Ib: set up a stack frame (frame size, nesting level). */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* instruction was introduced with the 80186 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12305
12306
12307/** Opcode 0xc9. */
12308FNIEMOP_DEF(iemOp_leave)
12309{
12310 IEMOP_MNEMONIC("retn");
12311 IEMOP_HLP_MIN_186();
12312 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12313 IEMOP_HLP_NO_LOCK_PREFIX();
12314 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12315}
12316
12317
/** Opcode 0xca - retf Iw: far return, popping an extra Iw bytes of arguments. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    /* Fetch the 16-bit stack-adjustment immediate before the prefix checks. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12327
12328
/** Opcode 0xcb - retf: plain far return. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Same worker as 0xca, just with zero extra bytes to pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12337
12338
/** Opcode 0xcc - INT3: software breakpoint, raises \#BP. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* fIsBpInstr=true marks this as the dedicated one-byte breakpoint form. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12345
12346
/** Opcode 0xcd - INT Ib: software interrupt with an arbitrary vector. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int); /* the interrupt vector */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12354
12355
/** Opcode 0xce - INTO: overflow trap; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    /* NOTE(review): the EFLAGS.OF conditionality of INTO is not visible here;
       presumably iemCImpl_int handles it for vector X86_XCPT_OF - verify. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12369
12370
/** Opcode 0xcf - IRET: return from interrupt, using the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12378
12379
/**
 * Opcode 0xd0 - group 2: rotate/shift r/m8 by 1.
 * The ModR/M reg field selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verification mode that OF/AF may not match real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no immediate bytes follow */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12435
12436
12437
/**
 * Opcode 0xd1 - group 2: rotate/shift r/m16, r/m32 or r/m64 by 1.
 * The ModR/M reg field selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verification mode that OF/AF may not match real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12567
12568
/**
 * Opcode 0xd2 - group 2: rotate/shift r/m8 by CL.
 * The ModR/M reg field selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Tell the verification mode that OF/AF may not match real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12626
12627
/**
 * Opcode 0xd3 - group 2: rotate/shift r/m16, r/m32 or r/m64 by CL.
 * The ModR/M reg field selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verification mode that OF/AF may not match real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12763
/** Opcode 0xd4 - AAM Ib: ASCII adjust AX after multiply (Ib is the divisor). */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM 0 divides by zero -> \#DE. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12775
12776
/** Opcode 0xd5 - AAD Ib: ASCII adjust AX before division (Ib is the multiplier). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12786
12787
12788/** Opcode 0xd6. */
12789FNIEMOP_DEF(iemOp_salc)
12790{
12791 IEMOP_MNEMONIC("salc");
12792 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12793 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12795 IEMOP_HLP_NO_64BIT();
12796
12797 IEM_MC_BEGIN(0, 0);
12798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12799 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12800 } IEM_MC_ELSE() {
12801 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12802 } IEM_MC_ENDIF();
12803 IEM_MC_ADVANCE_RIP();
12804 IEM_MC_END();
12805 return VINF_SUCCESS;
12806}
12807
12808
/**
 * Opcode 0xd7 - XLAT: AL = [rDI-seg-default rBX + zero-extended AL], with the
 * address register width picked by the effective address size.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* table base in BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* AL, zero extended */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* table base in EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* AL, zero extended */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* table base in RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12855
12856
12857/**
12858 * Common worker for FPU instructions working on ST0 and STn, and storing the
12859 * result in ST0.
12860 *
12861 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12862 */
12863FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12864{
12865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12866
12867 IEM_MC_BEGIN(3, 1);
12868 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12869 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12870 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12871 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12872
12873 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12874 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12875 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12876 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12877 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12878 IEM_MC_ELSE()
12879 IEM_MC_FPU_STACK_UNDERFLOW(0);
12880 IEM_MC_ENDIF();
12881 IEM_MC_USED_FPU();
12882 IEM_MC_ADVANCE_RIP();
12883
12884 IEM_MC_END();
12885 return VINF_SUCCESS;
12886}
12887
12888
12889/**
12890 * Common worker for FPU instructions working on ST0 and STn, and only affecting
12891 * flags.
12892 *
12893 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12894 */
12895FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12896{
12897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12898
12899 IEM_MC_BEGIN(3, 1);
12900 IEM_MC_LOCAL(uint16_t, u16Fsw);
12901 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12902 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12903 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12904
12905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12906 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12907 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12908 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12909 IEM_MC_UPDATE_FSW(u16Fsw);
12910 IEM_MC_ELSE()
12911 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
12912 IEM_MC_ENDIF();
12913 IEM_MC_USED_FPU();
12914 IEM_MC_ADVANCE_RIP();
12915
12916 IEM_MC_END();
12917 return VINF_SUCCESS;
12918}
12919
12920
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except that the stack is popped
 * after the FSW update (and on the underflow path).
 *
 * @param   bRm         The ModR/M byte; bits 0-2 select ST(i) as operand two.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX: no result register to write */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12951
12952
/** Opcode 0xd8 11/0 - FADD ST0,ST(i): add ST(i) to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1 - FMUL ST0,ST(i): multiply ST0 by ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2 - FCOM ST(i): compare ST0 with ST(i), setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3 - FCOMP ST(i): like FCOM but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4 - FSUB ST0,ST(i): subtract ST(i) from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5 - FSUBR ST0,ST(i): reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6 - FDIV ST0,ST(i): divide ST0 by ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7 - FDIVR ST0,ST(i): reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13015
13016
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real value is fetched from memory (after effective-address
 * calculation and decode completion) before ST0 is checked for emptiness.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes into ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13052
13053
/** Opcode 0xd8 !11/0 - FADD ST0,m32real: add a 32-bit real from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1 - FMUL ST0,m32real: multiply ST0 by a 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13068
13069
/** Opcode 0xd8 !11/2 - FCOM m32real: compare ST0 with a 32-bit real from
 *  memory, updating only FSW (condition codes).  The memory operand address
 *  is recorded in the FPU data pointer via the _MEM_OP FSW update. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13102
13103
/** Opcode 0xd8 !11/3 - FCOMP m32real: like FCOM m32real (see iemOp_fcom_m32r)
 *  but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13136
13137
/** Opcode 0xd8 !11/4 - FSUB ST0,m32real: subtract a 32-bit real from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5 - FSUBR ST0,m32real: reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6 - FDIV ST0,m32real: divide ST0 by a 32-bit real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7 - FDIVR ST0,m32real: reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13168
13169
/** Opcode 0xd8 - FPU escape group 0.
 *
 * Dispatches on the ModR/M byte: register forms (mod == 3) operate on
 * ST0/ST(i), memory forms operate on ST0 and an m32real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the opcode byte offset for the FOP register before fetching ModR/M. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: ST0 with ST(i). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: ST0 with m32real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13207
13208
/** Opcode 0xd9 /0 mem32real - FLD m32real: push a 32-bit real onto the FPU
 * stack after converting it to 80-bit format.  A full stack (ST7, i.e. the
 * register that will become ST0, not free) takes the push-overflow path.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* register 7 relative to TOP becomes the new ST0 */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13241
13242
/** Opcode 0xd9 !11/2 mem32real - FST m32real: store ST0 to memory as a 32-bit
 *  real.  On stack underflow with the invalid-operation exception masked
 *  (FCW.IM set), a negative QNaN is written to memory instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before looking at ST0 so access faults are raised first. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* #IA masked: store the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13277
13278
/** Opcode 0xd9 !11/3 - FSTP m32real: like FST m32real (see iemOp_fst_m32r)
 *  but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before looking at ST0 so access faults are raised first. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* #IA masked: store the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13313
13314
/** Opcode 0xd9 !11/4 - FLDENV m14/28byte: load the FPU environment from
 *  memory; the 14 vs 28 byte layout is selected by the effective operand
 *  size, which is passed on to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13331
13332
/** Opcode 0xd9 !11/5 - FLDCW m2byte: load a new FPU control word from memory,
 *  deferring the actual FCW update to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldcw m2byte");
    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_ARG(uint16_t, u16Fsw, 0); /* NOTE(review): holds the new FCW value despite the 'Fsw' name. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13348
13349
/** Opcode 0xd9 !11/6 - FNSTENV m14/m28byte: store the FPU environment to
 *  memory; the layout is selected by the effective operand size.
 *  NOTE(review): the mnemonic string says "fstenv" while this no-wait
 *  encoding is FNSTENV -- confirm intended logging name. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13366
13367
/** Opcode 0xd9 !11/7 - FNSTCW m2byte: store the current FPU control word to
 *  memory.  Does not touch the FPU opcode/IP state. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13384
13385
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? - FNOP: performs no operation but
 *  still checks for CR0.TS/EM and pending FPU exceptions, and updates the
 *  FPU opcode/IP registers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13403
13404
/** Opcode 0xd9 11/0 stN - FLD ST(i): push a copy of ST(i) onto the stack.
 *  An empty source register takes the push-underflow path. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13430
13431
/** Opcode 0xd9 11/3 stN - FXCH ST(i): exchange ST0 and ST(i).  When either
 *  register is empty, the underflow handling is delegated to the C
 *  implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2); /* old ST(i), destined for ST0; C1 set */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1); /* old ST0 -> ST(i) */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13460
13461
/** Opcode 0xd9 11/4, 0xdd 11/2 - FSTP ST(i): copy ST0 into ST(i) and pop.
 *  The i == 0 case is special-cased: no copy is needed, only a pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (or underflow-pop if empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 to ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13504
13505
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * An empty ST0 takes the stack-underflow path for register 0.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13535
13536
/** Opcode 0xd9 0xe0 - FCHS: change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - FABS: replace ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13551
13552
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * No register result is written; an empty ST0 takes the underflow path with
 * no destination register (UINT8_MAX).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX: no result register to write */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13581
13582
/** Opcode 0xd9 0xe4 - FTST: compare ST0 against 0.0, setting FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5 - FXAM: classify the value in ST0 via the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13597
13598
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * A full stack (register 7 relative to TOP not free) takes the push-overflow
 * path instead of calling the assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* register 7 relative to TOP becomes the new ST0 */
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13626
13627
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}


/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}


/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13680
13681
/** Opcode 0xd9 0xf0 - F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13688
13689
13690/** Opcode 0xd9 0xf1. */
13691FNIEMOP_DEF(iemOp_fylx2)
13692{
13693 IEMOP_MNEMONIC("fylx2 st0");
13694 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13695}
13696
13697
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * An empty ST0 takes the push-underflow-two path.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13727
13728
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent of ST0; replaces ST0 and pushes
 *  a second result (the two-output helper). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13735
13736
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Selects ST(i) via the low three bits (callers may pass
 *                      a fixed register number directly, e.g. 1 for ST1).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK); /* result into ST(i), then pop */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13768
13769
/** Opcode 0xd9 0xf3 - FPATAN: result in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - FXTRACT: split ST0 into exponent and significand;
 *  replaces ST0 and pushes the second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13792
13793
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack TOP pointer without
 *  touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13816
13817
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack TOP pointer without
 *  touching register contents or tag bits. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13840
13841
/** Opcode 0xd9 0xf8 - FPREM: partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9 - FYL2XP1: result in ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa - FSQRT: replace ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb - FSINCOS: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to an integer value. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd - FSCALE: scale ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe - FSIN: replace ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff - FCOS: replace ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13904
13905
/** Used by iemOp_EscF1 for the register forms 0xd9 0xe0 thru 0xd9 0xff;
 *  indexed by (opcode byte - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2, /* FYL2X; function identifier carries a historical typo. */
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13942
13943
/** Opcode 0xd9 - FPU escape group 1.
 *  Records the FPU opcode offset, fetches ModR/M, and dispatches:
 *  register form (mod == 3) by reg field / the E0..FF table, memory form
 *  by reg field to the m32r / env / control-word handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the FPU opcode byte is for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this row. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm is in the 0xe0..0xff range covered by the table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13985
13986
/** Opcode 0xda 11/0 - FCMOVB.
 *  Copies ST(i) to ST(0) when CF is set; underflow if either register is
 *  empty.  Only flags/opcode-IP bookkeeping otherwise. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) (referenced) and ST(0) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14013
14014
/** Opcode 0xda 11/1 - FCMOVE.
 *  Copies ST(i) to ST(0) when ZF is set; underflow if either register is
 *  empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14041
14042
/** Opcode 0xda 11/2 - FCMOVBE.
 *  Copies ST(i) to ST(0) when CF or ZF is set; underflow if either register
 *  is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14069
14070
/** Opcode 0xda 11/3 - FCMOVU.
 *  Copies ST(i) to ST(0) when PF is set (unordered); underflow if either
 *  register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14097
14098
14099/**
14100 * Common worker for FPU instructions working on ST0 and STn, only affecting
14101 * flags, and popping twice when done.
14102 *
14103 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14104 */
14105FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14106{
14107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14108
14109 IEM_MC_BEGIN(3, 1);
14110 IEM_MC_LOCAL(uint16_t, u16Fsw);
14111 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14112 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14113 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14114
14115 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14116 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14117 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14118 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14119 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14120 IEM_MC_ELSE()
14121 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14122 IEM_MC_ENDIF();
14123 IEM_MC_USED_FPU();
14124 IEM_MC_ADVANCE_RIP();
14125
14126 IEM_MC_END();
14127 return VINF_SUCCESS;
14128}
14129
14130
/** Opcode 0xda 0xe9 - FUCOMPP.  Unordered compare of ST0 with ST1, flags
 *  only, then pops both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14137
14138
14139/**
14140 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14141 * the result in ST0.
14142 *
14143 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14144 */
14145FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14146{
14147 IEM_MC_BEGIN(3, 3);
14148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14149 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14150 IEM_MC_LOCAL(int32_t, i32Val2);
14151 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14152 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14153 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14154
14155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14157
14158 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14159 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14160 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14161
14162 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14164 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14165 IEM_MC_ELSE()
14166 IEM_MC_FPU_STACK_UNDERFLOW(0);
14167 IEM_MC_ENDIF();
14168 IEM_MC_USED_FPU();
14169 IEM_MC_ADVANCE_RIP();
14170
14171 IEM_MC_END();
14172 return VINF_SUCCESS;
14173}
14174
14175
/** Opcode 0xda !11/0 - FIADD m32i.  ST0 += m32i via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14182
14183
/** Opcode 0xda !11/1 - FIMUL m32i.  ST0 *= m32i via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14190
14191
/** Opcode 0xda !11/2 - FICOM m32i.
 *  Compares ST0 with a 32-bit signed integer in memory; updates FSW only
 *  (no store, no pop).  Raises stack underflow if ST0 is empty. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14224
14225
/** Opcode 0xda !11/3 - FICOMP m32i.
 *  Same as FICOM m32i (shares iemAImpl_ficom_r80_by_i32) but pops ST0 when
 *  done, including on the underflow path. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14258
14259
/** Opcode 0xda !11/4 - FISUB m32i.  ST0 -= m32i via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14266
14267
/** Opcode 0xda !11/5 - FISUBR m32i.  Reversed subtract via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14274
14275
/** Opcode 0xda !11/6 - FIDIV m32i.  ST0 /= m32i via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14282
14283
/** Opcode 0xda !11/7 - FIDIVR m32i.  Reversed divide via the st0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14290
14291
/** Opcode 0xda - FPU escape group 2.
 *  Register form: FCMOVcc and (0xe9 only) FUCOMPP.
 *  Memory form: integer arithmetic/compare on a 32-bit operand. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode byte is for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14331
14332
/** Opcode 0xdb !11/0 - FILD m32i.
 *  Loads a 32-bit signed integer from memory, converts it to R80 and pushes
 *  it; raises stack push overflow if ST(7) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14364
14365
/** Opcode 0xdb !11/1 - FISTTP m32i.
 *  Stores ST0 to a 32-bit integer using truncation (iemAImpl_fistt_...) and
 *  pops.  On an empty ST0 with IM masked, writes the integer-indefinite
 *  value (INT32_MIN) before raising stack underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14400
14401
/** Opcode 0xdb !11/2 - FIST m32i.
 *  Stores ST0 to a 32-bit integer (rounding per FCW); no pop.  On an empty
 *  ST0 with IM masked, writes the integer-indefinite value (INT32_MIN)
 *  before raising stack underflow. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14436
14437
14438/** Opcode 0xdb !11/3. */
14439FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14440{
14441 IEMOP_MNEMONIC("fisttp m32i");
14442 IEM_MC_BEGIN(3, 2);
14443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14444 IEM_MC_LOCAL(uint16_t, u16Fsw);
14445 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14446 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14447 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14448
14449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14451 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14452 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14453
14454 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14455 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14456 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14457 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14458 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14459 IEM_MC_ELSE()
14460 IEM_MC_IF_FCW_IM()
14461 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14462 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14463 IEM_MC_ENDIF();
14464 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14465 IEM_MC_ENDIF();
14466 IEM_MC_USED_FPU();
14467 IEM_MC_ADVANCE_RIP();
14468
14469 IEM_MC_END();
14470 return VINF_SUCCESS;
14471}
14472
14473
/** Opcode 0xdb !11/5 - FLD m80real.
 *  Loads an 80-bit real from memory and pushes it; raises stack push
 *  overflow if ST(7) is occupied. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14505
14506
/** Opcode 0xdb !11/7 - FSTP m80real.
 *  Stores ST0 to an 80-bit real in memory and pops.  On an empty ST0 with
 *  IM masked, writes negative QNaN (real indefinite) before raising stack
 *  underflow. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14541
14542
/** Opcode 0xdb 11/0 - FCMOVNB.
 *  Copies ST(i) to ST(0) when CF is clear; underflow if either register is
 *  empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14569
14570
/** Opcode 0xdb 11/1 - FCMOVNE.
 *  Copies ST(i) to ST(0) when ZF is clear; underflow if either register is
 *  empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14597
14598
/** Opcode 0xdb 11/2 - FCMOVNBE.
 *  Copies ST(i) to ST(0) when both CF and ZF are clear; underflow if either
 *  register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14625
14626
14627/** Opcode 0xdb 11/3. */
14628FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14629{
14630 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14632
14633 IEM_MC_BEGIN(0, 1);
14634 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14635
14636 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14637 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14638
14639 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14640 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14641 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14642 IEM_MC_ENDIF();
14643 IEM_MC_UPDATE_FPU_OPCODE_IP();
14644 IEM_MC_ELSE()
14645 IEM_MC_FPU_STACK_UNDERFLOW(0);
14646 IEM_MC_ENDIF();
14647 IEM_MC_USED_FPU();
14648 IEM_MC_ADVANCE_RIP();
14649
14650 IEM_MC_END();
14651 return VINF_SUCCESS;
14652}
14653
14654
/** Opcode 0xdb 0xe0 - FNENI.
 *  8087-only interrupt-enable instruction; treated as a no-op here (only the
 *  device-not-available check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14666
14667
/** Opcode 0xdb 0xe1 - FNDISI.
 *  8087-only interrupt-disable instruction; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14679
14680
/** Opcode 0xdb 0xe2 - FNCLEX.
 *  Clears the FSW exception bits without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14694
14695
/** Opcode 0xdb 0xe3 - FNINIT.
 *  Defers to the C implementation of FINIT without exception checking. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14703
14704
/** Opcode 0xdb 0xe4 - FNSETPM.
 *  80287-only; ignored (no-op) on later FPUs, which is what we emulate. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14716
14717
/** Opcode 0xdb 0xe5 - FRSTPM.
 *  80287XL-only; newer CPUs raise \#UD, which is the active behavior (the
 *  no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14733
14734
/** Opcode 0xdb 11/5 - FUCOMI.  Unordered compare setting EFLAGS, no pop;
 *  deferred to the common fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14741
14742
/** Opcode 0xdb 11/6 - FCOMI.  Ordered compare setting EFLAGS, no pop;
 *  deferred to the common fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14749
14750
/** Opcode 0xdb - FPU escape group 3.
 *  Register form: FCMOVNcc, the 0xe0-0xe7 control row (FNENI/FNDISI/FNCLEX/
 *  FNINIT/FNSETPM/FRSTPM), FUCOMI and FCOMI.
 *  Memory form: 32-bit integer loads/stores plus 80-bit real load/store. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the FPU opcode byte is for FOP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The 0xe0..0xe7 control-instruction row. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14800
14801
14802/**
14803 * Common worker for FPU instructions working on STn and ST0, and storing the
14804 * result in STn unless IE, DE or ZE was raised.
14805 *
14806 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14807 */
14808FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14809{
14810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14811
14812 IEM_MC_BEGIN(3, 1);
14813 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14814 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14815 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14816 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14817
14818 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14819 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14820
14821 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14822 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14823 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14824 IEM_MC_ELSE()
14825 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14826 IEM_MC_ENDIF();
14827 IEM_MC_USED_FPU();
14828 IEM_MC_ADVANCE_RIP();
14829
14830 IEM_MC_END();
14831 return VINF_SUCCESS;
14832}
14833
14834
/** Opcode 0xdc 11/0 - FADD ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14841
14842
/** Opcode 0xdc 11/1 - FMUL ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14849
14850
/** Opcode 0xdc 11/4 - FSUBR ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14857
14858
/** Opcode 0xdc 11/5 - FSUB ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14865
14866
/** Opcode 0xdc 11/6 - FDIVR ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14873
14874
/** Opcode 0xdc 11/7 - FDIV ST(i),ST0.  Result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14881
14882
14883/**
14884 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14885 * memory operand, and storing the result in ST0.
14886 *
14887 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14888 */
14889FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14890{
14891 IEM_MC_BEGIN(3, 3);
14892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14893 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14894 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14895 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14896 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14897 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14898
14899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14903
14904 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14905 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14906 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14907 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14908 IEM_MC_ELSE()
14909 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14910 IEM_MC_ENDIF();
14911 IEM_MC_USED_FPU();
14912 IEM_MC_ADVANCE_RIP();
14913
14914 IEM_MC_END();
14915 return VINF_SUCCESS;
14916}
14917
14918
/** Opcode 0xdc !11/0. FADD m64fp. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    /* ST(0) <- ST(0) + m64; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14925
14926
/** Opcode 0xdc !11/1. FMUL m64fp. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    /* ST(0) <- ST(0) * m64; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14933
14934
/** Opcode 0xdc !11/2. FCOM m64fp - compare ST(0) with a 64-bit memory operand,
 *  updating FSW only (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Empty ST(0) is a stack underflow; otherwise compare and merge the FSW. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14967
14968
/** Opcode 0xdc !11/3. FCOMP m64fp - like FCOM m64fp but pops the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison as FCOM, but the _THEN_POP variants pop ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15001
15002
/** Opcode 0xdc !11/4. FSUB m64fp. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    /* ST(0) <- ST(0) - m64; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15009
15010
/** Opcode 0xdc !11/5. FSUBR m64fp. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    /* Reversed subtract; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15017
15018
/** Opcode 0xdc !11/6. FDIV m64fp. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    /* ST(0) <- ST(0) / m64; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15025
15026
/** Opcode 0xdc !11/7. FDIVR m64fp. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    /* Reversed divide; defer to the common ST0/m64r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15033
15034
/** Opcode 0xdc. Escape byte DC - dispatch on the ModR/M reg field.
 *  Register forms (mod == 3) operate on ST(i),ST(0); memory forms use m64fp. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the opcode offset for FPU instruction-pointer reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15071
15072
/** Opcode 0xdd !11/0. FLD m64fp - push a 64-bit memory float onto the stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Register 7 (relative to TOP) must be free for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15104
15105
/** Opcode 0xdd !11/1. FISTTP m64int - store ST(0) truncated toward zero and pop.
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15140
15141
/** Opcode 0xdd !11/2. FST m64fp - store ST(0) as a 64-bit float (no pop).
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15176
15177
15178
15179
/** Opcode 0xdd !11/3. FSTP m64fp - store ST(0) as a 64-bit float and pop.
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15214
15215
/** Opcode 0xdd !11/4. FRSTOR m94/108byte - restore the full FPU state.
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* Heavy lifting done by the C implementation (format depends on op size). */
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15232
15233
/** Opcode 0xdd !11/6. FNSAVE m94/108byte - save the full FPU state (no wait).
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* Heavy lifting done by the C implementation (format depends on op size). */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15251
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word (no wait).
 *  (Comment previously said !11/0; the 0xdd dispatcher routes this via reg==7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15275
15276
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The low three ModR/M bits select ST(i). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15298
15299
/** Opcode 0xdd 11/2. FST ST(i) - copy ST(0) to ST(i).
 *  (Comment previously said 11/1; the 0xdd dispatcher routes this via reg==2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST(0) in a result (FSW=0) so the common store path applies. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15322
15323
15324/** Opcode 0xdd 11/3. */
15325FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15326{
15327 IEMOP_MNEMONIC("fcom st0,stN");
15328 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15329}
15330
15331
15332/** Opcode 0xdd 11/4. */
15333FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15334{
15335 IEMOP_MNEMONIC("fcomp st0,stN");
15336 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15337}
15338
15339
/** Opcode 0xdd. Escape byte DD - dispatch on the ModR/M reg field.
 *  Register forms (mod == 3): FFREE/FXCH/FST/FSTP/FUCOM/FUCOMP;
 *  memory forms: m64fp loads/stores, FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the opcode offset for FPU instruction-pointer reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15376
15377
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15384
15385
/** Opcode 0xde 11/1. FMULP ST(i),ST(0) - multiply and pop.
 *  (Comment previously said 11/0; the 0xde dispatcher routes this via reg==1.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15392
15393
15394/** Opcode 0xde 0xd9. */
15395FNIEMOP_DEF(iemOp_fcompp)
15396{
15397 IEMOP_MNEMONIC("fucompp st0,stN");
15398 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15399}
15400
15401
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15408
15409
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15416
15417
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15424
15425
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* Defer to the popping variant of the ST(i),ST(0) worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15432
15433
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm       The ModR/M byte (memory form, mod != 3).
 * @param   pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Empty ST(0) means stack underflow; otherwise compute and store in ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15469
15470
/** Opcode 0xde !11/0. FIADD m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* ST(0) <- ST(0) + (int16)m16; defer to the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15477
15478
/** Opcode 0xde !11/1. FIMUL m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* ST(0) <- ST(0) * (int16)m16; defer to the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15485
15486
/** Opcode 0xde !11/2. FICOM m16int - compare ST(0) with a 16-bit integer,
 *  updating FSW only (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15519
15520
/** Opcode 0xde !11/3. FICOMP m16int - like FICOM m16int but pops the stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison as FICOM, but the _THEN_POP variants pop ST(0). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15553
15554
/** Opcode 0xde !11/4. FISUB m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* ST(0) <- ST(0) - (int16)m16; defer to the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15561
15562
/** Opcode 0xde !11/5. FISUBR m16int. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* Reversed subtract; defer to the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15569
15570
15571/** Opcode 0xde !11/6. */
15572FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15573{
15574 IEMOP_MNEMONIC("fiadd m16i");
15575 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15576}
15577
15578
15579/** Opcode 0xde !11/7. */
15580FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15581{
15582 IEMOP_MNEMONIC("fiadd m16i");
15583 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15584}
15585
15586
/** Opcode 0xde. Escape byte DE - dispatch on the ModR/M reg field.
 *  Register forms (mod == 3): popping arithmetic on ST(i),ST(0) plus FCOMPP;
 *  memory forms: 16-bit integer operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the opcode offset for FPU instruction-pointer reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only DE D9 is FCOMPP; other /3 register encodings are invalid. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15625
15626
/** Opcode 0xdf 11/0. FFREEP ST(i).
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free ST(i) and then increment TOP, i.e. pop without storing. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15648
15649
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX (no wait). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15665
15666
15667/** Opcode 0xdf 11/5. */
15668FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15669{
15670 IEMOP_MNEMONIC("fcomip st0,stN");
15671 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15672}
15673
15674
/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i) - compare into EFLAGS and pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Deferred to the C implementation; fPop=true distinguishes FCOMIP from FCOMI. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15681
15682
/** Opcode 0xdf !11/0. FILD m16int - push a 16-bit integer onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 (relative to TOP) must be free for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15714
15715
/** Opcode 0xdf !11/1. FISTTP m16int - store ST(0) truncated toward zero and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15750
15751
15752/** Opcode 0xdf !11/2. */
15753FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15754{
15755 IEMOP_MNEMONIC("fistp m16i");
15756 IEM_MC_BEGIN(3, 2);
15757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15758 IEM_MC_LOCAL(uint16_t, u16Fsw);
15759 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15760 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15761 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15762
15763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15765 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15766 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15767
15768 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15769 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15770 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15771 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15772 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15773 IEM_MC_ELSE()
15774 IEM_MC_IF_FCW_IM()
15775 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15776 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15777 IEM_MC_ENDIF();
15778 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15779 IEM_MC_ENDIF();
15780 IEM_MC_USED_FPU();
15781 IEM_MC_ADVANCE_RIP();
15782
15783 IEM_MC_END();
15784 return VINF_SUCCESS;
15785}
15786
15787
/** Opcode 0xdf !11/3. FISTP m16int - store ST(0) as a 16-bit integer and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only if the store succeeds. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15822
15823
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value. Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15826
15827
/** Opcode 0xdf !11/5.
 *
 * FILD m64i - convert a 64-bit signed integer from memory and push the
 * result onto the FPU register stack.
 *
 * @param   bRm     The ModR/M byte (memory form, mod != 3).
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The push goes into what will become ST(0); check that physical register
       (current ST(7)) is free, otherwise it is a stack push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15859
15860
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as 80-bit packed BCD and pop. Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15863
15864
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer and pop the
 * FPU register stack afterwards.
 *
 * @param   bRm     The ModR/M byte (memory form, mod != 3).
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Decode first, raise pending exceptions before any memory access. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if IM is masked, then flag
           stack underflow (still popping). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15899
15900
/** Opcode 0xdf.
 *
 * FPU escape group 7. Dispatches on the ModR/M byte: register forms
 * (mod == 3) by the reg field, memory forms to the /0../7 workers
 * (fild/fisttp/fist/fistp m16i, fbld, fild m64i, fbstp, fistp m64i).
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 is valid here: FNSTSW AX. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15938
15939
/** Opcode 0xe0.
 *
 * LOOPNE/LOOPNZ Jb - decrement xCX (width selected by the effective address
 * size) and take the relative 8-bit jump while xCX != 0 and ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the address-size attribute. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15986
15987
/** Opcode 0xe1.
 *
 * LOOPE/LOOPZ Jb - decrement xCX (width selected by the effective address
 * size) and take the relative 8-bit jump while xCX != 0 and ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the address-size attribute. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16034
16035
/** Opcode 0xe2.
 *
 * LOOP Jb - decrement xCX (width selected by the effective address size)
 * and take the relative 8-bit jump while xCX != 0.
 *
 * Each width has a fast path for the degenerate "loop to self" encoding:
 * when the displacement jumps back exactly to the start of this instruction
 * (i8Imm == -instruction-length, i.e. -offOpcode), the loop would just spin
 * xCX down to zero, so the counter is cleared and RIP advanced in one go.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self? */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Jump-to-self: skip the busy loop by zeroing the counter. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self? */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Jump-to-self: skip the busy loop by zeroing the counter. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not a jump-to-self? */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Jump-to-self: skip the busy loop by zeroing the counter. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16109
16110
/** Opcode 0xe3.
 *
 * JCXZ/JECXZ/JRCXZ Jb - take the relative 8-bit jump when the counter
 * register (width selected by the effective address size) is zero.
 * Note the inverted branch order: the "not zero" case falls through.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16154
16155
16156/** Opcode 0xe4 */
16157FNIEMOP_DEF(iemOp_in_AL_Ib)
16158{
16159 IEMOP_MNEMONIC("in eAX,Ib");
16160 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16161 IEMOP_HLP_NO_LOCK_PREFIX();
16162 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16163}
16164
16165
/** Opcode 0xe5.
 *
 * IN eAX, Ib - read a word/dword (by effective operand size) from the I/O
 * port given by the immediate into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16174
16175
/** Opcode 0xe6.
 *
 * OUT Ib, AL - write AL to the I/O port given by the immediate (1 byte).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16184
16185
/** Opcode 0xe7.
 *
 * OUT Ib, eAX - write AX/EAX (by effective operand size) to the I/O port
 * given by the immediate.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16194
16195
/** Opcode 0xe8.
 *
 * CALL Jv - near relative call with a 16/32-bit displacement; in 64-bit mode
 * the 32-bit displacement is sign-extended to 64 bits. Defers to the per
 * operand-size C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Displacement is signed; cast for the rel16 implementation. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit displacement, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16224
16225
/** Opcode 0xe9.
 *
 * JMP Jv - near relative jump with a 16/32-bit displacement; 64-bit mode
 * shares the 32-bit path since the displacement stays 32 bits there.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT: /* same encoding as 32-bit: 32-bit displacement */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16255
16256
/** Opcode 0xea.
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32). Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT). Decodes offset then selector and defers to the far
 * jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16273
16274
/** Opcode 0xeb.
 *
 * JMP Jb - short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16288
16289
/** Opcode 0xec.
 *
 * IN AL, DX - read one byte from the I/O port in DX into AL.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16297
16298
/** Opcode 0xed.
 *
 * IN eAX, DX - read a word/dword (by effective operand size) from the I/O
 * port in DX into AX/EAX.
 *
 * NOTE(review): the symbol is missing the 'in_' prefix (cf. iemOp_in_AL_DX);
 * renaming it would require updating the opcode table elsewhere in the file.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16306
16307
/** Opcode 0xee.
 *
 * OUT DX, AL - write AL to the I/O port in DX (1 byte).
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16315
16316
/** Opcode 0xef.
 *
 * OUT DX, eAX - write AX/EAX (by effective operand size) to the I/O port in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16324
16325
/** Opcode 0xf0.
 *
 * LOCK prefix - records IEM_OP_PRF_LOCK in the prefix state and recursively
 * decodes the following opcode byte via the one-byte dispatch table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16335
16336
/** Opcode 0xf1.
 *
 * INT1/ICEBP - raises a #DB via the common software-interrupt implementation
 * (fIsBpInstr = false, so it is not treated as INT3).
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16345
16346
/** Opcode 0xf2.
 *
 * REPNE/REPNZ prefix - records IEM_OP_PRF_REPNZ (clearing any prior REPZ)
 * and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16358
16359
/** Opcode 0xf3.
 *
 * REP/REPE/REPZ prefix - records IEM_OP_PRF_REPZ (clearing any prior REPNZ)
 * and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16371
16372
/** Opcode 0xf4.
 *
 * HLT - defers to the C implementation. Contains a disabled ('&& 0')
 * debug hack for dynamically switching the target CPU to 286 emulation.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC && 0 /* disabled debug aid */
    if (   pIemCpu->uTargetCpu == IEMTARGETCPU_CURRENT
        && pIemCpu->CTX_SUFF(pCtx)->cs.Sel <= 1000)
    {
        pIemCpu->uTargetCpu = IEMTARGETCPU_286;
        LogAlways(("\niemOp_hlt: Enabled CPU restrictions!\n\n"));
    }
#endif
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16387
16388
/** Opcode 0xf5.
 *
 * CMC - complement the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16400
16401
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register operands are modified in place via a register reference; memory
 * operands are mapped read-write and support the LOCK prefix by selecting
 * the locked worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16445
16446
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the common GReg worker; memory operands
 * are handled here per effective operand size, mapped read-write with LOCK
 * prefix support via the locked worker variants.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* LOCK prefix selects the atomic worker variant. */
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16525
16526
/** Opcode 0xf6 /0.
 *
 * TEST Eb, Ib - AND of an 8-bit operand with an immediate, flags only.
 * The memory form maps the operand read-only since TEST never writes back.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* One immediate byte still to come after the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* Read-only mapping: TEST only computes flags. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16574
16575
/** Opcode 0xf7 /0.
 *
 * TEST Ev, Iv - AND of a 16/32/64-bit operand with an immediate, flags only.
 * The 64-bit form sign-extends a 32-bit immediate. Memory operands are
 * mapped read-only since TEST never writes back.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 64-bit immediate form is a sign-extended 32-bit value. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Two immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes still to come after the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes (sign-extended) still to come. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16712
16713
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the 8-bit MUL/IMUL/DIV/IDIV forms: the assembly worker
 * operates on AX (implicit destination) and returns non-zero to signal a
 * divide error (#DE).
 *
 * NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX() is invoked here and again in both
 * branches below - the repeats look redundant.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero return from the worker means a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero return from the worker means a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16768
16769
16770/** Opcode 0xf7 /4, /5, /6 and /7. */
16771FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
16772{
16773 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
16775
16776 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16777 {
16778 /* register access */
16779 switch (pIemCpu->enmEffOpSize)
16780 {
16781 case IEMMODE_16BIT:
16782 {
16783 IEMOP_HLP_NO_LOCK_PREFIX();
16784 IEM_MC_BEGIN(4, 1);
16785 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16786 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16787 IEM_MC_ARG(uint16_t, u16Value, 2);
16788 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16789 IEM_MC_LOCAL(int32_t, rc);
16790
16791 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16792 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16793 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16794 IEM_MC_REF_EFLAGS(pEFlags);
16795 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16796 IEM_MC_IF_LOCAL_IS_Z(rc) {
16797 IEM_MC_ADVANCE_RIP();
16798 } IEM_MC_ELSE() {
16799 IEM_MC_RAISE_DIVIDE_ERROR();
16800 } IEM_MC_ENDIF();
16801
16802 IEM_MC_END();
16803 return VINF_SUCCESS;
16804 }
16805
16806 case IEMMODE_32BIT:
16807 {
16808 IEMOP_HLP_NO_LOCK_PREFIX();
16809 IEM_MC_BEGIN(4, 1);
16810 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16811 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16812 IEM_MC_ARG(uint32_t, u32Value, 2);
16813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16814 IEM_MC_LOCAL(int32_t, rc);
16815
16816 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16817 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16818 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16819 IEM_MC_REF_EFLAGS(pEFlags);
16820 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16821 IEM_MC_IF_LOCAL_IS_Z(rc) {
16822 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16823 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16824 IEM_MC_ADVANCE_RIP();
16825 } IEM_MC_ELSE() {
16826 IEM_MC_RAISE_DIVIDE_ERROR();
16827 } IEM_MC_ENDIF();
16828
16829 IEM_MC_END();
16830 return VINF_SUCCESS;
16831 }
16832
16833 case IEMMODE_64BIT:
16834 {
16835 IEMOP_HLP_NO_LOCK_PREFIX();
16836 IEM_MC_BEGIN(4, 1);
16837 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16838 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16839 IEM_MC_ARG(uint64_t, u64Value, 2);
16840 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16841 IEM_MC_LOCAL(int32_t, rc);
16842
16843 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16844 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16845 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16846 IEM_MC_REF_EFLAGS(pEFlags);
16847 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16848 IEM_MC_IF_LOCAL_IS_Z(rc) {
16849 IEM_MC_ADVANCE_RIP();
16850 } IEM_MC_ELSE() {
16851 IEM_MC_RAISE_DIVIDE_ERROR();
16852 } IEM_MC_ENDIF();
16853
16854 IEM_MC_END();
16855 return VINF_SUCCESS;
16856 }
16857
16858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16859 }
16860 }
16861 else
16862 {
16863 /* memory access. */
16864 switch (pIemCpu->enmEffOpSize)
16865 {
16866 case IEMMODE_16BIT:
16867 {
16868 IEMOP_HLP_NO_LOCK_PREFIX();
16869 IEM_MC_BEGIN(4, 2);
16870 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16871 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16872 IEM_MC_ARG(uint16_t, u16Value, 2);
16873 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16875 IEM_MC_LOCAL(int32_t, rc);
16876
16877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16878 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
16879 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16880 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16881 IEM_MC_REF_EFLAGS(pEFlags);
16882 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16883 IEM_MC_IF_LOCAL_IS_Z(rc) {
16884 IEM_MC_ADVANCE_RIP();
16885 } IEM_MC_ELSE() {
16886 IEM_MC_RAISE_DIVIDE_ERROR();
16887 } IEM_MC_ENDIF();
16888
16889 IEM_MC_END();
16890 return VINF_SUCCESS;
16891 }
16892
16893 case IEMMODE_32BIT:
16894 {
16895 IEMOP_HLP_NO_LOCK_PREFIX();
16896 IEM_MC_BEGIN(4, 2);
16897 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16898 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16899 IEM_MC_ARG(uint32_t, u32Value, 2);
16900 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16902 IEM_MC_LOCAL(int32_t, rc);
16903
16904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16905 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
16906 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16907 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16908 IEM_MC_REF_EFLAGS(pEFlags);
16909 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16910 IEM_MC_IF_LOCAL_IS_Z(rc) {
16911 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16912 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16913 IEM_MC_ADVANCE_RIP();
16914 } IEM_MC_ELSE() {
16915 IEM_MC_RAISE_DIVIDE_ERROR();
16916 } IEM_MC_ENDIF();
16917
16918 IEM_MC_END();
16919 return VINF_SUCCESS;
16920 }
16921
16922 case IEMMODE_64BIT:
16923 {
16924 IEMOP_HLP_NO_LOCK_PREFIX();
16925 IEM_MC_BEGIN(4, 2);
16926 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16927 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16928 IEM_MC_ARG(uint64_t, u64Value, 2);
16929 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16931 IEM_MC_LOCAL(int32_t, rc);
16932
16933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16934 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
16935 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16936 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16937 IEM_MC_REF_EFLAGS(pEFlags);
16938 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16939 IEM_MC_IF_LOCAL_IS_Z(rc) {
16940 IEM_MC_ADVANCE_RIP();
16941 } IEM_MC_ELSE() {
16942 IEM_MC_RAISE_DIVIDE_ERROR();
16943 } IEM_MC_ENDIF();
16944
16945 IEM_MC_END();
16946 return VINF_SUCCESS;
16947 }
16948
16949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16950 }
16951 }
16952}
16953
/** Opcode 0xf6 - Group 3, byte operands. Dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* /0: TEST Eb, Ib - handled by its own worker (has an immediate). */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* SF/ZF/AF/PF are undefined after MUL; tell the verifier to ignore them. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All arithmetic flags are undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16990
16991
/** Opcode 0xf7 - Group 3, word/dword/qword operands. Dispatches on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* /0: TEST Ev, Iz - handled by its own worker (has an immediate). */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF/ZF/AF/PF are undefined after MUL; tell the verifier to ignore them. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17028
17029
/** Opcode 0xf8 - CLC: clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17041
17042
/** Opcode 0xf9 - STC: set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17054
17055
/** Opcode 0xfa - CLI: deferred to a C implementation (privilege/IOPL checks involved). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17063
17064
/** Opcode 0xfb - STI: deferred to a C implementation (privilege/IOPL checks involved). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17071
17072
/** Opcode 0xfc - CLD: clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17084
17085
/** Opcode 0xfd - STD: set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17097
17098
17099/** Opcode 0xfe. */
17100FNIEMOP_DEF(iemOp_Grp4)
17101{
17102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17103 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17104 {
17105 case 0:
17106 IEMOP_MNEMONIC("inc Ev");
17107 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17108 case 1:
17109 IEMOP_MNEMONIC("dec Ev");
17110 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17111 default:
17112 IEMOP_MNEMONIC("grp4-ud");
17113 return IEMOP_RAISE_INVALID_OPCODE();
17114 }
17115}
17116
17117
/**
 * Opcode 0xff /2 - CALL near indirect, target from register or memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17199
17200typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17201
17202FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17203{
17204 /* Registers? How?? */
17205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17206 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17207
17208 /* Far pointer loaded from memory. */
17209 switch (pIemCpu->enmEffOpSize)
17210 {
17211 case IEMMODE_16BIT:
17212 IEM_MC_BEGIN(3, 1);
17213 IEM_MC_ARG(uint16_t, u16Sel, 0);
17214 IEM_MC_ARG(uint16_t, offSeg, 1);
17215 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17219 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17220 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17221 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17222 IEM_MC_END();
17223 return VINF_SUCCESS;
17224
17225 case IEMMODE_64BIT:
17226 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17227 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17228 * and call far qword [rsp] encodings. */
17229 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17230 {
17231 IEM_MC_BEGIN(3, 1);
17232 IEM_MC_ARG(uint16_t, u16Sel, 0);
17233 IEM_MC_ARG(uint64_t, offSeg, 1);
17234 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17238 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17239 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17240 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17241 IEM_MC_END();
17242 return VINF_SUCCESS;
17243 }
17244 /* AMD falls thru. */
17245
17246 case IEMMODE_32BIT:
17247 IEM_MC_BEGIN(3, 1);
17248 IEM_MC_ARG(uint16_t, u16Sel, 0);
17249 IEM_MC_ARG(uint32_t, offSeg, 1);
17250 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17254 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17255 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17256 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17257 IEM_MC_END();
17258 return VINF_SUCCESS;
17259
17260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17261 }
17262}
17263
17264
/**
 * Opcode 0xff /3 - CALL far indirect; delegates to the common far-pointer
 * worker with the far-call C implementation.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17274
17275
/**
 * Opcode 0xff /4 - JMP near indirect, target from register or memory.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17357
17358
/**
 * Opcode 0xff /5 - JMP far indirect; delegates to the common far-pointer
 * worker with the far-jump C implementation.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17368
17369
/**
 * Opcode 0xff /6 - PUSH Ev: push a word/dword/qword register or memory operand.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17423
17424
/** Opcode 0xff - Group 5: INC/DEC/CALL/JMP/PUSH dispatch on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            /* /7 is not assigned on any CPU. */
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17453
17454
17455
/**
 * The one-byte opcode dispatch table - one decoder function per opcode byte,
 * indexed directly by the opcode value (declared extern at the top of the file).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17523
17524
17525/** @} */
17526
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette