VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60847

Last change on this file since 60847 was 60776, checked in by vboxsync, 9 years ago

IEM: bugref:8118: sidt/sgdt fix.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.6 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60776 2016-05-02 08:11:36Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself.  For the register form the operation works
 * directly on the destination register; for the memory form the destination
 * is mapped (read/write, or read-only when no locked variant exists) and the
 * EFLAGS are committed only after the memory write succeeds.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL pfnLockedU8 (CMP, TEST) marks operations that never write the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself and switches on the effective operand size.
 * For the memory forms, EFLAGS live in a stack local so they can be committed
 * after the memory write has succeeded.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write the destination, so it must not clear the
                   high dword either (32-bit writes normally zero bits 63:32). */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as a proxy for all sizes here — assumes the locked
           variants are all present or all absent (NULL for CMP/TEST); TODO confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Since the destination is always a register, no LOCK prefix is allowed and
 * the memory operand (if any) is only read.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * The memory operand (if any) is only read, so no LOCK prefix is allowed and
 * no mapping/commit dance is needed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* NOTE(review): unconditional high-dword clear here, unlike the
                   rm_rv worker — presumably TEST never routes through this
                   Gv,Ev form; confirm against the opcode table. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * The immediate is fetched before any state changes; AL is referenced
 * directly as the destination.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a dword sign-extended to 64 bits, matching
 * the Iz operand encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so don't clear RAX[63:32] for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Shared handler for invalid one-byte opcodes: raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDTR selector to a register or
 *  16-bit memory operand.  The register form honours the operand size; the
 *  memory form always stores 16 bits. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector to a register
 *  or 16-bit memory operand.  Mirrors the SLDT implementation above. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDTR from a 16-bit register or memory
 *  selector.  The heavy lifting (CPL/selector checks) is deferred to
 *  iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit register
 *  or memory selector; iemCImpl_ltr performs the privilege and descriptor
 *  validation. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory (memory form only);
 *  the descriptor store itself is done by iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0 (mod=3) - VMCALL.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0 (mod=3) - VMLAUNCH.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0 (mod=3) - VMRESUME.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0 (mod=3) - VMXOFF.  Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory (memory form only);
 *  mirrors the SGDT handler and defers to iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1 (mod=3) - MONITOR.  The effective segment is passed on
 *  so iemCImpl_monitor can resolve the address in DS:RAX (or override). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1 (mod=3) - MWAIT.  Entirely handled by iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
875/** Opcode 0x0f 0x01 /2. */
876FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
877{
878 IEMOP_MNEMONIC("lgdt");
879 IEMOP_HLP_64BIT_OP_SIZE();
880 IEM_MC_BEGIN(3, 1);
881 IEM_MC_ARG(uint8_t, iEffSeg, 0);
882 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
883 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
886 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
887 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
888 IEM_MC_END();
889 return VINF_SUCCESS;
890}
891
892
/** Opcode 0x0f 0x01 0xd0 - XGETBV.  Only valid when the guest CPU reports
 *  XSAVE support; otherwise raises \#UD.  Further checks (e.g. CR4.OSXSAVE)
 *  are presumably done in iemCImpl_xgetbv — confirm there. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1 - XSETBV.  Only valid when the guest CPU reports
 *  XSAVE support; otherwise raises \#UD.  Privilege checks are presumably in
 *  iemCImpl_xsetbv — confirm there. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
919/** Opcode 0x0f 0x01 /3. */
920FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
921{
922 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
923 ? IEMMODE_64BIT
924 : pIemCpu->enmEffOpSize;
925 IEM_MC_BEGIN(3, 1);
926 IEM_MC_ARG(uint8_t, iEffSeg, 0);
927 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
928 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
931 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
932 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
933 IEM_MC_END();
934 return VINF_SUCCESS;
935}
936
937
/*
 * AMD-V (SVM) instructions, 0x0f 0x01 with mod=3 reg=3 (0xd8..0xdf).
 * All are currently unimplemented stubs raising #UD via FNIEMOP_UD_STUB.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: load MSW value from the GPR and defer to the
           C implementation which does the privilege/CR0 work. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: fetch 16 bits from the effective address. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1058
1059
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG m - invalidate the TLB entry for the given address.  Only
       reached with a memory operand; the register encodings of /7 are
       dispatched to swapgs/rdtscp by iemOp_Grp7. */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7 (register form, rm=0). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - 64-bit mode only; work deferred to the C implementation. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7 (register form, rm=1). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP - not implemented yet; reports a stub and fails decoding. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher: the reg field selects the instruction; several
     * encodings differ between the memory form (descriptor-table ops) and
     * the register form (VMX/SVM/monitor/etc. encoded via the rm field).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) or VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (mem) or monitor/mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) or xgetbv/xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) or AMD SVM instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned encoding. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both forms handled by the worker. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) or swapgs/rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * Fetches a 16-bit selector from a register or memory and defers the actual
 * access-rights / limit lookup to iemCImpl_LarLsl_u16 / _u64.  The 32-bit
 * operand size shares the 64-bit path (the destination is referenced as a
 * 64-bit GPR).
 *
 * @param fIsLar    true for LAR, false for LSL.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source (mod == 3). */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Sel,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Sel,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is always a 16-bit read. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Sel,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Sel,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02 - LAR Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03 - LSL Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1289
1290
/** Opcode 0x0f 0x05 - SYSCALL; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06 - CLTS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07 - SYSRET; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08 - INVD; not implemented yet. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD - checks CPL 0, otherwise treated as a NOP here (no cache to
       write back in the emulation). */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b - UD2, the architecturally defined invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid; only memory operands are prefetchable. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for its side effects, then do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e - FEMMS; not implemented yet. */
FNIEMOP_STUB(iemOp_femms);


/* 3DNow! operation stubs, dispatched by iemOp_3Dnow via the immediate
   suffix byte.  None are implemented yet. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /*
     * 3DNow! escape: the actual operation is selected by the byte that
     * follows the ModR/M encoding.  Raises #UD when the guest CPU profile
     * lacks 3DNow!.
     */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/* SSE/SSE2 move instruction stubs (prefix-dependent forms); not implemented. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18 - Group 16 (SSE prefetch hints). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address for its faults, then do nothing. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register form is not a valid prefetch encoding. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP; decodes ModR/M then does nothing. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Still calculate the effective address so addressing faults apply. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1578
1579
/** Opcode 0x0f 0x20 - MOV Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accessible. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
/** Opcode 0x0f 0x21 - MOV Rd,Dd (read debug register). */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): the write form (0x0f 0x23) uses
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX here - confirm the difference
       is intentional. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R has no valid debug register to select. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1626
1627
/** Opcode 0x0f 0x22 - MOV Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are accessible. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23 - MOV Dd,Rd (write debug register). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R has no valid debug register to select. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers, 386/486 only). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26 - MOV Td,Rd (test registers, 386/486 only). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/* SSE/SSE2 move/convert/compare stubs (0x0f 0x28..0x2f); not implemented. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1712
1713
/** Opcode 0x0f 0x30 - WRMSR; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31 - RDTSC; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32 - RDMSR; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1739
1740
/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1755
1756/**
1757 * Implements a conditional move.
1758 *
1759 * Wish there was an obvious way to do this where we could share and reduce
1760 * code bloat.
1761 *
1762 * @param a_Cnd The conditional "microcode" operation.
1763 */
1764#define CMOV_X(a_Cnd) \
1765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1766 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1767 { \
1768 switch (pIemCpu->enmEffOpSize) \
1769 { \
1770 case IEMMODE_16BIT: \
1771 IEM_MC_BEGIN(0, 1); \
1772 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1773 a_Cnd { \
1774 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1775 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1776 } IEM_MC_ENDIF(); \
1777 IEM_MC_ADVANCE_RIP(); \
1778 IEM_MC_END(); \
1779 return VINF_SUCCESS; \
1780 \
1781 case IEMMODE_32BIT: \
1782 IEM_MC_BEGIN(0, 1); \
1783 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1784 a_Cnd { \
1785 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1786 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1787 } IEM_MC_ELSE() { \
1788 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1789 } IEM_MC_ENDIF(); \
1790 IEM_MC_ADVANCE_RIP(); \
1791 IEM_MC_END(); \
1792 return VINF_SUCCESS; \
1793 \
1794 case IEMMODE_64BIT: \
1795 IEM_MC_BEGIN(0, 1); \
1796 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1797 a_Cnd { \
1798 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1799 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1800 } IEM_MC_ENDIF(); \
1801 IEM_MC_ADVANCE_RIP(); \
1802 IEM_MC_END(); \
1803 return VINF_SUCCESS; \
1804 \
1805 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1806 } \
1807 } \
1808 else \
1809 { \
1810 switch (pIemCpu->enmEffOpSize) \
1811 { \
1812 case IEMMODE_16BIT: \
1813 IEM_MC_BEGIN(0, 2); \
1814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1815 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1817 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1818 a_Cnd { \
1819 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1820 } IEM_MC_ENDIF(); \
1821 IEM_MC_ADVANCE_RIP(); \
1822 IEM_MC_END(); \
1823 return VINF_SUCCESS; \
1824 \
1825 case IEMMODE_32BIT: \
1826 IEM_MC_BEGIN(0, 2); \
1827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1828 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1830 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1831 a_Cnd { \
1832 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1833 } IEM_MC_ELSE() { \
1834 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1835 } IEM_MC_ENDIF(); \
1836 IEM_MC_ADVANCE_RIP(); \
1837 IEM_MC_END(); \
1838 return VINF_SUCCESS; \
1839 \
1840 case IEMMODE_64BIT: \
1841 IEM_MC_BEGIN(0, 2); \
1842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1843 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1845 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1846 a_Cnd { \
1847 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1848 } IEM_MC_ENDIF(); \
1849 IEM_MC_ADVANCE_RIP(); \
1850 IEM_MC_END(); \
1851 return VINF_SUCCESS; \
1852 \
1853 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1854 } \
1855 } do {} while (0)
1856
1857
1858
/** Opcode 0x0f 0x40 - CMOVO: move if OF set. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - CMOVNO: move if OF clear. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - CMOVC/CMOVB: move if CF set. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - CMOVNC/CMOVAE: move if CF clear. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - CMOVE/CMOVZ: move if ZF set. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - CMOVNE/CMOVNZ: move if ZF clear. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - CMOVBE: move if CF or ZF set. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - CMOVNBE/CMOVA: move if both CF and ZF clear. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - CMOVS: move if SF set. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - CMOVNS: move if SF clear. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - CMOVP: move if PF set. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - CMOVNP: move if PF clear. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - CMOVL: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - CMOVNL/CMOVGE: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - CMOVLE: move if ZF set or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - CMOVNLE/CMOVG: move if ZF clear and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1987
/* SSE/SSE2 arithmetic and conversion stubs (0x0f 0x50..0x5f); not implemented. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2020
2021
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the form: 0x66 -> SSE (XMM regs),
       no prefix -> MMX; F2/F3 have no valid encoding for these opcodes. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source register is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit load, but enforcing the full 128-bit SSE alignment check. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* some of these opcodes have no MMX form -> #UD */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source register is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* MMX low-half form only reads 32 bits from memory. */
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2128
2129
/** Opcode 0x0f 0x60.
 * punpcklbw - interleave the low bytes; dispatches to the common
 * low-half worker with the punpcklbw implementation table. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61.
 * punpcklwd - interleave the low words. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62.
 * punpckldq - interleave the low dwords. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}


/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2164
2165
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mandatory prefix selects the form: 0x66 -> SSE, none -> MMX. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* opcodes without an MMX form (e.g. punpckhqdq) -> #UD */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2272
2273
/** Opcode 0x0f 0x68.
 * punpckhbw - interleave the high bytes; dispatches to the common
 * high-half worker with the punpckhbw implementation table. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69.
 * punpckhwd - interleave the high words. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a.
 * punpckhdq - interleave the high dwords. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c.
 * punpcklqdq - SSE2 only (the common worker raises #UD for the
 * prefix-less MMX encoding since there is no pfnU64 implementation). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d.
 * punpckhqdq - SSE2 only, high-qword interleave. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2315
2316
/** Opcode 0x0f 0x6e.
 * movd/movq - load a 32-bit (or 64-bit with REX.W) value from a general
 * register or memory into an MMX register (no prefix) or an XMM register
 * (0x66 prefix); the XMM forms zero-extend the value to 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): the 3rd argument (cbImm?) is 1 here although no
                   immediate byte follows this instruction — verify against the
                   IEM_MC_CALC_RM_EFF_ADDR definition; compare with the pshufd
                   decoder (0x0f 0x70) which passes 0 despite having an Ib. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): same cbImm=1 oddity as the SSE path above — verify. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2417
2418
/** Opcode 0x0f 0x6f.
 * movq (MMX, no prefix), movdqa (0x66 prefix, aligned) and
 * movdqu (F3 prefix, unaligned) register/memory loads. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the decode below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only movdqa enforces the 16-byte alignment check. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2512
2513
/** Opcode 0x0f 0x70. The immediate here is evil!
 * pshufw (MMX ext, no prefix), pshufd (0x66), pshuflw (F2), pshufhw (F3).
 * "Evil" because the shuffle-order immediate byte follows the ModR/M bytes
 * and displacement, so the memory forms must fetch it after computing the
 * effective address. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE forms share the decode; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* NOTE(review): the 3rd argument is 0 although a 1-byte
                   immediate (bEvil) still follows — verify the cbImm semantics
                   of IEM_MC_CALC_RM_EFF_ADDR (affects RIP-relative operands?). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* immediate comes after the displacement */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); /* immediate comes after the displacement */
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2638
2639
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);


/** Opcode 0x0f 0x71.
 * Group 12: word shifts by immediate. Register operands only (mod=3);
 * the ModR/M reg field selects psrlw (/2), psraw (/4) or psllw (/6),
 * and the mandatory prefix picks the MMX (none) vs SSE (0x66) form. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2693
2694
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);


/** Opcode 0x0f 0x72.
 * Group 13: dword shifts by immediate. Register operands only (mod=3);
 * the ModR/M reg field selects psrld (/2), psrad (/4) or pslld (/6),
 * and the mandatory prefix picks the MMX (none) vs SSE (0x66) form. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2748
2749
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT


/** Opcode 0x0f 0x73.
 * Group 14: qword/dqword shifts by immediate. Register operands only
 * (mod=3); the reg field selects psrlq (/2), psrldq (/3, SSE only),
 * psllq (/6) or pslldq (/7, SSE only); the 0x66 prefix picks the SSE form. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3:
            /* psrldq only exists with the 0x66 prefix (SSE2 byte shift). */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            /* pslldq only exists with the 0x66 prefix (SSE2 byte shift). */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2808
2809
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mandatory prefix selects the form: 0x66 -> SSE2 (full 128 bits),
       no prefix -> MMX (full 64 bits); F2/F3 are invalid encodings. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 16-byte alignment is enforced for the 128-bit operand. */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2911
2912
/** Opcode 0x0f 0x74.
 * pcmpeqb - packed byte equality compare, via the common full-width worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}


/** Opcode 0x0f 0x75.
 * pcmpeqw - packed word equality compare. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}


/** Opcode 0x0f 0x76.
 * pcmpeqd - packed dword equality compare.
 * Note: the function name is missing a 'q' ("pcmped"); it is kept as-is
 * because the opcode dispatch table presumably references this exact
 * identifier — verify before renaming. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}


/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2947
2948
/** Opcode 0x0f 0x7e.  movd/movq Ed/Eq,Pd/Pq (MMX, no prefix) and
 *  movd/movq Ed/Eq,Vd/Vq (SSE2, 66 prefix); REX.W selects the 64-bit form.
 *  NOTE(review): the F3-prefixed form (movq Vq,Wq) is not handled here and
 *  falls into the default case, raising \#UD - confirm intended. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* 64-bit: low qword of the XMM register to the GPR. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* 32-bit: low dword of the XMM register to the GPR. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* Any other prefix combination (incl. F2/F3) is undefined here. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3055
3056
/** Opcode 0x0f 0x7f.  movq Qq,Pq (MMX, no prefix), movdqa Wdq,Vdq (66
 *  prefix, alignment-checked store) and movdqu Wdq,Vdq (F3 prefix,
 *  unaligned store). */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the code path with the unaligned form. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* F2 and other prefix combinations are undefined for 0x0f 0x7f. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3151
3152
3153
/** Opcode 0x0f 0x80.  jo Jv - jump near if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3188
3189
/** Opcode 0x0f 0x81.  jno Jv - jump near if not overflow (OF=0).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3224
3225
/** Opcode 0x0f 0x82.  jc/jb/jnae Jv - jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3260
3261
/** Opcode 0x0f 0x83.  jnc/jnb/jae Jv - jump near if not carry (CF=0).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3296
3297
/** Opcode 0x0f 0x84.  je/jz Jv - jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3332
3333
/** Opcode 0x0f 0x85.  jne/jnz Jv - jump near if not equal/not zero (ZF=0).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3368
3369
/** Opcode 0x0f 0x86.  jbe/jna Jv - jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3404
3405
/** Opcode 0x0f 0x87.  jnbe/ja Jv - jump near if above (CF=0 and ZF=0).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3440
3441
/** Opcode 0x0f 0x88.  js Jv - jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3476
3477
/** Opcode 0x0f 0x89.  jns Jv - jump near if not sign (SF=0).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3512
3513
/** Opcode 0x0f 0x8a.  jp/jpe Jv - jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3548
3549
3550/** Opcode 0x0f 0x8b. */
3551FNIEMOP_DEF(iemOp_jnp_Jv)
3552{
3553 IEMOP_MNEMONIC("jo Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
/** Opcode 0x0f 0x8c.  jl/jnge Jv - jump near if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3620
3621
/** Opcode 0x0f 0x8d.  jnl/jge Jv - jump near if greater or equal (SF == OF).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3656
3657
/** Opcode 0x0f 0x8e.  jle/jng Jv - jump near if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3692
3693
/** Opcode 0x0f 0x8f.  jnle/jg Jv - jump near if greater (ZF=0 and SF == OF).
 *  The flag test is inverted: the jump is taken in the ELSE branch. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* rel32 displacement (32/64-bit operand size). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3728
3729
/** Opcode 0x0f 0x90.  seto Eb - set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3769
3770
/** Opcode 0x0f 0x91.  setno Eb - set byte to 1 if not overflow (OF=0), else 0.
 *  The flag test is inverted: 0 is stored on the IF path. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3810
3811
/** Opcode 0x0f 0x92.  setc/setb/setnae Eb - set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3851
3852
/** Opcode 0x0f 0x93.  setnc/setnb/setae Eb - set byte to 1 if not carry (CF=0), else 0.
 *  The flag test is inverted: 0 is stored on the IF path. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3892
3893
/** Opcode 0x0f 0x94.  sete/setz Eb - set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3933
3934
/** Opcode 0x0f 0x95.  setne/setnz Eb - set byte to 1 if not equal/not zero (ZF=0), else 0.
 *  The flag test is inverted: 0 is stored on the IF path. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3974
3975
/** Opcode 0x0f 0x96.  setbe/setna Eb - set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4015
4016
/** Opcode 0x0f 0x97.  setnbe/seta Eb - set byte to 1 if above (CF=0 and ZF=0), else 0.
 *  The flag test is inverted: 0 is stored on the IF path. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4056
4057
/** Opcode 0x0f 0x98.  sets Eb - set byte to 1 if sign (SF=1), else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4097
4098
/** Opcode 0x0f 0x99.  setns Eb - set byte to 1 if not sign (SF=0), else 0.
 *  The flag test is inverted: 0 is stored on the IF path. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4138
4139
4140/** Opcode 0x0f 0x9a. */
4141FNIEMOP_DEF(iemOp_setp_Eb)
4142{
4143 IEMOP_MNEMONIC("setnp Eb");
4144 IEMOP_HLP_MIN_386();
4145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4146 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4147
4148 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4149 * any way. AMD says it's "unused", whatever that means. We're
4150 * ignoring for now. */
4151 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4152 {
4153 /* register target */
4154 IEM_MC_BEGIN(0, 0);
4155 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4156 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4157 } IEM_MC_ELSE() {
4158 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4159 } IEM_MC_ENDIF();
4160 IEM_MC_ADVANCE_RIP();
4161 IEM_MC_END();
4162 }
4163 else
4164 {
4165 /* memory target */
4166 IEM_MC_BEGIN(0, 1);
4167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4170 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4171 } IEM_MC_ELSE() {
4172 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4173 } IEM_MC_ENDIF();
4174 IEM_MC_ADVANCE_RIP();
4175 IEM_MC_END();
4176 }
4177 return VINF_SUCCESS;
4178}
4179
4180
/**
 * Opcode 0x0f 0x9b - setnp Eb.
 *
 * Stores 1 in the byte register/memory operand when EFLAGS.PF is clear,
 * otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: 0 when PF set, 1 otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store the 0/1 byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4220
4221
/**
 * Opcode 0x0f 0x9c - setl Eb.
 *
 * Stores 1 in the byte register/memory operand when SF != OF (signed
 * less-than), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: 1 when SF != OF, 0 otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store the 0/1 byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4261
4262
/**
 * Opcode 0x0f 0x9d - setnl Eb.
 *
 * Stores 1 in the byte register/memory operand when SF == OF (signed
 * greater-or-equal), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted sense of setl - 0 when SF != OF, 1 otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store the 0/1 byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4302
4303
/**
 * Opcode 0x0f 0x9e - setle Eb.
 *
 * Stores 1 in the byte register/memory operand when ZF is set or SF != OF
 * (signed less-or-equal), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: 1 when ZF set or SF != OF, 0 otherwise. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store the 0/1 byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4343
4344
/**
 * Opcode 0x0f 0x9f - setnle Eb.
 *
 * Stores 1 in the byte register/memory operand when ZF is clear and SF == OF
 * (signed greater-than), otherwise stores 0.  386+ instruction.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: inverted sense of setle - 0 when ZF set or SF != OF. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store the 0/1 byte at the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4384
4385
4386/**
4387 * Common 'push segment-register' helper.
4388 */
4389FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4390{
4391 IEMOP_HLP_NO_LOCK_PREFIX();
4392 if (iReg < X86_SREG_FS)
4393 IEMOP_HLP_NO_64BIT();
4394 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4395
4396 switch (pIemCpu->enmEffOpSize)
4397 {
4398 case IEMMODE_16BIT:
4399 IEM_MC_BEGIN(0, 1);
4400 IEM_MC_LOCAL(uint16_t, u16Value);
4401 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4402 IEM_MC_PUSH_U16(u16Value);
4403 IEM_MC_ADVANCE_RIP();
4404 IEM_MC_END();
4405 break;
4406
4407 case IEMMODE_32BIT:
4408 IEM_MC_BEGIN(0, 1);
4409 IEM_MC_LOCAL(uint32_t, u32Value);
4410 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4411 IEM_MC_PUSH_U32_SREG(u32Value);
4412 IEM_MC_ADVANCE_RIP();
4413 IEM_MC_END();
4414 break;
4415
4416 case IEMMODE_64BIT:
4417 IEM_MC_BEGIN(0, 1);
4418 IEM_MC_LOCAL(uint64_t, u64Value);
4419 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4420 IEM_MC_PUSH_U64(u64Value);
4421 IEM_MC_ADVANCE_RIP();
4422 IEM_MC_END();
4423 break;
4424 }
4425
4426 return VINF_SUCCESS;
4427}
4428
4429
/** Opcode 0x0f 0xa0 - push fs.  386+ instruction; defers to the common
 *  segment-register push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4438
4439
/** Opcode 0x0f 0xa1 - pop fs.  386+ instruction; segment loading has side
 *  effects, so it is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4448
4449
/** Opcode 0x0f 0xa2 - cpuid.  Deferred to the C implementation which
 *  consults the virtual CPUID leaves. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4458
4459
4460/**
4461 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4462 * iemOp_bts_Ev_Gv.
4463 */
4464FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4465{
4466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4467 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4468
4469 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4470 {
4471 /* register destination. */
4472 IEMOP_HLP_NO_LOCK_PREFIX();
4473 switch (pIemCpu->enmEffOpSize)
4474 {
4475 case IEMMODE_16BIT:
4476 IEM_MC_BEGIN(3, 0);
4477 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4478 IEM_MC_ARG(uint16_t, u16Src, 1);
4479 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4480
4481 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4482 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4483 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4484 IEM_MC_REF_EFLAGS(pEFlags);
4485 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4486
4487 IEM_MC_ADVANCE_RIP();
4488 IEM_MC_END();
4489 return VINF_SUCCESS;
4490
4491 case IEMMODE_32BIT:
4492 IEM_MC_BEGIN(3, 0);
4493 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4494 IEM_MC_ARG(uint32_t, u32Src, 1);
4495 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4496
4497 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4498 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4499 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4500 IEM_MC_REF_EFLAGS(pEFlags);
4501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4502
4503 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4504 IEM_MC_ADVANCE_RIP();
4505 IEM_MC_END();
4506 return VINF_SUCCESS;
4507
4508 case IEMMODE_64BIT:
4509 IEM_MC_BEGIN(3, 0);
4510 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4511 IEM_MC_ARG(uint64_t, u64Src, 1);
4512 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4513
4514 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4515 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4516 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4517 IEM_MC_REF_EFLAGS(pEFlags);
4518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4519
4520 IEM_MC_ADVANCE_RIP();
4521 IEM_MC_END();
4522 return VINF_SUCCESS;
4523
4524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4525 }
4526 }
4527 else
4528 {
4529 /* memory destination. */
4530
4531 uint32_t fAccess;
4532 if (pImpl->pfnLockedU16)
4533 fAccess = IEM_ACCESS_DATA_RW;
4534 else /* BT */
4535 {
4536 IEMOP_HLP_NO_LOCK_PREFIX();
4537 fAccess = IEM_ACCESS_DATA_R;
4538 }
4539
4540 NOREF(fAccess);
4541
4542 /** @todo test negative bit offsets! */
4543 switch (pIemCpu->enmEffOpSize)
4544 {
4545 case IEMMODE_16BIT:
4546 IEM_MC_BEGIN(3, 2);
4547 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4548 IEM_MC_ARG(uint16_t, u16Src, 1);
4549 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4551 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4552
4553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4554 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4555 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4556 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4557 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4558 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4559 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4560 IEM_MC_FETCH_EFLAGS(EFlags);
4561
4562 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4563 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4564 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4565 else
4566 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4567 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4568
4569 IEM_MC_COMMIT_EFLAGS(EFlags);
4570 IEM_MC_ADVANCE_RIP();
4571 IEM_MC_END();
4572 return VINF_SUCCESS;
4573
4574 case IEMMODE_32BIT:
4575 IEM_MC_BEGIN(3, 2);
4576 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4577 IEM_MC_ARG(uint32_t, u32Src, 1);
4578 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4581
4582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4583 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4584 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4585 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4586 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4587 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4588 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4589 IEM_MC_FETCH_EFLAGS(EFlags);
4590
4591 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4592 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4594 else
4595 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4596 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4597
4598 IEM_MC_COMMIT_EFLAGS(EFlags);
4599 IEM_MC_ADVANCE_RIP();
4600 IEM_MC_END();
4601 return VINF_SUCCESS;
4602
4603 case IEMMODE_64BIT:
4604 IEM_MC_BEGIN(3, 2);
4605 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4606 IEM_MC_ARG(uint64_t, u64Src, 1);
4607 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4609 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4610
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4612 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4613 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4614 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4615 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4616 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4617 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4618 IEM_MC_FETCH_EFLAGS(EFlags);
4619
4620 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4621 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4622 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4623 else
4624 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4625 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4626
4627 IEM_MC_COMMIT_EFLAGS(EFlags);
4628 IEM_MC_ADVANCE_RIP();
4629 IEM_MC_END();
4630 return VINF_SUCCESS;
4631
4632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4633 }
4634 }
4635}
4636
4637
4638/** Opcode 0x0f 0xa3. */
4639FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4640{
4641 IEMOP_MNEMONIC("bt Gv,Gv");
4642 IEMOP_HLP_MIN_386();
4643 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4644}
4645
4646
4647/**
4648 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
4649 */
4650FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
4651{
4652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4653 IEMOP_HLP_NO_LOCK_PREFIX();
4654 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4655
4656 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4657 {
4658 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4659 IEMOP_HLP_NO_LOCK_PREFIX();
4660
4661 switch (pIemCpu->enmEffOpSize)
4662 {
4663 case IEMMODE_16BIT:
4664 IEM_MC_BEGIN(4, 0);
4665 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4666 IEM_MC_ARG(uint16_t, u16Src, 1);
4667 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4668 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4669
4670 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4671 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4672 IEM_MC_REF_EFLAGS(pEFlags);
4673 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4674
4675 IEM_MC_ADVANCE_RIP();
4676 IEM_MC_END();
4677 return VINF_SUCCESS;
4678
4679 case IEMMODE_32BIT:
4680 IEM_MC_BEGIN(4, 0);
4681 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4682 IEM_MC_ARG(uint32_t, u32Src, 1);
4683 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4684 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4685
4686 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4687 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4688 IEM_MC_REF_EFLAGS(pEFlags);
4689 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4690
4691 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4692 IEM_MC_ADVANCE_RIP();
4693 IEM_MC_END();
4694 return VINF_SUCCESS;
4695
4696 case IEMMODE_64BIT:
4697 IEM_MC_BEGIN(4, 0);
4698 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4699 IEM_MC_ARG(uint64_t, u64Src, 1);
4700 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4701 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4702
4703 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4704 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4705 IEM_MC_REF_EFLAGS(pEFlags);
4706 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4707
4708 IEM_MC_ADVANCE_RIP();
4709 IEM_MC_END();
4710 return VINF_SUCCESS;
4711
4712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4713 }
4714 }
4715 else
4716 {
4717 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4718
4719 switch (pIemCpu->enmEffOpSize)
4720 {
4721 case IEMMODE_16BIT:
4722 IEM_MC_BEGIN(4, 2);
4723 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4724 IEM_MC_ARG(uint16_t, u16Src, 1);
4725 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4726 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4728
4729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4730 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4731 IEM_MC_ASSIGN(cShiftArg, cShift);
4732 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4733 IEM_MC_FETCH_EFLAGS(EFlags);
4734 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4735 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4736
4737 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4738 IEM_MC_COMMIT_EFLAGS(EFlags);
4739 IEM_MC_ADVANCE_RIP();
4740 IEM_MC_END();
4741 return VINF_SUCCESS;
4742
4743 case IEMMODE_32BIT:
4744 IEM_MC_BEGIN(4, 2);
4745 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4746 IEM_MC_ARG(uint32_t, u32Src, 1);
4747 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4748 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4750
4751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4752 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4753 IEM_MC_ASSIGN(cShiftArg, cShift);
4754 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4755 IEM_MC_FETCH_EFLAGS(EFlags);
4756 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4757 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4758
4759 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4760 IEM_MC_COMMIT_EFLAGS(EFlags);
4761 IEM_MC_ADVANCE_RIP();
4762 IEM_MC_END();
4763 return VINF_SUCCESS;
4764
4765 case IEMMODE_64BIT:
4766 IEM_MC_BEGIN(4, 2);
4767 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4768 IEM_MC_ARG(uint64_t, u64Src, 1);
4769 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4770 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4772
4773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4774 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4775 IEM_MC_ASSIGN(cShiftArg, cShift);
4776 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4777 IEM_MC_FETCH_EFLAGS(EFlags);
4778 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4779 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4780
4781 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4782 IEM_MC_COMMIT_EFLAGS(EFlags);
4783 IEM_MC_ADVANCE_RIP();
4784 IEM_MC_END();
4785 return VINF_SUCCESS;
4786
4787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4788 }
4789 }
4790}
4791
4792
4793/**
4794 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
4795 */
4796FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
4797{
4798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4799 IEMOP_HLP_NO_LOCK_PREFIX();
4800 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4801
4802 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4803 {
4804 IEMOP_HLP_NO_LOCK_PREFIX();
4805
4806 switch (pIemCpu->enmEffOpSize)
4807 {
4808 case IEMMODE_16BIT:
4809 IEM_MC_BEGIN(4, 0);
4810 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4811 IEM_MC_ARG(uint16_t, u16Src, 1);
4812 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4814
4815 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4816 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4817 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4818 IEM_MC_REF_EFLAGS(pEFlags);
4819 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4820
4821 IEM_MC_ADVANCE_RIP();
4822 IEM_MC_END();
4823 return VINF_SUCCESS;
4824
4825 case IEMMODE_32BIT:
4826 IEM_MC_BEGIN(4, 0);
4827 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4828 IEM_MC_ARG(uint32_t, u32Src, 1);
4829 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4830 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4831
4832 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4833 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4834 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4835 IEM_MC_REF_EFLAGS(pEFlags);
4836 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4837
4838 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4839 IEM_MC_ADVANCE_RIP();
4840 IEM_MC_END();
4841 return VINF_SUCCESS;
4842
4843 case IEMMODE_64BIT:
4844 IEM_MC_BEGIN(4, 0);
4845 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4846 IEM_MC_ARG(uint64_t, u64Src, 1);
4847 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4848 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4849
4850 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4851 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4852 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4853 IEM_MC_REF_EFLAGS(pEFlags);
4854 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4855
4856 IEM_MC_ADVANCE_RIP();
4857 IEM_MC_END();
4858 return VINF_SUCCESS;
4859
4860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4861 }
4862 }
4863 else
4864 {
4865 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4866
4867 switch (pIemCpu->enmEffOpSize)
4868 {
4869 case IEMMODE_16BIT:
4870 IEM_MC_BEGIN(4, 2);
4871 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4872 IEM_MC_ARG(uint16_t, u16Src, 1);
4873 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4874 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4876
4877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4878 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4879 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4880 IEM_MC_FETCH_EFLAGS(EFlags);
4881 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4882 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4883
4884 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4885 IEM_MC_COMMIT_EFLAGS(EFlags);
4886 IEM_MC_ADVANCE_RIP();
4887 IEM_MC_END();
4888 return VINF_SUCCESS;
4889
4890 case IEMMODE_32BIT:
4891 IEM_MC_BEGIN(4, 2);
4892 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4893 IEM_MC_ARG(uint32_t, u32Src, 1);
4894 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4895 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4897
4898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4899 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4900 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4901 IEM_MC_FETCH_EFLAGS(EFlags);
4902 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4903 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4904
4905 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4906 IEM_MC_COMMIT_EFLAGS(EFlags);
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 return VINF_SUCCESS;
4910
4911 case IEMMODE_64BIT:
4912 IEM_MC_BEGIN(4, 2);
4913 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4914 IEM_MC_ARG(uint64_t, u64Src, 1);
4915 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4916 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4918
4919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4920 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4921 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4922 IEM_MC_FETCH_EFLAGS(EFlags);
4923 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4924 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4925
4926 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4927 IEM_MC_COMMIT_EFLAGS(EFlags);
4928 IEM_MC_ADVANCE_RIP();
4929 IEM_MC_END();
4930 return VINF_SUCCESS;
4931
4932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4933 }
4934 }
4935}
4936
4937
4938
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.  386+ instruction; immediate count. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4946
4947
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL.  386+ instruction; count in CL. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4955
4956
/** Opcode 0x0f 0xa8 - push gs.  386+ instruction; defers to the common
 *  segment-register push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4965
4966
/** Opcode 0x0f 0xa9 - pop gs.  386+ instruction; segment loading has side
 *  effects, so it is deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4975
4976
/** Opcode 0x0f 0xaa - rsm.  Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
4980
4981
/** Opcode 0x0f 0xab - bts Ev,Gv.  386+ instruction. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4989
4990
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib.  386+ instruction; immediate count. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4998
4999
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL.  386+ instruction; count in CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5007
5008
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 *  Raises \#UD unless the guest CPU profile reports FXSAVE/FXRSTOR support;
 *  the actual state saving is done by the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5027
5028
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 *  Raises \#UD unless the guest CPU profile reports FXSAVE/FXRSTOR support;
 *  the actual state loading is done by the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5047
5048
/** Opcode 0x0f 0xae mem/2 - ldmxcsr.  Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - stmxcsr.  Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - xsave.  Decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - xrstor.  Decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - xsaveopt.  Decodes to \#UD (stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - clflush.  Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5066
5067
/** Opcode 0x0f 0xae 11b/5 - lfence.
 *  Raises \#UD unless the guest CPU reports SSE2.  Executes a real LFENCE
 *  when the host has SSE2, otherwise an alternative memory fence. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5085
5086
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Guest must report SSE2 for MFENCE to be a valid encoding. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    /* bRm is fully consumed by the /6 dispatch; no operand to decode here. */
    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else /* host lacks SSE2: use the alternative memory fence helper instead. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5104
5105
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Gated on guest SSE2 here, same as lfence/mfence above.
       NOTE(review): architecturally SFENCE only needs SSE — confirm intent. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    /* bRm is fully consumed by the /7 dispatch; no operand to decode here. */
    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else /* host lacks SSE2: use the alternative memory fence helper instead. */
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5123
5124
/* F3-prefixed register forms (FSGSBASE instruction family) — not implemented;
   the UD stubs presumably raise #UD — TODO confirm FNIEMOP_UD_STUB_1 semantics. */

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5136
5137
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: dispatch purely on the reg field of ModR/M. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms (mod=11b): the meaning also depends on the repeat /
           operand-size / lock prefixes, so dispatch on those first. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: fence instructions in /5../7. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: FSGSBASE family in /0../3. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default: /* any other prefix combination is undefined. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5197
5198
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF/ZF/AF/PF are architecturally undefined after two-operand IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5207
5208
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest register refs. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        /* The lock prefix selects the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read/write, work on a local AL copy, and
           commit memory, EFLAGS and AL afterwards in that order. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* worker may have updated the AL copy. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5267
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination, one case per effective operand size. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                /* The lock prefix selects the atomic worker variant. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes through the refs must clear the high halves of
                   the 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it read/write, work on a local copy of
           AX/EAX/RAX, then commit memory, EFLAGS and the accumulator. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* worker may have updated the AX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* worker may have updated the EAX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* worker may have updated the RAX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5461
5462
/**
 * Common worker for LDS/LSS/LES/LFS/LGS style far-pointer loads (memory form
 * only): fetches the offset and the 16-bit selector that follows it, then
 * calls iemCImpl_load_SReg_Greg to load @a iSegReg and the destination GReg.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 16-bit offset followed by the selector at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset followed by the selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            /* Selector follows the (up to) 64-bit offset at +8. */
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5524
5525
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LSS only has a memory form; register operand => #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5536
5537
5538/** Opcode 0x0f 0xb3. */
5539FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5540{
5541 IEMOP_MNEMONIC("btr Ev,Gv");
5542 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5543}
5544
5545
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LFS only has a memory form; register operand => #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5556
5557
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LGS only has a memory form; register operand => #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5568
5569
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: zero-extend an 8-bit GReg into the destination. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5660
5661
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: only the 32- vs 64-bit destination distinction
           matters; 16-bit effective size is treated like 32-bit here. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5728
5729
/** Opcode 0x0f 0xb8. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5732
5733
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1): always raises an invalid-opcode exception. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5740
5741
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    /* The reg field of ModR/M selects bt/bts/btr/btc with an imm8 bit offset. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        /* The immediate bit offset is masked to the operand width below. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit write through the ref clears the high register half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked worker (read-only access); the writing forms map
           the destination read/write and honour the lock prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 bit offset follows the operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5904
5905
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5913
5914
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is defined after BSF; the rest are architecturally undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5923
5924
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is defined after BSR; the rest are architecturally undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5933
5934
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: sign-extend an 8-bit GReg into the destination. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6025
6026
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a word register or memory operand into a
       32/64-bit general register. 386+ instruction. With a 16-bit effective
       operand size a 32-bit destination is used (see the @todo below). */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. Only two destination widths: 32-bit for
           everything except a 64-bit effective operand size. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Sign-extending word fetch from iEffSeg:GCPtrEffDst. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6093
6094
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange-and-add bytes; the sum lands in the destination
       (r/m) and the original destination value lands in the source register.
       486+ instruction. LOCK is only meaningful for the memory form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the destination byte read/write, run the (possibly locked)
           xadd worker on it and a copy of the source register, then write
           the old destination value back into the source register. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6153
6154
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange-and-add for 16/32/64-bit operands; the sum lands
       in the destination (r/m) and the original destination value lands in
       the source register. 486+ instruction. LOCK is only meaningful for
       the memory form. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK with a register destination is invalid. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes clear bits 63:32 of both registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern for all three widths: map the destination R/W, run
           the (possibly locked) xadd worker against a copy of the source
           register, then store the old destination value into the source
           register after committing memory and EFLAGS. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6307
6308/** Opcode 0x0f 0xc2. */
6309FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6310
6311/** Opcode 0x0f 0xc3. */
6312FNIEMOP_STUB(iemOp_movnti_My_Gy);
6313
6314/** Opcode 0x0f 0xc4. */
6315FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6316
6317/** Opcode 0x0f 0xc5. */
6318FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6319
6320/** Opcode 0x0f 0xc6. */
6321FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6322
6323
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the memory qword; if equal, store
       ECX:EBX into it (ZF set by the worker), otherwise the worker loads the
       memory value into the EDX:EAX pair, which is written back below when
       ZF is clear. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand pair. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement pair. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Compare failed (ZF clear): write the memory value back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6368
6369
/* Group 9 members that are not implemented yet; these stubs raise \#UD. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6387
6388
6389/** Opcode 0x0f 0xc7. */
6390FNIEMOP_DEF(iemOp_Grp9)
6391{
6392 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6394 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6395 {
6396 case 0: case 2: case 3: case 4: case 5:
6397 return IEMOP_RAISE_INVALID_OPCODE();
6398 case 1:
6399 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6400 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6401 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6402 return IEMOP_RAISE_INVALID_OPCODE();
6403 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6404 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6405 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6406 case 6:
6407 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6408 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6409 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6410 {
6411 case 0:
6412 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6413 case IEM_OP_PRF_SIZE_OP:
6414 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6415 case IEM_OP_PRF_REPZ:
6416 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6417 default:
6418 return IEMOP_RAISE_INVALID_OPCODE();
6419 }
6420 case 7:
6421 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6422 {
6423 case 0:
6424 case IEM_OP_PRF_REPZ:
6425 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6426 default:
6427 return IEMOP_RAISE_INVALID_OPCODE();
6428 }
6429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6430 }
6431}
6432
6433
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register @a iReg according to the effective operand
 * size.  Note that the 16-bit case is architecturally undefined; this
 * implementation delegates to iemAImpl_bswap_u16 (operating on the low
 * 32 bits without clearing the high dword).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* A 32-bit destination write zeroes bits 63:32 in 64-bit mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6473
6474
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8: byte-swap the register; REX.B selects r8. */
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6485
6486
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9: byte-swap the register; REX.B selects r9. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6494
6495
6496/** Opcode 0x0f 0xca. */
6497FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6498{
6499 IEMOP_MNEMONIC("bswap rDX/r9");
6500 IEMOP_HLP_MIN_486();
6501 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6502}
6503
6504
6505/** Opcode 0x0f 0xcb. */
6506FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6507{
6508 IEMOP_MNEMONIC("bswap rBX/r9");
6509 IEMOP_HLP_MIN_486();
6510 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6511}
6512
6513
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12: byte-swap the register; REX.B selects r12. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6521
6522
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13: byte-swap the register; REX.B selects r13. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6530
6531
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14: byte-swap the register; REX.B selects r14. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6539
6540
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15: byte-swap the register; REX.B selects r15. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6548
6549
6550
/* Decode stubs for not-yet-implemented two-byte opcodes 0xd0..0xd6. */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6565
6566
6567/** Opcode 0x0f 0xd7. */
6568FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6569{
6570 /* Docs says register only. */
6571 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6572 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6573 return IEMOP_RAISE_INVALID_OPCODE();
6574
6575 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6576 /** @todo testcase: Check that the instruction implicitly clears the high
6577 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6578 * and opcode modifications are made to work with the whole width (not
6579 * just 128). */
6580 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6581 {
6582 case IEM_OP_PRF_SIZE_OP: /* SSE */
6583 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6584 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6585 IEM_MC_BEGIN(2, 0);
6586 IEM_MC_ARG(uint64_t *, pDst, 0);
6587 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6589 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6590 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6591 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6592 IEM_MC_ADVANCE_RIP();
6593 IEM_MC_END();
6594 return VINF_SUCCESS;
6595
6596 case 0: /* MMX */
6597 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6598 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6599 IEM_MC_BEGIN(2, 0);
6600 IEM_MC_ARG(uint64_t *, pDst, 0);
6601 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6602 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6603 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6604 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6605 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6606 IEM_MC_ADVANCE_RIP();
6607 IEM_MC_END();
6608 return VINF_SUCCESS;
6609
6610 default:
6611 return IEMOP_RAISE_INVALID_OPCODE();
6612 }
6613}
6614
6615
/* Decode stubs for not-yet-implemented two-byte opcodes 0xd8..0xee. */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6662
6663
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: bitwise XOR of MMX/SSE registers; delegates decoding and
       execution to the common full,full->full MMX/SSE2 worker. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6670
6671
/* Decode stubs for not-yet-implemented two-byte opcodes 0xf0..0xfe. */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6702
6703
6704const PFNIEMOP g_apfnTwoByteMap[256] =
6705{
6706 /* 0x00 */ iemOp_Grp6,
6707 /* 0x01 */ iemOp_Grp7,
6708 /* 0x02 */ iemOp_lar_Gv_Ew,
6709 /* 0x03 */ iemOp_lsl_Gv_Ew,
6710 /* 0x04 */ iemOp_Invalid,
6711 /* 0x05 */ iemOp_syscall,
6712 /* 0x06 */ iemOp_clts,
6713 /* 0x07 */ iemOp_sysret,
6714 /* 0x08 */ iemOp_invd,
6715 /* 0x09 */ iemOp_wbinvd,
6716 /* 0x0a */ iemOp_Invalid,
6717 /* 0x0b */ iemOp_ud2,
6718 /* 0x0c */ iemOp_Invalid,
6719 /* 0x0d */ iemOp_nop_Ev_GrpP,
6720 /* 0x0e */ iemOp_femms,
6721 /* 0x0f */ iemOp_3Dnow,
6722 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6723 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6724 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6725 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6726 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6727 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6728 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6729 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6730 /* 0x18 */ iemOp_prefetch_Grp16,
6731 /* 0x19 */ iemOp_nop_Ev,
6732 /* 0x1a */ iemOp_nop_Ev,
6733 /* 0x1b */ iemOp_nop_Ev,
6734 /* 0x1c */ iemOp_nop_Ev,
6735 /* 0x1d */ iemOp_nop_Ev,
6736 /* 0x1e */ iemOp_nop_Ev,
6737 /* 0x1f */ iemOp_nop_Ev,
6738 /* 0x20 */ iemOp_mov_Rd_Cd,
6739 /* 0x21 */ iemOp_mov_Rd_Dd,
6740 /* 0x22 */ iemOp_mov_Cd_Rd,
6741 /* 0x23 */ iemOp_mov_Dd_Rd,
6742 /* 0x24 */ iemOp_mov_Rd_Td,
6743 /* 0x25 */ iemOp_Invalid,
6744 /* 0x26 */ iemOp_mov_Td_Rd,
6745 /* 0x27 */ iemOp_Invalid,
6746 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6747 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6748 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6749 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6750 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6751 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6752 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6753 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6754 /* 0x30 */ iemOp_wrmsr,
6755 /* 0x31 */ iemOp_rdtsc,
6756 /* 0x32 */ iemOp_rdmsr,
6757 /* 0x33 */ iemOp_rdpmc,
6758 /* 0x34 */ iemOp_sysenter,
6759 /* 0x35 */ iemOp_sysexit,
6760 /* 0x36 */ iemOp_Invalid,
6761 /* 0x37 */ iemOp_getsec,
6762 /* 0x38 */ iemOp_3byte_Esc_A4,
6763 /* 0x39 */ iemOp_Invalid,
6764 /* 0x3a */ iemOp_3byte_Esc_A5,
6765 /* 0x3b */ iemOp_Invalid,
6766 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6767 /* 0x3d */ iemOp_Invalid,
6768 /* 0x3e */ iemOp_Invalid,
6769 /* 0x3f */ iemOp_Invalid,
6770 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6771 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6772 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6773 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6774 /* 0x44 */ iemOp_cmove_Gv_Ev,
6775 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6776 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6777 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6778 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6779 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6780 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6781 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6782 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6783 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6784 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6785 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6786 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6787 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6788 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6789 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6790 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6791 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6792 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6793 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6794 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6795 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6796 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6797 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6798 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6799 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6800 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6801 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6802 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6803 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6804 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6805 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6806 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6807 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6808 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6809 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6810 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6811 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6812 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6813 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6814 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6815 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6816 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6817 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6818 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6819 /* 0x71 */ iemOp_Grp12,
6820 /* 0x72 */ iemOp_Grp13,
6821 /* 0x73 */ iemOp_Grp14,
6822 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6823 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6824 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6825 /* 0x77 */ iemOp_emms,
6826 /* 0x78 */ iemOp_vmread_AmdGrp17,
6827 /* 0x79 */ iemOp_vmwrite,
6828 /* 0x7a */ iemOp_Invalid,
6829 /* 0x7b */ iemOp_Invalid,
6830 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6831 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6832 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6833 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6834 /* 0x80 */ iemOp_jo_Jv,
6835 /* 0x81 */ iemOp_jno_Jv,
6836 /* 0x82 */ iemOp_jc_Jv,
6837 /* 0x83 */ iemOp_jnc_Jv,
6838 /* 0x84 */ iemOp_je_Jv,
6839 /* 0x85 */ iemOp_jne_Jv,
6840 /* 0x86 */ iemOp_jbe_Jv,
6841 /* 0x87 */ iemOp_jnbe_Jv,
6842 /* 0x88 */ iemOp_js_Jv,
6843 /* 0x89 */ iemOp_jns_Jv,
6844 /* 0x8a */ iemOp_jp_Jv,
6845 /* 0x8b */ iemOp_jnp_Jv,
6846 /* 0x8c */ iemOp_jl_Jv,
6847 /* 0x8d */ iemOp_jnl_Jv,
6848 /* 0x8e */ iemOp_jle_Jv,
6849 /* 0x8f */ iemOp_jnle_Jv,
6850 /* 0x90 */ iemOp_seto_Eb,
6851 /* 0x91 */ iemOp_setno_Eb,
6852 /* 0x92 */ iemOp_setc_Eb,
6853 /* 0x93 */ iemOp_setnc_Eb,
6854 /* 0x94 */ iemOp_sete_Eb,
6855 /* 0x95 */ iemOp_setne_Eb,
6856 /* 0x96 */ iemOp_setbe_Eb,
6857 /* 0x97 */ iemOp_setnbe_Eb,
6858 /* 0x98 */ iemOp_sets_Eb,
6859 /* 0x99 */ iemOp_setns_Eb,
6860 /* 0x9a */ iemOp_setp_Eb,
6861 /* 0x9b */ iemOp_setnp_Eb,
6862 /* 0x9c */ iemOp_setl_Eb,
6863 /* 0x9d */ iemOp_setnl_Eb,
6864 /* 0x9e */ iemOp_setle_Eb,
6865 /* 0x9f */ iemOp_setnle_Eb,
6866 /* 0xa0 */ iemOp_push_fs,
6867 /* 0xa1 */ iemOp_pop_fs,
6868 /* 0xa2 */ iemOp_cpuid,
6869 /* 0xa3 */ iemOp_bt_Ev_Gv,
6870 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6871 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6872 /* 0xa6 */ iemOp_Invalid,
6873 /* 0xa7 */ iemOp_Invalid,
6874 /* 0xa8 */ iemOp_push_gs,
6875 /* 0xa9 */ iemOp_pop_gs,
6876 /* 0xaa */ iemOp_rsm,
6877 /* 0xab */ iemOp_bts_Ev_Gv,
6878 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6879 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6880 /* 0xae */ iemOp_Grp15,
6881 /* 0xaf */ iemOp_imul_Gv_Ev,
6882 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6883 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6884 /* 0xb2 */ iemOp_lss_Gv_Mp,
6885 /* 0xb3 */ iemOp_btr_Ev_Gv,
6886 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6887 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6888 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6889 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6890 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6891 /* 0xb9 */ iemOp_Grp10,
6892 /* 0xba */ iemOp_Grp8,
6893 /* 0xbd */ iemOp_btc_Ev_Gv,
6894 /* 0xbc */ iemOp_bsf_Gv_Ev,
6895 /* 0xbd */ iemOp_bsr_Gv_Ev,
6896 /* 0xbe */ iemOp_movsx_Gv_Eb,
6897 /* 0xbf */ iemOp_movsx_Gv_Ew,
6898 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6899 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6900 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6901 /* 0xc3 */ iemOp_movnti_My_Gy,
6902 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6903 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6904 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6905 /* 0xc7 */ iemOp_Grp9,
6906 /* 0xc8 */ iemOp_bswap_rAX_r8,
6907 /* 0xc9 */ iemOp_bswap_rCX_r9,
6908 /* 0xca */ iemOp_bswap_rDX_r10,
6909 /* 0xcb */ iemOp_bswap_rBX_r11,
6910 /* 0xcc */ iemOp_bswap_rSP_r12,
6911 /* 0xcd */ iemOp_bswap_rBP_r13,
6912 /* 0xce */ iemOp_bswap_rSI_r14,
6913 /* 0xcf */ iemOp_bswap_rDI_r15,
6914 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6915 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6916 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6917 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6918 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6919 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6920 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6921 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6922 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6923 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6924 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6925 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6926 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6927 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6928 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6929 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6930 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6931 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6932 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6933 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6934 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6935 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6936 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6937 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6938 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6939 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6940 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6941 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6942 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6943 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6944 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6945 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6946 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6947 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6948 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6949 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6950 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6951 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6952 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6953 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6954 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6955 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6956 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6957 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6958 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6959 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6960 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6961 /* 0xff */ iemOp_Invalid
6962};
6963
6964/** @} */
6965
6966
6967/** @name One byte opcodes.
6968 *
6969 * @{
6970 */
6971
/** Opcode 0x00 - add Eb,Gb: byte register/memory destination, byte register source. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv: operand-size sized register/memory destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb: byte register destination, register/memory source. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev: operand-size sized register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add AL,Ib: AL with a byte immediate. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz: accumulator with an operand-size sized immediate. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7018
7019
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
7026
7027
7028/** Opcode 0x07. */
7029FNIEMOP_DEF(iemOp_pop_ES)
7030{
7031 IEMOP_MNEMONIC("pop es");
7032 IEMOP_HLP_NO_64BIT();
7033 IEMOP_HLP_NO_LOCK_PREFIX();
7034 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
7035}
7036
7037
/** Opcode 0x08 - or Eb,Gb. AF is undefined after OR (see verification macro). */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7045
7046
7047/** Opcode 0x09. */
7048FNIEMOP_DEF(iemOp_or_Ev_Gv)
7049{
7050 IEMOP_MNEMONIC("or Ev,Gv ");
7051 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7052 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7053}
7054
7055
/** Opcode 0x0a - or Gb,Eb. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or AL,Ib. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. AF is undefined after OR. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7090
7091
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/**
 * Opcode 0x0f - two-byte opcode escape.
 *
 * Fetches the second opcode byte and dispatches through the two-byte
 * opcode table.  Requires at least a 286.
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7108
/** Opcode 0x10 - adc Eb,Gb: add with carry, byte r/m destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7155
7156
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss. Invalid in 64-bit mode; no lock prefix allowed. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7173
7174
/** Opcode 0x18 - sbb Eb,Gb: subtract with borrow, byte r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7221
7222
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds. Invalid in 64-bit mode; no lock prefix allowed. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7239
7240
/** Opcode 0x20 - and Eb,Gb. AF is undefined after AND (see verification macro). */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and AL,Ib. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz. AF is undefined after AND. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7293
7294
/** Opcode 0x26 - ES segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7305
7306
/** Opcode 0x27 - daa: decimal adjust AL after addition.  Invalid in 64-bit
 *  mode; OF is undefined (see verification macro). */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7316
7317
/** Opcode 0x28 - sub Eb,Gb: byte r/m destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7364
7365
/** Opcode 0x2e - CS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7376
7377
/** Opcode 0x2f - das: decimal adjust AL after subtraction.  Invalid in 64-bit
 *  mode; OF is undefined (see verification macro). */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7387
7388
/** Opcode 0x30 - xor Eb,Gb. AF is undefined after XOR (see verification macro). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor AL,Ib. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz. AF is undefined after XOR. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7441
7442
/** Opcode 0x36 - SS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7453
7454
/** Opcode 0x37 - aaa (ASCII adjust AL after addition).  Implementation stub. */
FNIEMOP_STUB(iemOp_aaa);
7457
7458
/** Opcode 0x38 - cmp Eb,Gb: compare; operands are not modified. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7507
7508
/** Opcode 0x3e - DS segment override prefix: record it and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7519
7520
/** Opcode 0x3f - aas (ASCII adjust AL after subtraction).  Implementation stub. */
FNIEMOP_STUB(iemOp_aas);
7523
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies the unary operation from @a pImpl to the general register selected
 * by @a iReg, sized by the current effective operand size, and updates EFLAGS
 * by reference.  The 32-bit variant clears the high half of the 64-bit
 * register (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF).
 *
 * @param   pImpl   Pointer to the unary operation implementation table
 *                  (assembly workers for 16/32/64-bit).
 * @param   iReg    The index of the general register to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached; all effective operand sizes are handled above. */
    return VINF_SUCCESS;
}
7568
7569
/** Opcode 0x40 - inc eAX; plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - inc eCX; REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - inc eDX; REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - inc eBX; REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - inc eSP; REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - inc eBP; REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - inc eSI; REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - inc eDI; REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7741
7742
/** Opcode 0x48 - dec eAX; REX.W prefix in 64-bit mode (recalculates operand size). */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - dec eCX; REX.BW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - dec eDX; REX.XW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - dec eBX; REX.BXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - dec eSP; REX.RW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - dec eBP; REX.RBW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - dec eSI; REX.RXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - dec eDI; REX.RBXW prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7921
7922
/**
 * Common 'push register' helper.
 *
 * Pushes the general register @a iReg sized by the effective operand size.
 * In 64-bit mode REX.B extends the register index, the default operand size
 * is forced to 64-bit, and an operand size prefix selects 16-bit instead
 * (there is no 32-bit push in 64-bit mode).
 *
 * @param   iReg    The index of the general register to push.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7968
7969
/** Opcode 0x50 - push rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8000
8001
8002/** Opcode 0x54. */
8003FNIEMOP_DEF(iemOp_push_eSP)
8004{
8005 IEMOP_MNEMONIC("push rSP");
8006 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8007 {
8008 IEM_MC_BEGIN(0, 1);
8009 IEM_MC_LOCAL(uint16_t, u16Value);
8010 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8011 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8012 IEM_MC_PUSH_U16(u16Value);
8013 IEM_MC_ADVANCE_RIP();
8014 IEM_MC_END();
8015 }
8016 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8017}
8018
8019
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP - push the (r/e)BP general register via the common helper. */
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8026
8027
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI - push the (r/e)SI general register via the common helper. */
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8034
8035
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI - push the (r/e)DI general register via the common helper. */
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8042
8043
8044/**
8045 * Common 'pop register' helper.
8046 */
8047FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8048{
8049 IEMOP_HLP_NO_LOCK_PREFIX();
8050 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8051 {
8052 iReg |= pIemCpu->uRexB;
8053 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8054 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8055 }
8056
8057 switch (pIemCpu->enmEffOpSize)
8058 {
8059 case IEMMODE_16BIT:
8060 IEM_MC_BEGIN(0, 1);
8061 IEM_MC_LOCAL(uint16_t, *pu16Dst);
8062 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8063 IEM_MC_POP_U16(pu16Dst);
8064 IEM_MC_ADVANCE_RIP();
8065 IEM_MC_END();
8066 break;
8067
8068 case IEMMODE_32BIT:
8069 IEM_MC_BEGIN(0, 1);
8070 IEM_MC_LOCAL(uint32_t, *pu32Dst);
8071 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8072 IEM_MC_POP_U32(pu32Dst);
8073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8074 IEM_MC_ADVANCE_RIP();
8075 IEM_MC_END();
8076 break;
8077
8078 case IEMMODE_64BIT:
8079 IEM_MC_BEGIN(0, 1);
8080 IEM_MC_LOCAL(uint64_t, *pu64Dst);
8081 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8082 IEM_MC_POP_U64(pu64Dst);
8083 IEM_MC_ADVANCE_RIP();
8084 IEM_MC_END();
8085 break;
8086 }
8087
8088 return VINF_SUCCESS;
8089}
8090
8091
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX - pop the stack top into (r/e)AX via the common helper. */
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8098
8099
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX - pop the stack top into (r/e)CX via the common helper. */
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8106
8107
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX - pop the stack top into (r/e)DX via the common helper. */
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8114
8115
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX - pop the stack top into (r/e)BX via the common helper. */
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8122
8123
/** Opcode 0x5c. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /*
     * POP rSP needs special handling: the value read from the stack becomes
     * the new stack pointer, so the generic helper (which pops into a
     * register reference after the stack read/adjust) is only reused for the
     * REX.B-extended encoding (pop r12) where the destination isn't SP.
     */
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Plain 'pop rsp' in long mode: 64-bit default operand size, 66h gives 16-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8171
8172
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP - pop the stack top into (r/e)BP via the common helper. */
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8179
8180
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI - pop the stack top into (r/e)SI via the common helper. */
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8187
8188
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI - pop the stack top into (r/e)DI via the common helper. */
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8195
8196
/** Opcode 0x60. */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - push all general registers; 186+, invalid in 64-bit mode. */
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to a C implementation for the chosen operand size (only 16/32 possible outside long mode). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8208
8209
/** Opcode 0x61. */
FNIEMOP_DEF(iemOp_popa)
{
    /* POPA/POPAD - pop all general registers; 186+, invalid in 64-bit mode. */
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to a C implementation for the chosen operand size (only 16/32 possible outside long mode). */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8221
8222
/** Opcode 0x62.
 * BOUND Gv,Ma (and, on newer CPUs, the EVEX prefix) - not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
8226
8227
/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /*
     * ARPL Ew,Gw - adjust the RPL field of the selector in Ew to be at least
     * that of Gw, setting ZF when an adjustment was made.  286+, protected
     * mode only (real/V86 mode raises #UD via the helper below).
     */
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory - the destination selector is mapped read/write and committed after the helper runs. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8277
8278
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    /* MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit register (64-bit mode only). */
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8320
8321
/** Opcode 0x64. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix (386+): record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8334
8335
/** Opcode 0x65. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix (386+): record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8348
8349
/** Opcode 0x66. */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size prefix (386+): flag it, recalc the effective operand size,
       then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8362
8363
/** Opcode 0x67. */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size prefix (386+): flag it and toggle the effective address
       mode relative to the default (16<->32; 64-bit drops to 32), then
       decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8382
8383
/** Opcode 0x68. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /*
     * PUSH Iz - push a word/dword immediate (186+).  In 64-bit mode the
     * immediate is a sign-extended 32-bit value pushed as 64 bits.
     */
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8428
8429
/** Opcode 0x69. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /*
     * IMUL Gv,Ev,Iz - three-operand signed multiply (186+): the register
     * named by the reg field receives Ev * Iz.  SF/ZF/AF/PF are undefined
     * per the architecture, hence the verification-mode exclusion below.
     * The third argument of IEM_MC_CALC_RM_EFF_ADDR in the memory paths is
     * the number of immediate bytes still to be fetched after the modrm
     * addressing bytes (2 or 4 here).
     */
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local, then store to the reg-field register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - the immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8589
8590
/** Opcode 0x6a. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /*
     * PUSH Ib - push a sign-extended byte immediate (186+), widened to the
     * effective operand size.
     */
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8617
8618
/** Opcode 0x6b. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    /*
     * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended
     * byte immediate (186+).  SF/ZF/AF/PF are architecturally undefined.
     * The third argument of IEM_MC_CALC_RM_EFF_ADDR in the memory paths is
     * the number of immediate bytes following the modrm bytes (1 here).
     */
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand - note the (int8_t) cast sign-extends the immediate. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local, then store to the reg-field register. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8772
8773
/** Opcode 0x6c. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    /*
     * INSB - read a byte from port DX and store it at ES:[e/r]DI (186+).
     * Deferred to a C implementation selected by REP prefix and address size.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8802
8803
/** Opcode 0x6d. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    /*
     * INSW/INSD - read a word/dword from port DX and store it at ES:[e/r]DI
     * (186+).  Deferred to a C implementation selected by REP prefix,
     * operand size and address size.  A 64-bit operand size behaves like
     * 32-bit, hence the shared case labels.  IEM_NOT_REACHED_DEFAULT_CASE_RET
     * supplies the 'default:' label of each switch.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8864
8865
/** Opcode 0x6e. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    /*
     * OUTSB - write the byte at [effective segment]:[e/r]SI to port DX
     * (186+).  Deferred to a C implementation selected by REP prefix and
     * address size; the effective segment is passed so overrides work.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8894
8895
/** Opcode 0x6f. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    /*
     * OUTSW/OUTSD - write the word/dword at [effective segment]:[e/r]SI to
     * port DX (186+).  Deferred to a C implementation selected by REP
     * prefix, operand size and address size.  A 64-bit operand size behaves
     * like 32-bit, hence the shared case labels.
     */
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8956
8957
/** Opcode 0x70. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    /* JO rel8 - jump short if OF is set. */
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8975
8976
/** Opcode 0x71. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    /* JNO rel8 - jump short if OF is clear (inverted branch arms below). */
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8994
/** Opcode 0x72. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    /* JC/JB/JNAE rel8 - jump short if CF is set. */
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9012
9013
/** Opcode 0x73. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    /* JNC/JNB/JAE rel8 - jump short if CF is clear (inverted branch arms below). */
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9031
9032
/** Opcode 0x74. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    /* JE/JZ rel8 - jump short if ZF is set. */
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9050
9051
/** Opcode 0x75 - JNE/JNZ Jb: short jump if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when ZF is clear: the set-case falls through, the else-case jumps. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9069
9070
/** Opcode 0x76 - JBE/JNA Jb: short jump if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when either CF or ZF is set. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9088
9089
/** Opcode 0x77 - JNBE/JA Jb: short jump if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when both CF and ZF are clear: the any-set case falls through. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9107
9108
/** Opcode 0x78 - JS Jb: short jump if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when SF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9126
9127
/** Opcode 0x79 - JNS Jb: short jump if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when SF is clear: the set-case falls through, the else-case jumps. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9145
9146
/** Opcode 0x7a - JP/JPE Jb: short jump if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when PF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9164
9165
/** Opcode 0x7b - JNP/JPO Jb: short jump if not parity (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when PF is clear: the set-case falls through, the else-case jumps. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9183
9184
/** Opcode 0x7c - JL/JNGE Jb: short jump if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when SF and OF differ. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9202
9203
/** Opcode 0x7d - JNL/JGE Jb: short jump if not less (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when SF equals OF: the not-equal case falls through. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9221
9222
/** Opcode 0x7e - JLE/JNG Jb: short jump if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when ZF is set or SF differs from OF. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9240
9241
/** Opcode 0x7f - JNLE/JG Jb: short jump if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    /* Signed 8-bit displacement relative to the next instruction. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Branch taken when ZF is clear and SF equals OF: the other case falls through. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9259
9260
/** Opcode 0x80 - Group 1 Eb,Ib: the ModRM reg field selects
 *  add/or/adc/sbb/and/sub/xor/cmp with a byte destination and imm8 source. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The packed string is indexed by reg*4 to pick the mnemonic. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* An operator without a locked worker (CMP) never writes, so it maps the
           operand read-only and rejects the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The 3rd argument is the count of immediate bytes still to be fetched
           (presumably so RIP-relative addressing is computed correctly — see
           IEM_MC_CALC_RM_EFF_ADDR). The imm8 is fetched afterwards. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* A LOCK prefix selects the atomic worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9319
9320
9321/** Opcode 0x81. */
9322FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9323{
9324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9325 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9326 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9327
9328 switch (pIemCpu->enmEffOpSize)
9329 {
9330 case IEMMODE_16BIT:
9331 {
9332 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9333 {
9334 /* register target */
9335 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9336 IEMOP_HLP_NO_LOCK_PREFIX();
9337 IEM_MC_BEGIN(3, 0);
9338 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9339 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9340 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9341
9342 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9343 IEM_MC_REF_EFLAGS(pEFlags);
9344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9345
9346 IEM_MC_ADVANCE_RIP();
9347 IEM_MC_END();
9348 }
9349 else
9350 {
9351 /* memory target */
9352 uint32_t fAccess;
9353 if (pImpl->pfnLockedU16)
9354 fAccess = IEM_ACCESS_DATA_RW;
9355 else
9356 { /* CMP, TEST */
9357 IEMOP_HLP_NO_LOCK_PREFIX();
9358 fAccess = IEM_ACCESS_DATA_R;
9359 }
9360 IEM_MC_BEGIN(3, 2);
9361 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9362 IEM_MC_ARG(uint16_t, u16Src, 1);
9363 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9365
9366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9367 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9368 IEM_MC_ASSIGN(u16Src, u16Imm);
9369 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9370 IEM_MC_FETCH_EFLAGS(EFlags);
9371 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9372 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9373 else
9374 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9375
9376 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9377 IEM_MC_COMMIT_EFLAGS(EFlags);
9378 IEM_MC_ADVANCE_RIP();
9379 IEM_MC_END();
9380 }
9381 break;
9382 }
9383
9384 case IEMMODE_32BIT:
9385 {
9386 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9387 {
9388 /* register target */
9389 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9390 IEMOP_HLP_NO_LOCK_PREFIX();
9391 IEM_MC_BEGIN(3, 0);
9392 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9393 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9394 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9395
9396 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9397 IEM_MC_REF_EFLAGS(pEFlags);
9398 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9399 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9400
9401 IEM_MC_ADVANCE_RIP();
9402 IEM_MC_END();
9403 }
9404 else
9405 {
9406 /* memory target */
9407 uint32_t fAccess;
9408 if (pImpl->pfnLockedU32)
9409 fAccess = IEM_ACCESS_DATA_RW;
9410 else
9411 { /* CMP, TEST */
9412 IEMOP_HLP_NO_LOCK_PREFIX();
9413 fAccess = IEM_ACCESS_DATA_R;
9414 }
9415 IEM_MC_BEGIN(3, 2);
9416 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9417 IEM_MC_ARG(uint32_t, u32Src, 1);
9418 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9420
9421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9422 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9423 IEM_MC_ASSIGN(u32Src, u32Imm);
9424 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9425 IEM_MC_FETCH_EFLAGS(EFlags);
9426 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9427 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9428 else
9429 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9430
9431 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9432 IEM_MC_COMMIT_EFLAGS(EFlags);
9433 IEM_MC_ADVANCE_RIP();
9434 IEM_MC_END();
9435 }
9436 break;
9437 }
9438
9439 case IEMMODE_64BIT:
9440 {
9441 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9442 {
9443 /* register target */
9444 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9445 IEMOP_HLP_NO_LOCK_PREFIX();
9446 IEM_MC_BEGIN(3, 0);
9447 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9448 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9449 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9450
9451 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9452 IEM_MC_REF_EFLAGS(pEFlags);
9453 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9454
9455 IEM_MC_ADVANCE_RIP();
9456 IEM_MC_END();
9457 }
9458 else
9459 {
9460 /* memory target */
9461 uint32_t fAccess;
9462 if (pImpl->pfnLockedU64)
9463 fAccess = IEM_ACCESS_DATA_RW;
9464 else
9465 { /* CMP */
9466 IEMOP_HLP_NO_LOCK_PREFIX();
9467 fAccess = IEM_ACCESS_DATA_R;
9468 }
9469 IEM_MC_BEGIN(3, 2);
9470 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9471 IEM_MC_ARG(uint64_t, u64Src, 1);
9472 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9474
9475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9476 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9477 IEM_MC_ASSIGN(u64Src, u64Imm);
9478 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9479 IEM_MC_FETCH_EFLAGS(EFlags);
9480 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9481 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9482 else
9483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9484
9485 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9486 IEM_MC_COMMIT_EFLAGS(EFlags);
9487 IEM_MC_ADVANCE_RIP();
9488 IEM_MC_END();
9489 }
9490 break;
9491 }
9492 }
9493 return VINF_SUCCESS;
9494}
9495
9496
/** Opcode 0x82 - alias of Group 1 Eb,Ib (0x80); valid only outside 64-bit
 *  mode, where IEMOP_HLP_NO_64BIT rejects it. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    /* Same behavior as 0x80 otherwise; forward to its handler. */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9503
9504
9505/** Opcode 0x83. */
9506FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
9507{
9508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9509 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
9510 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
9511 to the 386 even if absent in the intel reference manuals and some
9512 3rd party opcode listings. */
9513 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9514
9515 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9516 {
9517 /*
9518 * Register target
9519 */
9520 IEMOP_HLP_NO_LOCK_PREFIX();
9521 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9522 switch (pIemCpu->enmEffOpSize)
9523 {
9524 case IEMMODE_16BIT:
9525 {
9526 IEM_MC_BEGIN(3, 0);
9527 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9528 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
9529 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9530
9531 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9532 IEM_MC_REF_EFLAGS(pEFlags);
9533 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9534
9535 IEM_MC_ADVANCE_RIP();
9536 IEM_MC_END();
9537 break;
9538 }
9539
9540 case IEMMODE_32BIT:
9541 {
9542 IEM_MC_BEGIN(3, 0);
9543 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9544 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
9545 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9546
9547 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9548 IEM_MC_REF_EFLAGS(pEFlags);
9549 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9550 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9551
9552 IEM_MC_ADVANCE_RIP();
9553 IEM_MC_END();
9554 break;
9555 }
9556
9557 case IEMMODE_64BIT:
9558 {
9559 IEM_MC_BEGIN(3, 0);
9560 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9561 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
9562 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9563
9564 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9565 IEM_MC_REF_EFLAGS(pEFlags);
9566 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9567
9568 IEM_MC_ADVANCE_RIP();
9569 IEM_MC_END();
9570 break;
9571 }
9572 }
9573 }
9574 else
9575 {
9576 /*
9577 * Memory target.
9578 */
9579 uint32_t fAccess;
9580 if (pImpl->pfnLockedU16)
9581 fAccess = IEM_ACCESS_DATA_RW;
9582 else
9583 { /* CMP */
9584 IEMOP_HLP_NO_LOCK_PREFIX();
9585 fAccess = IEM_ACCESS_DATA_R;
9586 }
9587
9588 switch (pIemCpu->enmEffOpSize)
9589 {
9590 case IEMMODE_16BIT:
9591 {
9592 IEM_MC_BEGIN(3, 2);
9593 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9594 IEM_MC_ARG(uint16_t, u16Src, 1);
9595 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9597
9598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9599 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9600 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
9601 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9602 IEM_MC_FETCH_EFLAGS(EFlags);
9603 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9604 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9605 else
9606 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9607
9608 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9609 IEM_MC_COMMIT_EFLAGS(EFlags);
9610 IEM_MC_ADVANCE_RIP();
9611 IEM_MC_END();
9612 break;
9613 }
9614
9615 case IEMMODE_32BIT:
9616 {
9617 IEM_MC_BEGIN(3, 2);
9618 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9619 IEM_MC_ARG(uint32_t, u32Src, 1);
9620 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9622
9623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9625 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
9626 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9627 IEM_MC_FETCH_EFLAGS(EFlags);
9628 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9629 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9630 else
9631 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9632
9633 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9634 IEM_MC_COMMIT_EFLAGS(EFlags);
9635 IEM_MC_ADVANCE_RIP();
9636 IEM_MC_END();
9637 break;
9638 }
9639
9640 case IEMMODE_64BIT:
9641 {
9642 IEM_MC_BEGIN(3, 2);
9643 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9644 IEM_MC_ARG(uint64_t, u64Src, 1);
9645 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9647
9648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9649 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9650 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
9651 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9652 IEM_MC_FETCH_EFLAGS(EFlags);
9653 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9654 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9655 else
9656 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9657
9658 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9659 IEM_MC_COMMIT_EFLAGS(EFlags);
9660 IEM_MC_ADVANCE_RIP();
9661 IEM_MC_END();
9662 break;
9663 }
9664 }
9665 }
9666 return VINF_SUCCESS;
9667}
9668
9669
/** Opcode 0x84 - TEST Eb,Gb: byte AND for flags only; forwards to the common
 *  r/m8,r8 binary-operator worker with the TEST implementation table. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9678
9679
/** Opcode 0x85 - TEST Ev,Gv: word/dword/qword AND for flags only; forwards to
 *  the common r/m,r binary-operator worker with the TEST implementation table. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9688
9689
/** Opcode 0x86 - XCHG Eb,Gb: exchange a byte register with r/m8. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-register form rejects LOCK; the memory form below does not
           (XCHG with a memory operand is implicitly locked per the SDM). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        /* The worker swaps *pu8Mem and *pu8Reg in place. */
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9737
9738
/** Opcode 0x87 - XCHG Ev,Gv: exchange a word/dword/qword register with r/m. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-register form rejects LOCK; the memory form below does not
           (XCHG with a memory operand is implicitly locked per the SDM). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* The worker swaps *pu16Mem and *pu16Reg in place. */
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit GPR write through a reference: clear bits 63:32 explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9860
9861
/** Opcode 0x88 - MOV Eb,Gb: store a byte register to r/m8. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: copy reg -> r/m. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9900
9901
9902/** Opcode 0x89. */
9903FNIEMOP_DEF(iemOp_mov_Ev_Gv)
9904{
9905 IEMOP_MNEMONIC("mov Ev,Gv");
9906
9907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9908 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
9909
9910 /*
9911 * If rm is denoting a register, no more instruction bytes.
9912 */
9913 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9914 {
9915 switch (pIemCpu->enmEffOpSize)
9916 {
9917 case IEMMODE_16BIT:
9918 IEM_MC_BEGIN(0, 1);
9919 IEM_MC_LOCAL(uint16_t, u16Value);
9920 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9921 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
9922 IEM_MC_ADVANCE_RIP();
9923 IEM_MC_END();
9924 break;
9925
9926 case IEMMODE_32BIT:
9927 IEM_MC_BEGIN(0, 1);
9928 IEM_MC_LOCAL(uint32_t, u32Value);
9929 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9930 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
9931 IEM_MC_ADVANCE_RIP();
9932 IEM_MC_END();
9933 break;
9934
9935 case IEMMODE_64BIT:
9936 IEM_MC_BEGIN(0, 1);
9937 IEM_MC_LOCAL(uint64_t, u64Value);
9938 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9939 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
9940 IEM_MC_ADVANCE_RIP();
9941 IEM_MC_END();
9942 break;
9943 }
9944 }
9945 else
9946 {
9947 /*
9948 * We're writing a register to memory.
9949 */
9950 switch (pIemCpu->enmEffOpSize)
9951 {
9952 case IEMMODE_16BIT:
9953 IEM_MC_BEGIN(0, 2);
9954 IEM_MC_LOCAL(uint16_t, u16Value);
9955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9956 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9957 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9958 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
9959 IEM_MC_ADVANCE_RIP();
9960 IEM_MC_END();
9961 break;
9962
9963 case IEMMODE_32BIT:
9964 IEM_MC_BEGIN(0, 2);
9965 IEM_MC_LOCAL(uint32_t, u32Value);
9966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9968 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9969 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
9970 IEM_MC_ADVANCE_RIP();
9971 IEM_MC_END();
9972 break;
9973
9974 case IEMMODE_64BIT:
9975 IEM_MC_BEGIN(0, 2);
9976 IEM_MC_LOCAL(uint64_t, u64Value);
9977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9979 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
9980 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
9981 IEM_MC_ADVANCE_RIP();
9982 IEM_MC_END();
9983 break;
9984 }
9985 }
9986 return VINF_SUCCESS;
9987}
9988
9989
/** Opcode 0x8a - MOV Gb,Eb: load a byte register from r/m8. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: copy r/m -> reg. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10026
10027
/** Opcode 0x8b.  'mov Gv,Ev' - loads a general register from a register or
 *  memory operand, honouring the effective operand size (16/32/64-bit). */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: straight register-to-register copy; destination
           index combines the MODRM reg field with the REX.R extension. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10114
10115
10116/** Opcode 0x63. */
10117FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10118{
10119 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10120 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10121 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10122 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10123 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10124}
10125
10126
/** Opcode 0x8c.  'mov Ev,Sw' - stores a segment register to a general
 *  register (operand-size sensitive, upper bits cleared) or to a word-sized
 *  memory location. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10199
10200
10201
10202
/** Opcode 0x8d.  'lea Gv,M' - stores the effective address of the memory
 *  operand (no memory access), truncated to the effective operand size.
 *  The register form of ModR/M is invalid for LEA. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10247
10248
/** Opcode 0x8e.  'mov Sw,Ev' - loads a segment register from a 16-bit
 *  register or memory operand; CS is not a valid destination.  The actual
 *  load is deferred to iemCImpl_load_SReg (descriptor checks, etc.). */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10302
10303
/** Opcode 0x8f /0.  'pop Ev' - pops the stack into a register or memory
 *  operand.  The memory form is tricky because Intel defines RSP as already
 *  incremented when the effective address is computed. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: decode the R/M bytes, then rewind the opcode cursor so the
       second pass below re-consumes the same displacement/SIB bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: recompute the effective address with rSP temporarily
       advanced by the operand size, then restore rSP. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary, store it to memory, and only commit rSP (and RIP)
       if both steps succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10405
10406
10407/** Opcode 0x8f. */
10408FNIEMOP_DEF(iemOp_Grp1A)
10409{
10410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10411 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10412 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10413
10414 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10415 /** @todo XOP decoding. */
10416 IEMOP_MNEMONIC("3-byte-xop");
10417 return IEMOP_RAISE_INVALID_OPCODE();
10418}
10419
10420
10421/**
10422 * Common 'xchg reg,rAX' helper.
10423 */
10424FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10425{
10426 IEMOP_HLP_NO_LOCK_PREFIX();
10427
10428 iReg |= pIemCpu->uRexB;
10429 switch (pIemCpu->enmEffOpSize)
10430 {
10431 case IEMMODE_16BIT:
10432 IEM_MC_BEGIN(0, 2);
10433 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10434 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10435 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10436 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10437 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10438 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10439 IEM_MC_ADVANCE_RIP();
10440 IEM_MC_END();
10441 return VINF_SUCCESS;
10442
10443 case IEMMODE_32BIT:
10444 IEM_MC_BEGIN(0, 2);
10445 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10446 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10447 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10448 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10449 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10450 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10451 IEM_MC_ADVANCE_RIP();
10452 IEM_MC_END();
10453 return VINF_SUCCESS;
10454
10455 case IEMMODE_64BIT:
10456 IEM_MC_BEGIN(0, 2);
10457 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10458 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10459 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10460 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10461 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10462 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10463 IEM_MC_ADVANCE_RIP();
10464 IEM_MC_END();
10465 return VINF_SUCCESS;
10466
10467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10468 }
10469}
10470
10471
/** Opcode 0x90.  NOP, or 'xchg r8,rAX' when REX.B is present; with a LOCK
 *  (F3 is handled as REP elsewhere) prefix it is logged as PAUSE, but both
 *  forms execute as a plain no-op here. */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10491
10492
/** Opcode 0x91 - 'xchg rCX,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10499
10500
/** Opcode 0x92 - 'xchg rDX,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10507
10508
/** Opcode 0x93 - 'xchg rBX,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10515
10516
10517/** Opcode 0x94. */
10518FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10519{
10520 IEMOP_MNEMONIC("xchg rSX,rAX");
10521 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10522}
10523
10524
/** Opcode 0x95 - 'xchg rBP,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10531
10532
/** Opcode 0x96 - 'xchg rSI,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10539
10540
/** Opcode 0x97 - 'xchg rDI,rAX'; forwards to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10547
10548
/** Opcode 0x98.  CBW/CWDE/CDQE - sign-extends the low half of rAX into the
 *  full operand-size register (AL->AX, AX->EAX, EAX->RAX) by testing the sign
 *  bit and OR-ing/AND-ing the upper half. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10594
10595
/** Opcode 0x99.  CWD/CDQ/CQO - sign-extends rAX into rDX by replicating the
 *  sign bit of AX/EAX/RAX into all of DX/EDX/RDX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10641
10642
/** Opcode 0x9a.  'call Ap' - direct far call with an immediate seg:off
 *  pointer; invalid in 64-bit mode.  The actual transfer is deferred to
 *  iemCImpl_callf. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10659
10660
/** Opcode 0x9b. (aka fwait)
 *  Checks for pending x87 exceptions / device-not-available and otherwise
 *  does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10674
10675
10676/** Opcode 0x9c. */
10677FNIEMOP_DEF(iemOp_pushf_Fv)
10678{
10679 IEMOP_HLP_NO_LOCK_PREFIX();
10680 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10681 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10682}
10683
10684
10685/** Opcode 0x9d. */
10686FNIEMOP_DEF(iemOp_popf_Fv)
10687{
10688 IEMOP_HLP_NO_LOCK_PREFIX();
10689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10690 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10691}
10692
10693
/** Opcode 0x9e.  SAHF - stores AH into the low byte of EFLAGS (SF, ZF, AF,
 *  PF, CF); #UD in 64-bit mode unless the CPU reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags from AH, force the reserved bit 1, and merge
       into the untouched upper 24 bits of EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10716
10717
/** Opcode 0x9f.  LAHF - loads the low byte of EFLAGS into AH; #UD in 64-bit
 *  mode unless the CPU reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10734
10735
10736/**
10737 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10738 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10739 * prefixes. Will return on failures.
10740 * @param a_GCPtrMemOff The variable to store the offset in.
10741 */
10742#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10743 do \
10744 { \
10745 switch (pIemCpu->enmEffAddrMode) \
10746 { \
10747 case IEMMODE_16BIT: \
10748 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10749 break; \
10750 case IEMMODE_32BIT: \
10751 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10752 break; \
10753 case IEMMODE_64BIT: \
10754 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
10755 break; \
10756 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10757 } \
10758 IEMOP_HLP_NO_LOCK_PREFIX(); \
10759 } while (0)
10760
10761/** Opcode 0xa0. */
10762FNIEMOP_DEF(iemOp_mov_Al_Ob)
10763{
10764 /*
10765 * Get the offset and fend of lock prefixes.
10766 */
10767 RTGCPTR GCPtrMemOff;
10768 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10769
10770 /*
10771 * Fetch AL.
10772 */
10773 IEM_MC_BEGIN(0,1);
10774 IEM_MC_LOCAL(uint8_t, u8Tmp);
10775 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10776 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10777 IEM_MC_ADVANCE_RIP();
10778 IEM_MC_END();
10779 return VINF_SUCCESS;
10780}
10781
10782
/** Opcode 0xa1.  'mov rAX,Ov' - load rAX from an absolute moffs address at
 *  the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10828
10829
10830/** Opcode 0xa2. */
10831FNIEMOP_DEF(iemOp_mov_Ob_AL)
10832{
10833 /*
10834 * Get the offset and fend of lock prefixes.
10835 */
10836 RTGCPTR GCPtrMemOff;
10837 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10838
10839 /*
10840 * Store AL.
10841 */
10842 IEM_MC_BEGIN(0,1);
10843 IEM_MC_LOCAL(uint8_t, u8Tmp);
10844 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10845 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10846 IEM_MC_ADVANCE_RIP();
10847 IEM_MC_END();
10848 return VINF_SUCCESS;
10849}
10850
10851
10852/** Opcode 0xa3. */
10853FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10854{
10855 /*
10856 * Get the offset and fend of lock prefixes.
10857 */
10858 RTGCPTR GCPtrMemOff;
10859 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10860
10861 /*
10862 * Store rAX.
10863 */
10864 switch (pIemCpu->enmEffOpSize)
10865 {
10866 case IEMMODE_16BIT:
10867 IEM_MC_BEGIN(0,1);
10868 IEM_MC_LOCAL(uint16_t, u16Tmp);
10869 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10870 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10871 IEM_MC_ADVANCE_RIP();
10872 IEM_MC_END();
10873 return VINF_SUCCESS;
10874
10875 case IEMMODE_32BIT:
10876 IEM_MC_BEGIN(0,1);
10877 IEM_MC_LOCAL(uint32_t, u32Tmp);
10878 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10879 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10880 IEM_MC_ADVANCE_RIP();
10881 IEM_MC_END();
10882 return VINF_SUCCESS;
10883
10884 case IEMMODE_64BIT:
10885 IEM_MC_BEGIN(0,1);
10886 IEM_MC_LOCAL(uint64_t, u64Tmp);
10887 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10888 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10889 IEM_MC_ADVANCE_RIP();
10890 IEM_MC_END();
10891 return VINF_SUCCESS;
10892
10893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10894 }
10895}
10896
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one MOVS case: loads ValBits from DS(-override):rSI, stores to
 * ES:rDI, then advances or retreats both index registers (at AddrBits width)
 * according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10915
/** Opcode 0xa4.  MOVSB - byte string move; REP/REPNE forms are deferred to
 *  the C implementation, the plain form uses IEM_MOVS_CASE per address size. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10949
10950
10951/** Opcode 0xa5. */
10952FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
10953{
10954 IEMOP_HLP_NO_LOCK_PREFIX();
10955
10956 /*
10957 * Use the C implementation if a repeat prefix is encountered.
10958 */
10959 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10960 {
10961 IEMOP_MNEMONIC("rep movs Xv,Yv");
10962 switch (pIemCpu->enmEffOpSize)
10963 {
10964 case IEMMODE_16BIT:
10965 switch (pIemCpu->enmEffAddrMode)
10966 {
10967 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
10968 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
10969 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
10970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10971 }
10972 break;
10973 case IEMMODE_32BIT:
10974 switch (pIemCpu->enmEffAddrMode)
10975 {
10976 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
10977 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
10978 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
10979 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10980 }
10981 case IEMMODE_64BIT:
10982 switch (pIemCpu->enmEffAddrMode)
10983 {
10984 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
10985 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
10986 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
10987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10988 }
10989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10990 }
10991 }
10992 IEMOP_MNEMONIC("movs Xv,Yv");
10993
10994 /*
10995 * Annoying double switch here.
10996 * Using ugly macro for implementing the cases, sharing it with movsb.
10997 */
10998 switch (pIemCpu->enmEffOpSize)
10999 {
11000 case IEMMODE_16BIT:
11001 switch (pIemCpu->enmEffAddrMode)
11002 {
11003 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11004 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11005 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11007 }
11008 break;
11009
11010 case IEMMODE_32BIT:
11011 switch (pIemCpu->enmEffAddrMode)
11012 {
11013 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11014 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11015 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11017 }
11018 break;
11019
11020 case IEMMODE_64BIT:
11021 switch (pIemCpu->enmEffAddrMode)
11022 {
11023 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11024 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11025 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11027 }
11028 break;
11029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11030 }
11031 return VINF_SUCCESS;
11032}
11033
11034#undef IEM_MOVS_CASE
11035
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one CMPS case: loads ValBits from DS(-override):rSI and ES:rDI,
 * compares them via iemAImpl_cmp_uNN (updating EFLAGS), then advances or
 * retreats both index registers (at AddrBits width) according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11062
11063/** Opcode 0xa6. */
11064FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11065{
11066 IEMOP_HLP_NO_LOCK_PREFIX();
11067
11068 /*
11069 * Use the C implementation if a repeat prefix is encountered.
11070 */
11071 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11072 {
11073 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11074 switch (pIemCpu->enmEffAddrMode)
11075 {
11076 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11077 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11078 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11079 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11080 }
11081 }
11082 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11083 {
11084 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11085 switch (pIemCpu->enmEffAddrMode)
11086 {
11087 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11088 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11089 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11090 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11091 }
11092 }
11093 IEMOP_MNEMONIC("cmps Xb,Yb");
11094
11095 /*
11096 * Sharing case implementation with cmps[wdq] below.
11097 */
11098 switch (pIemCpu->enmEffAddrMode)
11099 {
11100 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11101 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11102 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11104 }
11105 return VINF_SUCCESS;
11106
11107}
11108
11109
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Dispatches on operand size and address size.  REPE/REPNE variants are
 * deferred to the C implementations; the plain instruction shares the
 * IEM_CMPS_CASE macro with cmpsb above.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11228
11229#undef IEM_CMPS_CASE
11230
/**
 * Opcode 0xa8 - TEST AL,Ib.
 *
 * Delegates to the common AL,Ib binary-operator helper with the TEST
 * implementation table.  AF is marked undefined for verification mode
 * (architecturally undefined after TEST).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11238
11239
/**
 * Opcode 0xa9 - TEST rAX,Iz.
 *
 * Delegates to the common rAX,Iz binary-operator helper with the TEST
 * implementation table.  AF is marked undefined for verification mode
 * (architecturally undefined after TEST).
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11247
11248
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to implement one
 * operand-size/address-size combination of STOS without a REP prefix.
 *
 * Stores AL/AX/EAX/RAX at [ES:xDI], then advances or retreats xDI by the
 * operand size in bytes depending on EFL.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11264
/**
 * Opcode 0xaa - STOSB.
 *
 * REP (REPZ or REPNZ - both act as plain REP for STOS) variants are deferred
 * to the C implementation; otherwise one store step is emitted via
 * IEM_STOS_CASE for the active address size.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11298
11299
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Dispatches on operand size and address size.  REP variants are deferred to
 * the C implementations; the plain instruction shares IEM_STOS_CASE with
 * stosb above.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11382
11383#undef IEM_STOS_CASE
11384
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to implement one
 * operand-size/address-size combination of LODS without a REP prefix.
 *
 * Loads [iEffSeg:xSI] into AL/AX/EAX/RAX, then advances or retreats xSI by
 * the operand size in bytes depending on EFL.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11400
/**
 * Opcode 0xac - LODSB.
 *
 * REP variants are deferred to the C implementation; otherwise one load step
 * is emitted via IEM_LODS_CASE for the active address size.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11434
11435
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Dispatches on operand size and address size.  REP variants are deferred to
 * the C implementations; the plain instruction shares IEM_LODS_CASE with
 * lodsb above.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11518
11519#undef IEM_LODS_CASE
11520
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to implement one
 * operand-size/address-size combination of SCAS without a REP prefix.
 *
 * Compares AL/AX/EAX/RAX against [ES:xDI] via iemAImpl_cmp_uNN (EFLAGS only;
 * the accumulator is passed by reference but CMP does not write it), then
 * advances or retreats xDI by the operand size depending on EFL.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11542
/**
 * Opcode 0xae - SCASB.
 *
 * REPE/REPNE variants are deferred to the C implementation; otherwise one
 * compare step is emitted via IEM_SCAS_CASE for the active address size.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11587
11588
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ.
 *
 * Dispatches on operand size and address size.  REPE/REPNE variants are
 * deferred to the C implementations; the plain instruction shares
 * IEM_SCAS_CASE with scasb above.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* Note: no break here, but the fall-through is unreachable since every
                   path in the inner switch (incl. the default case) returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11704
11705#undef IEM_SCAS_CASE
11706
11707/**
11708 * Common 'mov r8, imm8' helper.
11709 */
11710FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11711{
11712 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11713 IEMOP_HLP_NO_LOCK_PREFIX();
11714
11715 IEM_MC_BEGIN(0, 1);
11716 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11717 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11718 IEM_MC_ADVANCE_RIP();
11719 IEM_MC_END();
11720
11721 return VINF_SUCCESS;
11722}
11723
11724
/** Opcode 0xb0 - MOV AL,Ib (REX.B selects R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11731
11732
/** Opcode 0xb1 - MOV CL,Ib (REX.B selects R9L). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11739
11740
/** Opcode 0xb2 - MOV DL,Ib (REX.B selects R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11747
11748
/** Opcode 0xb3 - MOV BL,Ib (REX.B selects R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11755
11756
/**
 * Opcode 0xb4 - MOV AH,Ib.
 *
 * NOTE(review): index X86_GREG_xSP (4) encodes AH without a REX prefix and
 * SPL/R12L with one - presumably resolved inside the GREG_U8 accessors based
 * on the REX prefix state; confirm against the iemGRegRefU8 implementation.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11763
11764
/** Opcode 0xb5 - MOV CH,Ib (index 5 encodes CH without REX, BPL/R13L with - see note on iemOp_mov_AH_Ib's encoding). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11771
11772
/** Opcode 0xb6 - MOV DH,Ib (index 6 encodes DH without REX, SIL/R14L with). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11779
11780
/** Opcode 0xb7 - MOV BH,Ib (index 7 encodes BH without REX, DIL/R15L with). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11787
11788
11789/**
11790 * Common 'mov regX,immX' helper.
11791 */
11792FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11793{
11794 switch (pIemCpu->enmEffOpSize)
11795 {
11796 case IEMMODE_16BIT:
11797 {
11798 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11799 IEMOP_HLP_NO_LOCK_PREFIX();
11800
11801 IEM_MC_BEGIN(0, 1);
11802 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11803 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11804 IEM_MC_ADVANCE_RIP();
11805 IEM_MC_END();
11806 break;
11807 }
11808
11809 case IEMMODE_32BIT:
11810 {
11811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11812 IEMOP_HLP_NO_LOCK_PREFIX();
11813
11814 IEM_MC_BEGIN(0, 1);
11815 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11816 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11817 IEM_MC_ADVANCE_RIP();
11818 IEM_MC_END();
11819 break;
11820 }
11821 case IEMMODE_64BIT:
11822 {
11823 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11824 IEMOP_HLP_NO_LOCK_PREFIX();
11825
11826 IEM_MC_BEGIN(0, 1);
11827 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11828 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11829 IEM_MC_ADVANCE_RIP();
11830 IEM_MC_END();
11831 break;
11832 }
11833 }
11834
11835 return VINF_SUCCESS;
11836}
11837
11838
/** Opcode 0xb8 - MOV rAX,Iv (REX.B selects r8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
11845
11846
/** Opcode 0xb9 - MOV rCX,Iv (REX.B selects r9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
11853
11854
/** Opcode 0xba - MOV rDX,Iv (REX.B selects r10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
11861
11862
/** Opcode 0xbb - MOV rBX,Iv (REX.B selects r11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
11869
11870
/** Opcode 0xbc - MOV rSP,Iv (REX.B selects r12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
11877
11878
/** Opcode 0xbd - MOV rBP,Iv (REX.B selects r13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
11885
11886
/** Opcode 0xbe - MOV rSI,Iv (REX.B selects r14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
11893
11894
/** Opcode 0xbf - MOV rDI,Iv (REX.B selects r15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
11901
11902
/**
 * Opcode 0xc0 - Group 2 Eb,Ib (ROL/ROR/RCL/RCR/SHL/SHR/SAR by imm8).
 *
 * The ModRM reg field selects the shift/rotate operation (/6 is an invalid
 * encoding); the shift count is an imm8 that follows the ModRM byte (and any
 * displacement, hence the fetch after IEM_MC_CALC_RM_EFF_ADDR in the memory
 * path).  Requires at least an 80186 (per IEMOP_HLP_MIN_186).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The '1' tells the effective-address calculation one more opcode
           byte (the imm8 count) follows the displacement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11962
11963
/**
 * Opcode 0xc1 - Group 2: rotate/shift Ev by an immediate byte count.
 *
 * The ModR/M reg field selects the operation: /0=ROL, /1=ROR, /2=RCL,
 * /3=RCR, /4=SHL, /5=SHR, /7=SAR; /6 is an invalid encoding.
 * Immediate shift counts were introduced with the 186, hence the CPU
 * minimum check.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The shift/rotate helpers may leave OF and AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,          pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,          pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,          pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,          pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,          pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,       cShiftArg, cShift,  1);
                IEM_MC_ARG(uint32_t *,          pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                /* The imm8 count is fetched after the displacement bytes. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12101
12102
/**
 * Opcode 0xc2 - RETN imm16: near return, then pop imm16 extra bytes of
 * arguments off the stack.  Defaults to 64-bit operand size in long mode.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12112
12113
/**
 * Opcode 0xc3 - RETN: plain near return (zero bytes of arguments popped,
 * see the shared iemCImpl_retn worker).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12122
12123
/**
 * Opcode 0xc4 - LES Gv,Mp, or the 2-byte VEX prefix.
 *
 * In 64-bit mode, or with MOD=3 in legacy/compat mode, this byte is the
 * VEX C4 prefix; otherwise it is the legacy LES load.  VEX decoding is
 * not implemented yet, so the prefix form raises \#UD for now.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12144
12145
/**
 * Opcode 0xc5 - LDS Gv,Mp, or the 3-byte VEX prefix.
 *
 * Legacy/compat mode with MOD!=3 decodes as LDS; everything else is the
 * VEX C5 prefix form, which is not implemented yet and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        /* VEX is not recognized in real or v86 mode - raise \#UD there. */
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte so the
       instruction length is right even though we raise \#UD below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12183
12184
/**
 * Opcode 0xc6 - Group 11: MOV Eb,Ib.
 *
 * Only the /0 encoding (MOV) is defined in this group; any other reg
 * field value raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* cbImm=1: the imm8 follows the ModR/M encoding and is fetched
           after the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12216
12217
/**
 * Opcode 0xc7 - Group 11: MOV Ev,Iz.
 *
 * Only the /0 encoding (MOV) is defined in this group.  In 64-bit
 * operand size the immediate is 32 bits, sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is 32-bit here, sign-extended to the 64-bit register. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm=2: a 16-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm=4: a 32-bit immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* cbImm=4: the immediate is still 32 bits in 64-bit mode. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12298
12299
12300
12301
/**
 * Opcode 0xc8 - ENTER imm16,imm8: create a stack frame of cbFrame bytes
 * with the given lexical nesting level.  186+ instruction.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12313
12314
12315/** Opcode 0xc9. */
12316FNIEMOP_DEF(iemOp_leave)
12317{
12318 IEMOP_MNEMONIC("retn");
12319 IEMOP_HLP_MIN_186();
12320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12321 IEMOP_HLP_NO_LOCK_PREFIX();
12322 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12323}
12324
12325
/**
 * Opcode 0xca - RETF imm16: far return, then pop imm16 extra bytes of
 * arguments off the stack.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12335
12336
/**
 * Opcode 0xcb - RETF: plain far return (zero bytes of arguments popped,
 * see the shared iemCImpl_retf worker).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12345
12346
/**
 * Opcode 0xcc - INT3: breakpoint, raises \#BP.  The fIsBpInstr flag lets
 * the iemCImpl_int worker distinguish this from "int 3" (0xcd 0x03).
 */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12353
12354
/**
 * Opcode 0xcd - INT imm8: software interrupt to the given vector.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12362
12363
/**
 * Opcode 0xce - INTO: raise \#OF via the interrupt path if OF is set.
 * Invalid in 64-bit mode.  The conditional check lives in iemCImpl_int.
 *
 * NOTE(review): unlike the other int encodings this one does not call
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() - confirm whether that is
 * intentional.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF,  0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,        1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12377
12378
/**
 * Opcode 0xcf - IRET: return from interrupt; all the heavy lifting
 * (mode/privilege handling) is in iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12386
12387
/**
 * Opcode 0xd0 - Group 2: rotate/shift Eb by a fixed count of 1.
 *
 * ModR/M reg field selects the operation (/0=ROL ... /7=SAR); /6 is an
 * invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The shift/rotate helpers may leave OF and AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=0: no immediate bytes follow the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12443
12444
12445
/**
 * Opcode 0xd1 - Group 2: rotate/shift Ev by a fixed count of 1.
 *
 * ModR/M reg field selects the operation (/0=ROL ... /7=SAR); /6 is an
 * invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The shift/rotate helpers may leave OF and AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=0: no immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12575
12576
/**
 * Opcode 0xd2 - Group 2: rotate/shift Eb by the count in CL.
 *
 * ModR/M reg field selects the operation (/0=ROL ... /7=SAR); /6 is an
 * invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* The shift/rotate helpers may leave OF and AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=0: no immediate bytes follow the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12634
12635
/**
 * Opcode 0xd3 - Group 2: rotate/shift Ev by the count in CL.
 *
 * ModR/M reg field selects the operation (/0=ROL ... /7=SAR); /6 is an
 * invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The shift/rotate helpers may leave OF and AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=0: no immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12771
/**
 * Opcode 0xd4 - AAM imm8: ASCII adjust AX after multiply.
 * An immediate of zero raises \#DE (divide error), matching real hardware.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12783
12784
/**
 * Opcode 0xd5 - AAD imm8: ASCII adjust AX before division.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12794
12795
12796/** Opcode 0xd6. */
12797FNIEMOP_DEF(iemOp_salc)
12798{
12799 IEMOP_MNEMONIC("salc");
12800 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12801 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12803 IEMOP_HLP_NO_64BIT();
12804
12805 IEM_MC_BEGIN(0, 0);
12806 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12807 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12808 } IEM_MC_ELSE() {
12809 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12810 } IEM_MC_ENDIF();
12811 IEM_MC_ADVANCE_RIP();
12812 IEM_MC_END();
12813 return VINF_SUCCESS;
12814}
12815
12816
/**
 * Opcode 0xd7 - XLAT: AL = [rBX + zero-extended AL], using the current
 * effective segment.  Three variants for the 16/32/64-bit effective
 * address sizes.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL into the address width, then add rBX. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12863
12864
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 *
 * Raises \#NM if CR0.TS/EM demand it and a pending FPU exception if one
 * is latched; stack underflow is signalled when either register is empty.
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12895
12896
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW) - no value is stored back.
 *
 * @param   bRm         The ModR/M byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination register to tag on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12927
12928
12929/**
12930 * Common worker for FPU instructions working on ST0 and STn, only affecting
12931 * flags, and popping when done.
12932 *
12933 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12934 */
12935FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12936{
12937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12938
12939 IEM_MC_BEGIN(3, 1);
12940 IEM_MC_LOCAL(uint16_t, u16Fsw);
12941 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12942 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12943 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12944
12945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12946 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12947 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12948 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12949 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
12950 IEM_MC_ELSE()
12951 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
12952 IEM_MC_ENDIF();
12953 IEM_MC_USED_FPU();
12954 IEM_MC_ADVANCE_RIP();
12955
12956 IEM_MC_END();
12957 return VINF_SUCCESS;
12958}
12959
12960
/** Opcode 0xd8 11/0: fadd st0,stN — dispatches to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12967
12968
/** Opcode 0xd8 11/1: fmul st0,stN — dispatches to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12975
12976
/** Opcode 0xd8 11/2: fcom st0,stN — compare only, updates FSW, no store. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12983
12984
/** Opcode 0xd8 11/3: fcomp st0,stN — same as fcom but pops afterwards (shares
 *  the fcom assembly worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12991
12992
/** Opcode 0xd8 11/4: fsub st0,stN — dispatches to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12999
13000
/** Opcode 0xd8 11/5: fsubr st0,stN — reversed subtract, common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13007
13008
/** Opcode 0xd8 11/6: fdiv st0,stN — dispatches to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13015
13016
/** Opcode 0xd8 11/7: fdivr st0,stN — reversed divide, common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13023
13024
13025/**
13026 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13027 * the result in ST0.
13028 *
13029 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13030 */
13031FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13032{
13033 IEM_MC_BEGIN(3, 3);
13034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13035 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13036 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13037 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13038 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13039 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13040
13041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13043
13044 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13045 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13046 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13047
13048 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13049 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13050 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13051 IEM_MC_ELSE()
13052 IEM_MC_FPU_STACK_UNDERFLOW(0);
13053 IEM_MC_ENDIF();
13054 IEM_MC_USED_FPU();
13055 IEM_MC_ADVANCE_RIP();
13056
13057 IEM_MC_END();
13058 return VINF_SUCCESS;
13059}
13060
13061
/** Opcode 0xd8 !11/0: fadd st0,m32r — dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13068
13069
/** Opcode 0xd8 !11/1: fmul st0,m32r — dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13076
13077
/** Opcode 0xd8 !11/2: fcom st0,m32r — compares ST0 against a 32-bit real from
 *  memory; only FSW is updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variants also record FPUDP/FPUDS alongside FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13110
13111
/** Opcode 0xd8 !11/3: fcomp st0,m32r — like fcom st0,m32r, but pops the
 *  register stack when done. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13144
13145
/** Opcode 0xd8 !11/4: fsub st0,m32r — dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13152
13153
/** Opcode 0xd8 !11/5: fsubr st0,m32r — reversed subtract, common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13160
13161
/** Opcode 0xd8 !11/6: fdiv st0,m32r — dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13168
13169
/** Opcode 0xd8 !11/7: fdivr st0,m32r — reversed divide, common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13176
13177
/** Opcode 0xd8 — first x87 FPU escape byte. Dispatches on the ModR/M reg
 *  field: mod=3 selects the register (STn) forms, otherwise the m32r memory
 *  forms. Records the FPU opcode offset for FOP bookkeeping first. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the escape byte sits so the FPU opcode (FOP) can be derived. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13215
13216
/** Opcode 0xd9 /0 mem32real — FLD m32r: converts a 32-bit real from memory to
 *  80-bit and pushes it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST7 (relative) must be free for the push; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13249
13250
/** Opcode 0xd9 !11/2 mem32real — FST m32r: stores ST0 to memory as a 32-bit
 *  real. On an empty ST0, writes a negative QNaN instead if the invalid-op
 *  exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults are raised early. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13285
13286
/** Opcode 0xd9 !11/3 — FSTP m32r: like FST m32r, but pops the register stack
 *  afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: store default QNaN if #IA is masked, then flag underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13321
13322
/** Opcode 0xd9 !11/4 — FLDENV m14/28byte: loads the FPU environment; the
 *  14- vs 28-byte layout follows the effective operand size, handled by the
 *  C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13339
13340
13341/** Opcode 0xd9 !11/5 */
13342FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13343{
13344 IEMOP_MNEMONIC("fldcw m2byte");
13345 IEM_MC_BEGIN(1, 1);
13346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13347 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13350 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13351 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13352 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13353 IEM_MC_END();
13354 return VINF_SUCCESS;
13355}
13356
13357
13358/** Opcode 0xd9 !11/6 */
13359FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13360{
13361 IEMOP_MNEMONIC("fstenv m14/m28byte");
13362 IEM_MC_BEGIN(3, 0);
13363 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13364 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13365 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13368 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13369 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13370 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13371 IEM_MC_END();
13372 return VINF_SUCCESS;
13373}
13374
13375
/** Opcode 0xd9 !11/7 — FNSTCW m2byte: stores the FPU control word to memory
 *  (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13392
13393
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? — FNOP: does nothing except update
 *  the FPU opcode/instruction pointer bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13411
13412
/** Opcode 0xd9 11/0 stN — FLD ST(i): pushes a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* FSW of 0: a plain load sets no condition flags. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13438
13439
/** Opcode 0xd9 11/3 stN — FXCH ST(i): exchanges ST0 and ST(i). The underflow
 *  path (either register empty) is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: ST(i)'s value (with C1 set) goes to ST0, ST0's value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13468
13469
/** Opcode 0xd9 11/4, 0xdd 11/2 — FSTP ST(i): copies ST0 to ST(i) and pops.
 *  The ST(0) destination case is special-cased as it amounts to a pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop if non-empty. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13512
13513
13514/**
13515 * Common worker for FPU instructions working on ST0 and replaces it with the
13516 * result, i.e. unary operators.
13517 *
13518 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13519 */
13520FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13521{
13522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13523
13524 IEM_MC_BEGIN(2, 1);
13525 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13526 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13527 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13528
13529 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13530 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13531 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13532 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13533 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13534 IEM_MC_ELSE()
13535 IEM_MC_FPU_STACK_UNDERFLOW(0);
13536 IEM_MC_ENDIF();
13537 IEM_MC_USED_FPU();
13538 IEM_MC_ADVANCE_RIP();
13539
13540 IEM_MC_END();
13541 return VINF_SUCCESS;
13542}
13543
13544
/** Opcode 0xd9 0xe0: fchs st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13551
13552
/** Opcode 0xd9 0xe1: fabs st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13559
13560
13561/**
13562 * Common worker for FPU instructions working on ST0 and only returns FSW.
13563 *
13564 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13565 */
13566FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13567{
13568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13569
13570 IEM_MC_BEGIN(2, 1);
13571 IEM_MC_LOCAL(uint16_t, u16Fsw);
13572 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13573 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13574
13575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13576 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13577 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13578 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13579 IEM_MC_UPDATE_FSW(u16Fsw);
13580 IEM_MC_ELSE()
13581 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13582 IEM_MC_ENDIF();
13583 IEM_MC_USED_FPU();
13584 IEM_MC_ADVANCE_RIP();
13585
13586 IEM_MC_END();
13587 return VINF_SUCCESS;
13588}
13589
13590
/** Opcode 0xd9 0xe4: ftst st0 — flags-only, via the no-store ST0 worker. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13597
13598
/** Opcode 0xd9 0xe5: fxam st0 — flags-only, via the no-store ST0 worker. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13605
13606
13607/**
13608 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13609 *
13610 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13611 */
13612FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13613{
13614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13615
13616 IEM_MC_BEGIN(1, 1);
13617 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13618 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13619
13620 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13621 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13622 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13623 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13624 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13625 IEM_MC_ELSE()
13626 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13627 IEM_MC_ENDIF();
13628 IEM_MC_USED_FPU();
13629 IEM_MC_ADVANCE_RIP();
13630
13631 IEM_MC_END();
13632 return VINF_SUCCESS;
13633}
13634
13635
/** Opcode 0xd9 0xe8: fld1 — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13642
13643
/** Opcode 0xd9 0xe9: fldl2t — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13650
13651
/** Opcode 0xd9 0xea: fldl2e — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13658
/** Opcode 0xd9 0xeb: fldpi — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13665
13666
/** Opcode 0xd9 0xec: fldlg2 — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13673
/** Opcode 0xd9 0xed: fldln2 — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13680
13681
/** Opcode 0xd9 0xee: fldz — pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13688
13689
/** Opcode 0xd9 0xf0: f2xm1 st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13696
13697
13698/** Opcode 0xd9 0xf1. */
13699FNIEMOP_DEF(iemOp_fylx2)
13700{
13701 IEMOP_MNEMONIC("fylx2 st0");
13702 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13703}
13704
13705
13706/**
13707 * Common worker for FPU instructions working on ST0 and having two outputs, one
13708 * replacing ST0 and one pushed onto the stack.
13709 *
13710 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13711 */
13712FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13713{
13714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13715
13716 IEM_MC_BEGIN(2, 1);
13717 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13718 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13719 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13720
13721 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13722 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13723 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13724 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13725 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13726 IEM_MC_ELSE()
13727 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13728 IEM_MC_ENDIF();
13729 IEM_MC_USED_FPU();
13730 IEM_MC_ADVANCE_RIP();
13731
13732 IEM_MC_END();
13733 return VINF_SUCCESS;
13734}
13735
13736
/** Opcode 0xd9 0xf2: fptan st0 — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13743
13744
13745/**
13746 * Common worker for FPU instructions working on STn and ST0, storing the result
13747 * in STn, and popping the stack unless IE, DE or ZE was raised.
13748 *
13749 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13750 */
13751FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13752{
13753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13754
13755 IEM_MC_BEGIN(3, 1);
13756 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13757 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13758 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13759 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13760
13761 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13762 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13763
13764 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13765 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13766 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13767 IEM_MC_ELSE()
13768 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13769 IEM_MC_ENDIF();
13770 IEM_MC_USED_FPU();
13771 IEM_MC_ADVANCE_RIP();
13772
13773 IEM_MC_END();
13774 return VINF_SUCCESS;
13775}
13776
13777
/** Opcode 0xd9 0xf3: fpatan st1,st0 — result into ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13784
13785
/** Opcode 0xd9 0xf4: fxtract st0 — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13792
13793
/** Opcode 0xd9 0xf5: fprem1 st0,st1 — result into ST0 via the ST0/STn worker. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13800
13801
/** Opcode 0xd9 0xf6 — FDECSTP: decrements the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13824
13825
/** Opcode 0xd9 0xf7 — FINCSTP: increments the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13848
13849
/** Opcode 0xd9 0xf8: fprem st0,st1 — result into ST0 via the ST0/STn worker. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13856
13857
/** Opcode 0xd9 0xf9: fyl2xp1 st1,st0 — result into ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13864
13865
/** Opcode 0xd9 0xfa: fsqrt st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13872
13873
/** Opcode 0xd9 0xfb: fsincos st0 — replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13880
13881
/** Opcode 0xd9 0xfc: frndint st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13888
13889
/** Opcode 0xd9 0xfd: fscale st0,st1 — result into ST0 via the ST0/STn worker. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13896
13897
/** Opcode 0xd9 0xfe: fsin st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13904
13905
/** Opcode 0xd9 0xff: fcos st0 — unary op via the common ST0 worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13912
13913
/** Dispatch table for the 0xd9 (EscF1) register forms 0xe0 thru 0xff; indexed
 *  by (opcode byte - 0xe0). Used by iemOp_EscF1. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
13950
13951
/**
 * Opcode 0xd9 - x87 escape group F1.
 *
 * Records the FPU opcode offset (for FOP/FIP tracking), fetches the ModRM
 * byte and dispatches: register forms (mod=11) by reg field, with reg=4..7
 * going through the g_apfnEscF1_E0toFF table; memory forms by reg field only.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* The escape byte itself is the FPU opcode; offOpcode already points past it. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13993
13994
/**
 * Opcode 0xda 11/0 - FCMOVB ST0,ST(i).
 *
 * Copies ST(i) to ST0 when EFLAGS.CF is set; FPU opcode/IP are updated either
 * way.  Raises stack underflow on ST0 if either register is empty.
 */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) (source) and ST0 (destination) must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14021
14022
/**
 * Opcode 0xda 11/1 - FCMOVE ST0,ST(i).
 *
 * Copies ST(i) to ST0 when EFLAGS.ZF is set; otherwise same structure as
 * iemOp_fcmovb_stN.
 */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14049
14050
/**
 * Opcode 0xda 11/2 - FCMOVBE ST0,ST(i).
 *
 * Copies ST(i) to ST0 when CF or ZF is set (below-or-equal).
 */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14077
14078
/**
 * Opcode 0xda 11/3 - FCMOVU ST0,ST(i).
 *
 * Copies ST(i) to ST0 when EFLAGS.PF is set (the "unordered" condition).
 */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14105
14106
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used for FUCOMPP/FCOMPP-style compares: the worker writes FSW only (no
 * value result), then both ST0 and ST1 are popped.  If either register is
 * empty, a stack underflow is signalled and the double pop still happens.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14137
14138
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14145
14146
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the 32-bit integer operand, and if
 * ST0 is non-empty runs the assembly worker and stores the result back to
 * ST0; otherwise signals stack underflow on ST0.
 *
 * @param bRm      The ModRM byte (memory form).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14182
14183
/** Opcode 0xda !11/0 - FIADD m32i: ST0 += (int32)mem. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1 - FIMUL m32i: ST0 *= (int32)mem. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14198
14199
/**
 * Opcode 0xda !11/2 - FICOM ST0,m32i.
 *
 * Compares ST0 with a 32-bit integer from memory; only FSW is updated
 * (no value result, no pop).
 */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14232
14233
/**
 * Opcode 0xda !11/3 - FICOMP ST0,m32i.
 *
 * Same as FICOM m32i but pops the register stack afterwards (note the
 * _THEN_POP variants of the FSW/underflow macros).
 */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14266
14267
/** Opcode 0xda !11/4 - FISUB m32i: ST0 -= (int32)mem. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5 - FISUBR m32i: ST0 = (int32)mem - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6 - FIDIV m32i: ST0 /= (int32)mem. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7 - FIDIVR m32i: ST0 = (int32)mem / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14298
14299
/**
 * Opcode 0xda - x87 escape group F2.
 *
 * Register forms (mod=11): FCMOVB/E/BE/U and FUCOMPP (only at bRm==0xe9).
 * Memory forms: integer arithmetic/compare on m32i operands.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is defined in this reg group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14339
14340
/**
 * Opcode 0xdb !11/0 - FILD m32i.
 *
 * Converts a 32-bit signed integer from memory to R80 and pushes it.
 * Checking ST7 for emptiness is the push-space check (ST7 becomes the new
 * top after the push); a non-empty ST7 means stack overflow.
 */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14372
14373
/**
 * Opcode 0xdb !11/1 - FISTTP m32i (SSE3).
 *
 * Stores ST0 to memory as int32 using truncation (iemAImpl_fistt_*), then
 * pops.  On empty ST0 and unmasked IM, the mapping is left uncommitted; with
 * IM masked, the integer-indefinite value (INT32_MIN) is written instead.
 */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit only proceeds depending on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14408
14409
/**
 * Opcode 0xdb !11/2 - FIST m32i.
 *
 * Stores ST0 to memory as int32 (rounded per FCW.RC, via
 * iemAImpl_fist_r80_to_i32); no pop.  Underflow handling mirrors FISTTP
 * but without the pop.
 */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14444
14445
14446/** Opcode 0xdb !11/3. */
14447FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14448{
14449 IEMOP_MNEMONIC("fisttp m32i");
14450 IEM_MC_BEGIN(3, 2);
14451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14452 IEM_MC_LOCAL(uint16_t, u16Fsw);
14453 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14454 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14455 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14456
14457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14461
14462 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14463 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14464 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14465 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14466 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14467 IEM_MC_ELSE()
14468 IEM_MC_IF_FCW_IM()
14469 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14470 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14471 IEM_MC_ENDIF();
14472 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14473 IEM_MC_ENDIF();
14474 IEM_MC_USED_FPU();
14475 IEM_MC_ADVANCE_RIP();
14476
14477 IEM_MC_END();
14478 return VINF_SUCCESS;
14479}
14480
14481
/**
 * Opcode 0xdb !11/5 - FLD m80r.
 *
 * Loads an 80-bit real from memory and pushes it.  As with FILD, the ST7
 * emptiness check is the push-space (overflow) check.
 */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14513
14514
/**
 * Opcode 0xdb !11/7 - FSTP m80r.
 *
 * Stores ST0 to memory as an 80-bit real and pops.  On empty ST0 with IM
 * masked, a negative QNaN (real indefinite) is written instead.
 */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14549
14550
/**
 * Opcode 0xdb 11/0 - FCMOVNB ST0,ST(i).
 *
 * Copies ST(i) to ST0 when EFLAGS.CF is clear (inverse of FCMOVB).
 */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14577
14578
/**
 * Opcode 0xdb 11/1 - FCMOVNE ST0,ST(i).
 *
 * Copies ST(i) to ST0 when EFLAGS.ZF is clear.
 */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14605
14606
/**
 * Opcode 0xdb 11/2 - FCMOVNBE ST0,ST(i).
 *
 * Copies ST(i) to ST0 when both CF and ZF are clear (not below-or-equal).
 */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14633
14634
14635/** Opcode 0xdb 11/3. */
14636FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14637{
14638 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14640
14641 IEM_MC_BEGIN(0, 1);
14642 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14643
14644 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14645 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14646
14647 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14648 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14649 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14650 IEM_MC_ENDIF();
14651 IEM_MC_UPDATE_FPU_OPCODE_IP();
14652 IEM_MC_ELSE()
14653 IEM_MC_FPU_STACK_UNDERFLOW(0);
14654 IEM_MC_ENDIF();
14655 IEM_MC_USED_FPU();
14656 IEM_MC_ADVANCE_RIP();
14657
14658 IEM_MC_END();
14659 return VINF_SUCCESS;
14660}
14661
14662
/**
 * Opcode 0xdb 0xe0 - FNENI.
 *
 * 8087-only interrupt-enable instruction; treated as a NOP on later CPUs
 * (only the device-not-available check is performed).
 */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Opcode 0xdb 0xe1 - FNDISI.
 *
 * 8087-only interrupt-disable instruction; treated as a NOP on later CPUs.
 */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14687
14688
/**
 * Opcode 0xdb 0xe2 - FNCLEX.
 *
 * Clears the FSW exception flags without checking for pending exceptions
 * (the "no-wait" form, hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here).
 */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14702
14703
/**
 * Opcode 0xdb 0xe3 - FNINIT.
 *
 * Reinitializes the FPU; deferred to the C implementation with exception
 * checking disabled (no-wait form).
 */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14711
14712
/**
 * Opcode 0xdb 0xe4 - FNSETPM.
 *
 * 80287-only "set protected mode" instruction; ignored (NOP) on later CPUs.
 */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Opcode 0xdb 0xe5 - FRSTPM.
 *
 * 80287XL-only; raises \#UD here since newer CPUs do (the NOP variant is
 * kept under \#if 0 for reference).
 */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14741
14742
/** Opcode 0xdb 11/5 - FUCOMI ST0,ST(i): unordered compare into EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6 - FCOMI ST0,ST(i): ordered compare into EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14757
14758
/**
 * Opcode 0xdb - x87 escape group F3.
 *
 * Register forms: FCMOVNB/NE/NBE/NU, the 0xe0-0xe7 control group, and
 * FUCOMI/FCOMI.  Memory forms: m32i integer loads/stores and m80r real
 * load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 covers the 0xe0..0xe7 control-instruction group. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* defensive; every inner case returns. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14808
14809
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Used by the 0xdc register forms (FADD/FMUL/... ST(i),ST0): ST(i) is the
 * first operand and the destination, ST0 the second operand.
 *
 * @param bRm      The ModRM byte (register form, rm selects ST(i)).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14841
14842
/*
 * Opcode 0xdc register forms: arithmetic with ST(i) as destination.
 * All forward to iemOpHlpFpu_stN_st0 with the matching assembly worker.
 */

/** Opcode 0xdc 11/0 - FADD ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1 - FMUL ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4 - FSUBR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5 - FSUB ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6 - FDIVR ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7 - FDIV ST(i),ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14889
14890
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param bRm     The ModRM byte (memory form).
 * @param pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14925
14926
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14933
14934
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14941
14942
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    /* FCOM m64real: compare ST0 against a 64-bit real memory operand,
       updating only the FSW condition codes - no store, no pop. */
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty -> stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14975
14976
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    /* FCOMP m64real: same as FCOM m64real above, but pops ST0 afterwards
       (note the *_THEN_POP macro variants). */
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty -> stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15009
15010
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15017
15018
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15025
15026
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15033
15034
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real - defer to the common ST0 <- op(ST0, m64r) worker. */
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15041
15042
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* FPU escape 0xdc: dispatch on ModR/M. Register forms (mod == 3) operate
       ST(i) <- op(ST(i), ST0); memory forms take a 64-bit real operand. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* Remember opcode position for FOP/FIP updates. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15079
15080
/** Opcode 0xdd !11/0.
 * FLD m64real: convert a 64-bit real memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST(7) is the register that becomes ST0 on push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15112
15113
/** Opcode 0xdd !11/1.
 * FISTTP m64int: store ST0 as a 64-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15148
15149
/** Opcode 0xdd !11/2.
 * FST m64real: store ST0 as a 64-bit real, no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IE masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15184
15185
15186
15187
/** Opcode 0xdd !11/3.
 * FSTP m64real: store ST0 as a 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IE masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15222
15223
/** Opcode 0xdd !11/4.
 * FRSTOR: restore the whole FPU state from memory; complex enough to be
 * deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15240
15241
/** Opcode 0xdd !11/6.
 * FNSAVE: save the whole FPU state to memory (no pending-exception check);
 * deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15259
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory (no pending-exception
 * check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15283
15284
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register as empty without touching TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15306
15307
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST0 into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15330
15331
15332/** Opcode 0xdd 11/3. */
15333FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15334{
15335 IEMOP_MNEMONIC("fcom st0,stN");
15336 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15337}
15338
15339
15340/** Opcode 0xdd 11/4. */
15341FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15342{
15343 IEMOP_MNEMONIC("fcomp st0,stN");
15344 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15345}
15346
15347
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* FPU escape 0xdd: dispatch on ModR/M. Register forms cover FFREE/FST/
       FSTP/FUCOM(P); memory forms take 64-bit operands plus FRSTOR/FNSAVE/
       FNSTSW. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* Remember opcode position for FOP/FIP updates. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15384
15385
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15392
15393
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15400
15401
15402/** Opcode 0xde 0xd9. */
15403FNIEMOP_DEF(iemOp_fcompp)
15404{
15405 IEMOP_MNEMONIC("fucompp st0,stN");
15406 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15407}
15408
15409
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15416
15417
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15424
15425
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15432
15433
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP ST(i),ST0 - defer to the common ST(i) <- op(ST(i), ST0) + pop worker. */
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15440
15441
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm       The ModR/M byte (memory form, mod != 3).
 * @param   pfnAImpl  Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* ST0 empty -> stack underflow with ST0 as the destination. */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15477
15478
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int - defer to the common ST0 <- op(ST0, m16i) worker. */
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15485
15486
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int - defer to the common ST0 <- op(ST0, m16i) worker. */
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15493
15494
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* FICOM m16int: compare ST0 against a 16-bit integer memory operand,
       updating only the FSW condition codes - no store, no pop. */
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty -> stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15527
15528
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* FICOMP m16int: same as FICOM m16int above, but pops ST0 afterwards
       (note the *_THEN_POP macro variants). */
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty -> stack underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15561
15562
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int - defer to the common ST0 <- op(ST0, m16i) worker. */
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15569
15570
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int - defer to the common ST0 <- op(ST0, m16i) worker. */
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15577
15578
15579/** Opcode 0xde !11/6. */
15580FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15581{
15582 IEMOP_MNEMONIC("fiadd m16i");
15583 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15584}
15585
15586
15587/** Opcode 0xde !11/7. */
15588FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15589{
15590 IEMOP_MNEMONIC("fiadd m16i");
15591 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15592}
15593
15594
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* FPU escape 0xde: dispatch on ModR/M. Register forms are the popping
       arithmetic variants (plus FCOMPP at /3 0xd9); memory forms take 16-bit
       integer operands. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* Remember opcode position for FOP/FIP updates. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15633
15634
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free the register, then advance TOP (the FINCSTP part). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15656
15657
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word to AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15673
15674
15675/** Opcode 0xdf 11/5. */
15676FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15677{
15678 IEMOP_MNEMONIC("fcomip st0,stN");
15679 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15680}
15681
15682
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): compare setting EFLAGS, then pop; deferred to a C
 * implementation shared with FCOMI/FUCOMI(P). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15689
15690
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit signed integer to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST(7) is the register that becomes ST0 on push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15722
15723
/** Opcode 0xdf !11/1.
 * FISTTP m16int: store ST0 as a 16-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15758
15759
15760/** Opcode 0xdf !11/2. */
15761FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15762{
15763 IEMOP_MNEMONIC("fistp m16i");
15764 IEM_MC_BEGIN(3, 2);
15765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15766 IEM_MC_LOCAL(uint16_t, u16Fsw);
15767 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15768 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15769 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15770
15771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15773 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15774 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15775
15776 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15777 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15778 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15779 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15780 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15781 IEM_MC_ELSE()
15782 IEM_MC_IF_FCW_IM()
15783 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15784 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15785 IEM_MC_ENDIF();
15786 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15787 IEM_MC_ENDIF();
15788 IEM_MC_USED_FPU();
15789 IEM_MC_ADVANCE_RIP();
15790
15791 IEM_MC_END();
15792 return VINF_SUCCESS;
15793}
15794
15795
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST0 as a 16-bit signed integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    /* Finish decoding before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IE masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15830
15831
/** Opcode 0xdf !11/4 - FBLD m80bcd.  Not implemented yet, stubbed out. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15834
15835
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /*
     * FILD m64i: load a signed 64-bit integer from memory, convert it to
     * 80-bit real and push the result onto the FPU register stack.
     */
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires the register that becomes the new ST(0), i.e. current ST(7), to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15867
15868
/** Opcode 0xdf !11/6 - FBSTP m80bcd.  Not implemented yet, stubbed out. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15871
15872
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /*
     * FISTP m64i: store ST(0) to memory as a signed 64-bit integer, then pop
     * the FPU register stack.  Mirrors iemOp_fistp_m16i, just 64-bit wide.
     */
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so any #PF is raised before the FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked (FCW.IM=1) the integer indefinite value is stored. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15907
15908
/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /*
     * FPU escape opcode 0xdf: fetch the ModRM byte and dispatch on the /r
     * field.  Mod=3 selects the register forms, anything else the memory
     * forms.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only 0xdf 0xe0 (FNSTSW AX) is valid in this group. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15946
15947
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    /*
     * LOOPNE/LOOPNZ Jb: decrement the counter register, then take the short
     * branch while the counter is non-zero AND ZF is clear.  The counter
     * width (CX/ECX/RCX) follows the effective address size.
     */
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15994
15995
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    /*
     * LOOPE/LOOPZ Jb: decrement the counter register, then take the short
     * branch while the counter is non-zero AND ZF is set.  Same shape as
     * LOOPNE with the ZF test inverted.
     */
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16042
16043
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    /*
     * LOOP Jb: decrement the counter register and take the short branch
     * while it is non-zero.  A "loop to self" (displacement equal to minus
     * the instruction length) is special-cased: it would just spin until
     * the counter reaches zero with no other visible effect, so the counter
     * is cleared in a single step instead.
     */
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not branching to itself? */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not branching to itself? */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not branching to itself? */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16117
16118
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    /*
     * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the counter register
     * is zero (note the inverted if/else sense below: the NZ case falls
     * through).  The counter width follows the effective address size.
     */
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16162
16163
16164/** Opcode 0xe4 */
16165FNIEMOP_DEF(iemOp_in_AL_Ib)
16166{
16167 IEMOP_MNEMONIC("in eAX,Ib");
16168 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16169 IEMOP_HLP_NO_LOCK_PREFIX();
16170 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16171}
16172
16173
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /*
     * IN eAX, imm8: read a word/dword from the immediate I/O port into
     * AX/EAX, depending on the effective operand size.  Deferred to the
     * C implementation.
     */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16182
16183
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /*
     * OUT imm8, AL: write AL to the immediate I/O port.  Deferred to the
     * C implementation.
     */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16192
16193
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /*
     * OUT imm8, eAX: write AX/EAX (per effective operand size) to the
     * immediate I/O port.  Deferred to the C implementation.
     */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16202
16203
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /*
     * CALL rel16/rel32: near relative call.  The immediate is fetched per
     * the effective operand size (in 64-bit mode a 32-bit immediate is
     * sign-extended to 64 bits) and the push/branch work is deferred to
     * the per-size C implementations.
     */
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16232
16233
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /*
     * JMP rel16/rel32: near relative jump.  The 32-bit and 64-bit operand
     * sizes share one path since 64-bit mode also uses a 32-bit
     * displacement (sign-extended by IEM_MC_REL_JMP_S32).
     */
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16263
16264
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /*
     * JMP ptr16:16/ptr16:32: direct far jump.  Invalid in 64-bit mode
     * (IEMOP_HLP_NO_64BIT raises #UD there).
     */
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16281
16282
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /*
     * JMP rel8: short unconditional jump with an 8-bit signed displacement.
     */
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16296
16297
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /*
     * IN AL, DX: read one byte from the port in DX into AL.  Deferred to
     * the C implementation.
     */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16305
16306
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX) /* NOTE(review): misnamed vs siblings - should be iemOp_in_eAX_DX; renaming would touch the opcode table. */
{
    /*
     * IN eAX, DX: read a word/dword (per effective operand size) from the
     * port in DX into AX/EAX.  Deferred to the C implementation.
     */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16314
16315
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /*
     * OUT DX, AL: write AL to the port in DX.  Deferred to the C
     * implementation.
     */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16323
16324
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /*
     * OUT DX, eAX: write AX/EAX (per effective operand size) to the port
     * in DX.  Deferred to the C implementation.
     */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16332
16333
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    /*
     * LOCK prefix: record the prefix flag and recursively decode the next
     * opcode byte via the one-byte dispatch table.
     */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16343
16344
/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    /*
     * INT1/ICEBP: raises #DB.  Dispatched through the generic software
     * interrupt implementation with fIsBpInstr=false so it is not treated
     * as an INT3.
     */
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16353
16354
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /*
     * REPNE/REPNZ prefix: set the REPNZ flag (clearing any earlier REPZ)
     * and recursively decode the next opcode byte.
     */
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16366
16367
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /*
     * REP/REPE/REPZ prefix: set the REPZ flag (clearing any earlier REPNZ)
     * and recursively decode the next opcode byte.
     */
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16379
16380
16381/** Opcode 0xf4. */
16382FNIEMOP_DEF(iemOp_hlt)
16383{
16384 IEMOP_HLP_NO_LOCK_PREFIX();
16385 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16386}
16387
16388
/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    /*
     * CMC: complement (flip) the carry flag; no other flags are touched.
     */
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16400
16401
16402/**
16403 * Common implementation of 'inc/dec/not/neg Eb'.
16404 *
16405 * @param bRm The RM byte.
16406 * @param pImpl The instruction implementation.
16407 */
16408FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16409{
16410 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16411 {
16412 /* register access */
16413 IEM_MC_BEGIN(2, 0);
16414 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16415 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16416 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16417 IEM_MC_REF_EFLAGS(pEFlags);
16418 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16419 IEM_MC_ADVANCE_RIP();
16420 IEM_MC_END();
16421 }
16422 else
16423 {
16424 /* memory access. */
16425 IEM_MC_BEGIN(2, 2);
16426 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16427 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16429
16430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16431 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16432 IEM_MC_FETCH_EFLAGS(EFlags);
16433 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16434 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16435 else
16436 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16437
16438 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16439 IEM_MC_COMMIT_EFLAGS(EFlags);
16440 IEM_MC_ADVANCE_RIP();
16441 IEM_MC_END();
16442 }
16443 return VINF_SUCCESS;
16444}
16445
16446
16447/**
16448 * Common implementation of 'inc/dec/not/neg Ev'.
16449 *
16450 * @param bRm The RM byte.
16451 * @param pImpl The instruction implementation.
16452 */
16453FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16454{
16455 /* Registers are handled by a common worker. */
16456 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16457 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16458
16459 /* Memory we do here. */
16460 switch (pIemCpu->enmEffOpSize)
16461 {
16462 case IEMMODE_16BIT:
16463 IEM_MC_BEGIN(2, 2);
16464 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16465 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16467
16468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16469 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16470 IEM_MC_FETCH_EFLAGS(EFlags);
16471 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16472 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16473 else
16474 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16475
16476 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16477 IEM_MC_COMMIT_EFLAGS(EFlags);
16478 IEM_MC_ADVANCE_RIP();
16479 IEM_MC_END();
16480 return VINF_SUCCESS;
16481
16482 case IEMMODE_32BIT:
16483 IEM_MC_BEGIN(2, 2);
16484 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16485 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16487
16488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16489 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16490 IEM_MC_FETCH_EFLAGS(EFlags);
16491 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16492 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16493 else
16494 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16495
16496 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16497 IEM_MC_COMMIT_EFLAGS(EFlags);
16498 IEM_MC_ADVANCE_RIP();
16499 IEM_MC_END();
16500 return VINF_SUCCESS;
16501
16502 case IEMMODE_64BIT:
16503 IEM_MC_BEGIN(2, 2);
16504 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16505 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16507
16508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16509 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16510 IEM_MC_FETCH_EFLAGS(EFlags);
16511 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16512 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16513 else
16514 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16515
16516 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16517 IEM_MC_COMMIT_EFLAGS(EFlags);
16518 IEM_MC_ADVANCE_RIP();
16519 IEM_MC_END();
16520 return VINF_SUCCESS;
16521
16522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16523 }
16524}
16525
16526
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /*
     * TEST Eb, Ib: AND without writeback - only EFLAGS are updated, which
     * is why the memory operand is mapped read-only despite the "Dst" name.
     */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte follows the ModRM/displacement bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16574
16575
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /*
     * TEST Ev, Iv: AND without writeback - only EFLAGS are updated; memory
     * operands are therefore mapped read-only.  The 64-bit form uses a
     * sign-extended 32-bit immediate, like the other Iv instructions.
     */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a two byte immediate follows the ModRM/displacement bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a four byte immediate follows the ModRM/displacement bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the 64-bit form also only has a four byte immediate (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16712
16713
16714/** Opcode 0xf6 /4, /5, /6 and /7. */
16715FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
16716{
16717 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16718
16719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16720 {
16721 /* register access */
16722 IEMOP_HLP_NO_LOCK_PREFIX();
16723 IEM_MC_BEGIN(3, 1);
16724 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16725 IEM_MC_ARG(uint8_t, u8Value, 1);
16726 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16727 IEM_MC_LOCAL(int32_t, rc);
16728
16729 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16730 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16731 IEM_MC_REF_EFLAGS(pEFlags);
16732 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16733 IEM_MC_IF_LOCAL_IS_Z(rc) {
16734 IEM_MC_ADVANCE_RIP();
16735 } IEM_MC_ELSE() {
16736 IEM_MC_RAISE_DIVIDE_ERROR();
16737 } IEM_MC_ENDIF();
16738
16739 IEM_MC_END();
16740 }
16741 else
16742 {
16743 /* memory access. */
16744 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16745
16746 IEM_MC_BEGIN(3, 2);
16747 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16748 IEM_MC_ARG(uint8_t, u8Value, 1);
16749 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16751 IEM_MC_LOCAL(int32_t, rc);
16752
16753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16754 IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
16755 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16756 IEM_MC_REF_EFLAGS(pEFlags);
16757 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16758 IEM_MC_IF_LOCAL_IS_Z(rc) {
16759 IEM_MC_ADVANCE_RIP();
16760 } IEM_MC_ELSE() {
16761 IEM_MC_RAISE_DIVIDE_ERROR();
16762 } IEM_MC_ENDIF();
16763
16764 IEM_MC_END();
16765 }
16766 return VINF_SUCCESS;
16767}
16768
16769
16770/** Opcode 0xf7 /4, /5, /6 and /7. */
16771FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
16772{
16773 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
16775
16776 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16777 {
16778 /* register access */
16779 switch (pIemCpu->enmEffOpSize)
16780 {
16781 case IEMMODE_16BIT:
16782 {
16783 IEMOP_HLP_NO_LOCK_PREFIX();
16784 IEM_MC_BEGIN(4, 1);
16785 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16786 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16787 IEM_MC_ARG(uint16_t, u16Value, 2);
16788 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16789 IEM_MC_LOCAL(int32_t, rc);
16790
16791 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16792 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16793 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16794 IEM_MC_REF_EFLAGS(pEFlags);
16795 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16796 IEM_MC_IF_LOCAL_IS_Z(rc) {
16797 IEM_MC_ADVANCE_RIP();
16798 } IEM_MC_ELSE() {
16799 IEM_MC_RAISE_DIVIDE_ERROR();
16800 } IEM_MC_ENDIF();
16801
16802 IEM_MC_END();
16803 return VINF_SUCCESS;
16804 }
16805
16806 case IEMMODE_32BIT:
16807 {
16808 IEMOP_HLP_NO_LOCK_PREFIX();
16809 IEM_MC_BEGIN(4, 1);
16810 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16811 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16812 IEM_MC_ARG(uint32_t, u32Value, 2);
16813 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16814 IEM_MC_LOCAL(int32_t, rc);
16815
16816 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16817 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16818 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16819 IEM_MC_REF_EFLAGS(pEFlags);
16820 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16821 IEM_MC_IF_LOCAL_IS_Z(rc) {
16822 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16823 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16824 IEM_MC_ADVANCE_RIP();
16825 } IEM_MC_ELSE() {
16826 IEM_MC_RAISE_DIVIDE_ERROR();
16827 } IEM_MC_ENDIF();
16828
16829 IEM_MC_END();
16830 return VINF_SUCCESS;
16831 }
16832
16833 case IEMMODE_64BIT:
16834 {
16835 IEMOP_HLP_NO_LOCK_PREFIX();
16836 IEM_MC_BEGIN(4, 1);
16837 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16838 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16839 IEM_MC_ARG(uint64_t, u64Value, 2);
16840 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16841 IEM_MC_LOCAL(int32_t, rc);
16842
16843 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16844 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16845 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16846 IEM_MC_REF_EFLAGS(pEFlags);
16847 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16848 IEM_MC_IF_LOCAL_IS_Z(rc) {
16849 IEM_MC_ADVANCE_RIP();
16850 } IEM_MC_ELSE() {
16851 IEM_MC_RAISE_DIVIDE_ERROR();
16852 } IEM_MC_ENDIF();
16853
16854 IEM_MC_END();
16855 return VINF_SUCCESS;
16856 }
16857
16858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16859 }
16860 }
16861 else
16862 {
16863 /* memory access. */
16864 switch (pIemCpu->enmEffOpSize)
16865 {
16866 case IEMMODE_16BIT:
16867 {
16868 IEMOP_HLP_NO_LOCK_PREFIX();
16869 IEM_MC_BEGIN(4, 2);
16870 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16871 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16872 IEM_MC_ARG(uint16_t, u16Value, 2);
16873 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16875 IEM_MC_LOCAL(int32_t, rc);
16876
16877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16878 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
16879 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16880 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16881 IEM_MC_REF_EFLAGS(pEFlags);
16882 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16883 IEM_MC_IF_LOCAL_IS_Z(rc) {
16884 IEM_MC_ADVANCE_RIP();
16885 } IEM_MC_ELSE() {
16886 IEM_MC_RAISE_DIVIDE_ERROR();
16887 } IEM_MC_ENDIF();
16888
16889 IEM_MC_END();
16890 return VINF_SUCCESS;
16891 }
16892
16893 case IEMMODE_32BIT:
16894 {
16895 IEMOP_HLP_NO_LOCK_PREFIX();
16896 IEM_MC_BEGIN(4, 2);
16897 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16898 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16899 IEM_MC_ARG(uint32_t, u32Value, 2);
16900 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16902 IEM_MC_LOCAL(int32_t, rc);
16903
16904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16905 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
16906 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16907 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16908 IEM_MC_REF_EFLAGS(pEFlags);
16909 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16910 IEM_MC_IF_LOCAL_IS_Z(rc) {
16911 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16912 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16913 IEM_MC_ADVANCE_RIP();
16914 } IEM_MC_ELSE() {
16915 IEM_MC_RAISE_DIVIDE_ERROR();
16916 } IEM_MC_ENDIF();
16917
16918 IEM_MC_END();
16919 return VINF_SUCCESS;
16920 }
16921
16922 case IEMMODE_64BIT:
16923 {
16924 IEMOP_HLP_NO_LOCK_PREFIX();
16925 IEM_MC_BEGIN(4, 2);
16926 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16927 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16928 IEM_MC_ARG(uint64_t, u64Value, 2);
16929 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16931 IEM_MC_LOCAL(int32_t, rc);
16932
16933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16934 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
16935 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16936 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16937 IEM_MC_REF_EFLAGS(pEFlags);
16938 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16939 IEM_MC_IF_LOCAL_IS_Z(rc) {
16940 IEM_MC_ADVANCE_RIP();
16941 } IEM_MC_ELSE() {
16942 IEM_MC_RAISE_DIVIDE_ERROR();
16943 } IEM_MC_ENDIF();
16944
16945 IEM_MC_END();
16946 return VINF_SUCCESS;
16947 }
16948
16949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16950 }
16951 }
16952}
16953
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /*
     * Group 3, byte operand: dispatch on the ModR/M reg field.
     * /0 = test, /1 = invalid, /2 = not, /3 = neg, /4 = mul, /5 = imul,
     * /6 = div, /7 = idiv.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* Flag the verifier that SF/ZF/AF/PF are treated as undefined here. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* For div/idiv OF and CF are treated as undefined as well. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16990
16991
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /*
     * Group 3, word/dword/qword operand: dispatch on the ModR/M reg field.
     * Same layout as 0xf6 (iemOp_Grp3_Eb), but the mul/div workers take an
     * implementation table (g_iemAImpl_*) instead of a single u8 function.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* Flag the verifier that SF/ZF/AF/PF are treated as undefined here. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* For div/idiv OF and CF are treated as undefined as well. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17028
17029
/** Opcode 0xf8 - clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* no LOCK prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17041
17042
/** Opcode 0xf9 - set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* no LOCK prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17054
17055
/** Opcode 0xfa - clear the interrupt flag. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Privilege/IOPL checks are handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17063
17064
/** Opcode 0xfb - set the interrupt flag. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Privilege checks and interrupt shadowing are handled by the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17071
17072
/** Opcode 0xfc - clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* no LOCK prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17084
17085
/** Opcode 0xfd - set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* no LOCK prefix allowed */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17097
17098
17099/** Opcode 0xfe. */
17100FNIEMOP_DEF(iemOp_Grp4)
17101{
17102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17103 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17104 {
17105 case 0:
17106 IEMOP_MNEMONIC("inc Ev");
17107 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17108 case 1:
17109 IEMOP_MNEMONIC("dec Ev");
17110 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17111 default:
17112 IEMOP_MNEMONIC("grp4-ud");
17113 return IEMOP_RAISE_INVALID_OPCODE();
17114 }
17115}
17116
17117
17118/**
17119 * Opcode 0xff /2.
17120 * @param bRm The RM byte.
17121 */
17122FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17123{
17124 IEMOP_MNEMONIC("calln Ev");
17125 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17126 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17127
17128 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17129 {
17130 /* The new RIP is taken from a register. */
17131 switch (pIemCpu->enmEffOpSize)
17132 {
17133 case IEMMODE_16BIT:
17134 IEM_MC_BEGIN(1, 0);
17135 IEM_MC_ARG(uint16_t, u16Target, 0);
17136 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17137 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17138 IEM_MC_END()
17139 return VINF_SUCCESS;
17140
17141 case IEMMODE_32BIT:
17142 IEM_MC_BEGIN(1, 0);
17143 IEM_MC_ARG(uint32_t, u32Target, 0);
17144 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17145 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17146 IEM_MC_END()
17147 return VINF_SUCCESS;
17148
17149 case IEMMODE_64BIT:
17150 IEM_MC_BEGIN(1, 0);
17151 IEM_MC_ARG(uint64_t, u64Target, 0);
17152 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17153 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17154 IEM_MC_END()
17155 return VINF_SUCCESS;
17156
17157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17158 }
17159 }
17160 else
17161 {
17162 /* The new RIP is taken from a register. */
17163 switch (pIemCpu->enmEffOpSize)
17164 {
17165 case IEMMODE_16BIT:
17166 IEM_MC_BEGIN(1, 1);
17167 IEM_MC_ARG(uint16_t, u16Target, 0);
17168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17170 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17171 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17172 IEM_MC_END()
17173 return VINF_SUCCESS;
17174
17175 case IEMMODE_32BIT:
17176 IEM_MC_BEGIN(1, 1);
17177 IEM_MC_ARG(uint32_t, u32Target, 0);
17178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17180 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17181 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17182 IEM_MC_END()
17183 return VINF_SUCCESS;
17184
17185 case IEMMODE_64BIT:
17186 IEM_MC_BEGIN(1, 1);
17187 IEM_MC_ARG(uint64_t, u64Target, 0);
17188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17190 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17191 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17192 IEM_MC_END()
17193 return VINF_SUCCESS;
17194
17195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17196 }
17197 }
17198}
17199
17200typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17201
17202FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17203{
17204 /* Registers? How?? */
17205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17206 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17207
17208 /* Far pointer loaded from memory. */
17209 switch (pIemCpu->enmEffOpSize)
17210 {
17211 case IEMMODE_16BIT:
17212 IEM_MC_BEGIN(3, 1);
17213 IEM_MC_ARG(uint16_t, u16Sel, 0);
17214 IEM_MC_ARG(uint16_t, offSeg, 1);
17215 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17219 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17220 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17221 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17222 IEM_MC_END();
17223 return VINF_SUCCESS;
17224
17225 case IEMMODE_64BIT:
17226 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17227 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17228 * and call far qword [rsp] encodings. */
17229 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17230 {
17231 IEM_MC_BEGIN(3, 1);
17232 IEM_MC_ARG(uint16_t, u16Sel, 0);
17233 IEM_MC_ARG(uint64_t, offSeg, 1);
17234 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17238 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17239 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17240 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17241 IEM_MC_END();
17242 return VINF_SUCCESS;
17243 }
17244 /* AMD falls thru. */
17245
17246 case IEMMODE_32BIT:
17247 IEM_MC_BEGIN(3, 1);
17248 IEM_MC_ARG(uint16_t, u16Sel, 0);
17249 IEM_MC_ARG(uint32_t, offSeg, 1);
17250 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17254 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17255 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17256 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17257 IEM_MC_END();
17258 return VINF_SUCCESS;
17259
17260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17261 }
17262}
17263
17264
17265/**
17266 * Opcode 0xff /3.
17267 * @param bRm The RM byte.
17268 */
17269FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17270{
17271 IEMOP_MNEMONIC("callf Ep");
17272 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17273}
17274
17275
17276/**
17277 * Opcode 0xff /4.
17278 * @param bRm The RM byte.
17279 */
17280FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17281{
17282 IEMOP_MNEMONIC("jmpn Ev");
17283 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17284 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17285
17286 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17287 {
17288 /* The new RIP is taken from a register. */
17289 switch (pIemCpu->enmEffOpSize)
17290 {
17291 case IEMMODE_16BIT:
17292 IEM_MC_BEGIN(0, 1);
17293 IEM_MC_LOCAL(uint16_t, u16Target);
17294 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17295 IEM_MC_SET_RIP_U16(u16Target);
17296 IEM_MC_END()
17297 return VINF_SUCCESS;
17298
17299 case IEMMODE_32BIT:
17300 IEM_MC_BEGIN(0, 1);
17301 IEM_MC_LOCAL(uint32_t, u32Target);
17302 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17303 IEM_MC_SET_RIP_U32(u32Target);
17304 IEM_MC_END()
17305 return VINF_SUCCESS;
17306
17307 case IEMMODE_64BIT:
17308 IEM_MC_BEGIN(0, 1);
17309 IEM_MC_LOCAL(uint64_t, u64Target);
17310 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17311 IEM_MC_SET_RIP_U64(u64Target);
17312 IEM_MC_END()
17313 return VINF_SUCCESS;
17314
17315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17316 }
17317 }
17318 else
17319 {
17320 /* The new RIP is taken from a memory location. */
17321 switch (pIemCpu->enmEffOpSize)
17322 {
17323 case IEMMODE_16BIT:
17324 IEM_MC_BEGIN(0, 2);
17325 IEM_MC_LOCAL(uint16_t, u16Target);
17326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17328 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17329 IEM_MC_SET_RIP_U16(u16Target);
17330 IEM_MC_END()
17331 return VINF_SUCCESS;
17332
17333 case IEMMODE_32BIT:
17334 IEM_MC_BEGIN(0, 2);
17335 IEM_MC_LOCAL(uint32_t, u32Target);
17336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17338 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17339 IEM_MC_SET_RIP_U32(u32Target);
17340 IEM_MC_END()
17341 return VINF_SUCCESS;
17342
17343 case IEMMODE_64BIT:
17344 IEM_MC_BEGIN(0, 2);
17345 IEM_MC_LOCAL(uint64_t, u64Target);
17346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17348 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
17349 IEM_MC_SET_RIP_U64(u64Target);
17350 IEM_MC_END()
17351 return VINF_SUCCESS;
17352
17353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17354 }
17355 }
17356}
17357
17358
17359/**
17360 * Opcode 0xff /5.
17361 * @param bRm The RM byte.
17362 */
17363FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17364{
17365 IEMOP_MNEMONIC("jmpf Ep");
17366 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17367}
17368
17369
17370/**
17371 * Opcode 0xff /6.
17372 * @param bRm The RM byte.
17373 */
17374FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17375{
17376 IEMOP_MNEMONIC("push Ev");
17377 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17378
17379 /* Registers are handled by a common worker. */
17380 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17381 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17382
17383 /* Memory we do here. */
17384 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17385 switch (pIemCpu->enmEffOpSize)
17386 {
17387 case IEMMODE_16BIT:
17388 IEM_MC_BEGIN(0, 2);
17389 IEM_MC_LOCAL(uint16_t, u16Src);
17390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17392 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17393 IEM_MC_PUSH_U16(u16Src);
17394 IEM_MC_ADVANCE_RIP();
17395 IEM_MC_END();
17396 return VINF_SUCCESS;
17397
17398 case IEMMODE_32BIT:
17399 IEM_MC_BEGIN(0, 2);
17400 IEM_MC_LOCAL(uint32_t, u32Src);
17401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17403 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17404 IEM_MC_PUSH_U32(u32Src);
17405 IEM_MC_ADVANCE_RIP();
17406 IEM_MC_END();
17407 return VINF_SUCCESS;
17408
17409 case IEMMODE_64BIT:
17410 IEM_MC_BEGIN(0, 2);
17411 IEM_MC_LOCAL(uint64_t, u64Src);
17412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17414 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17415 IEM_MC_PUSH_U64(u64Src);
17416 IEM_MC_ADVANCE_RIP();
17417 IEM_MC_END();
17418 return VINF_SUCCESS;
17419
17420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17421 }
17422}
17423
17424
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /*
     * Group 5 dispatch on the ModR/M reg field (word/dword/qword operands):
     * /0 = inc, /1 = dec, /2 = calln, /3 = callf, /4 = jmpn, /5 = jmpf,
     * /6 = push, /7 = invalid.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* the switch above is exhaustive for 3-bit values */
}
17453
17454
17455
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte
 * (0x00..0xff).  Forward declared at the top of this file so the prefix
 * decoders can chain back into it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17523
17524
17525/** @} */
17526
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette