VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60740

Last change on this file since 60740 was 60666, checked in by vboxsync, 9 years ago

IEM: Use IEM_GET_TARGET_CPU(); 486 ignores CR0 bits too.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 594.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60666 2016-04-22 23:48:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Fetches the ModR/M byte itself and handles both the register and the
 * memory destination forms; the memory form dispatches to the locked
 * implementation when a LOCK prefix is present.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Ops without a locked variant (CMP, TEST) only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Fetches the ModR/M byte itself and branches on the effective operand size
 * (16/32/64-bit); the memory form honours the LOCK prefix.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write the destination, so no high-dword clearing. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 serves as the "has locked variants" indicator for all
           sizes here -- presumably NULL for all or none; TODO confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Destination is the reg field, source is r/m (register or memory read),
 * so no LOCK prefix is ever valid here.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Destination is the reg field, source is r/m (register or memory read),
 * so no LOCK prefix is ever valid here.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit ops zero the high dword of the 64-bit destination. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit ops zero the high dword of the 64-bit destination. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate byte itself; AL is always the destination.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* No memory destination, so LOCK is invalid. */

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a sign-extended dword (Iz), matching the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @return  Strict VBox status code (VINF_SUCCESS from this decoder).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write the destination, so no high-dword clearing. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Always raises an invalid-opcode (\#UD) exception. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0. SLDT -- store the LDT selector to a register or
 *  16-bit memory operand. Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: full operand size is honoured. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1. STR -- store the task register selector to a register
 *  or 16-bit memory operand. Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: full operand size is honoured. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2. LLDT -- load the LDT selector from a register or
 *  16-bit memory operand; the heavy lifting is done by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. CPL checks presumably happen in iemCImpl_lldt -- confirm. */
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: raise #GP(0) for CPL != 0 before reading the selector. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3. LTR -- load the task register from a register or
 *  16-bit memory operand; the heavy lifting is done by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. CPL checks presumably happen in iemCImpl_ltr -- confirm. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: raise #GP(0) for CPL != 0 before reading the selector. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4. VERR -- verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0 (memory form). SGDT -- store the GDTR to memory;
 *  implemented by iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces the 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
806
807
/** Opcode 0x0f 0x01 /0, mod=3. VMCALL (VT-x) -- not implemented, decoded as
 *  an invalid opcode for now. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0, mod=3. VMLAUNCH (VT-x) -- not implemented, decoded as
 *  an invalid opcode for now. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0, mod=3. VMRESUME (VT-x) -- not implemented, decoded as
 *  an invalid opcode for now. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /0, mod=3. VMXOFF (VT-x) -- not implemented, decoded as
 *  an invalid opcode for now. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
838
839
/** Opcode 0x0f 0x01 /1 (memory form). SIDT -- store the IDTR to memory;
 *  implemented by iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces the 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
857
858
/** Opcode 0x0f 0x01 /1, mod=3. MONITOR -- deferred to iemCImpl_monitor with
 *  the effective segment of the implicit address operand. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
866
867
/** Opcode 0x0f 0x01 /1, mod=3. MWAIT -- deferred to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
875
876
/** Opcode 0x0f 0x01 /2 (memory form). LGDT -- load the GDTR from memory;
 *  implemented by iemCImpl_lgdt.
 *  NOTE(review): unlike sgdt/sidt this has no IEMOP_HLP_MIN_286() -- confirm
 *  whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces the 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
893
894
/** Opcode 0x0f 0x01 0xd0. XGETBV -- \#UD unless the guest CPU reports
 *  XSAVE/XRSTOR support; otherwise deferred to iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
906
907
/** Opcode 0x0f 0x01 0xd1. XSETBV -- \#UD unless the guest CPU reports
 *  XSAVE/XRSTOR support; otherwise deferred to iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
919
920
921/** Opcode 0x0f 0x01 /3. */
922FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
923{
924 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
925 ? IEMMODE_64BIT
926 : pIemCpu->enmEffOpSize;
927 IEM_MC_BEGIN(3, 1);
928 IEM_MC_ARG(uint8_t, iEffSeg, 0);
929 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
930 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
933 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
934 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
935 IEM_MC_END();
936 return VINF_SUCCESS;
937}
938
939
/*
 * AMD-V (SVM) encodings of 0x0f 0x01 with mod=3, r/m 0xd8..0xdf.
 * All are stubbed via FNIEMOP_UD_STUB -- presumably decoding to \#UD until
 * implemented; confirm against the macro definition.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
963
964/** Opcode 0x0f 0x01 /4. */
965FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
966{
967 IEMOP_MNEMONIC("smsw");
968 IEMOP_HLP_MIN_286();
969 IEMOP_HLP_NO_LOCK_PREFIX();
970 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
971 {
972 switch (pIemCpu->enmEffOpSize)
973 {
974 case IEMMODE_16BIT:
975 IEM_MC_BEGIN(0, 1);
976 IEM_MC_LOCAL(uint16_t, u16Tmp);
977 IEM_MC_FETCH_CR0_U16(u16Tmp);
978 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
979 { /* likely */ }
980 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
982 else
983 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
984 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
985 IEM_MC_ADVANCE_RIP();
986 IEM_MC_END();
987 return VINF_SUCCESS;
988
989 case IEMMODE_32BIT:
990 IEM_MC_BEGIN(0, 1);
991 IEM_MC_LOCAL(uint32_t, u32Tmp);
992 IEM_MC_FETCH_CR0_U32(u32Tmp);
993 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
994 IEM_MC_ADVANCE_RIP();
995 IEM_MC_END();
996 return VINF_SUCCESS;
997
998 case IEMMODE_64BIT:
999 IEM_MC_BEGIN(0, 1);
1000 IEM_MC_LOCAL(uint64_t, u64Tmp);
1001 IEM_MC_FETCH_CR0_U64(u64Tmp);
1002 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1003 IEM_MC_ADVANCE_RIP();
1004 IEM_MC_END();
1005 return VINF_SUCCESS;
1006
1007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1008 }
1009 }
1010 else
1011 {
1012 /* Ignore operand size here, memory refs are always 16-bit. */
1013 IEM_MC_BEGIN(0, 2);
1014 IEM_MC_LOCAL(uint16_t, u16Tmp);
1015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1017 IEM_MC_FETCH_CR0_U16(u16Tmp);
1018 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1019 { /* likely */ }
1020 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1022 else
1023 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1024 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1025 IEM_MC_ADVANCE_RIP();
1026 IEM_MC_END();
1027 return VINF_SUCCESS;
1028 }
1029}
1030
1031
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    /* LMSW - load the machine status word from a 16-bit register or memory
       operand; the CR0 update itself is done by iemCImpl_lmsw. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source - always a 16-bit read regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1060
1061
/** Opcode 0x0f 0x01 /7 (memory forms only). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* INVLPG m - invalidate the paging structures for the effective address;
       the work is done by iemCImpl_invlpg.  The mod=3 encodings of /7 are
       dispatched to swapgs/rdtscp by iemOp_Grp7 before we get here. */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1075
1076
/** Opcode 0x0f 0x01 /7, mod=3, R/M=0. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - only valid in 64-bit mode; deferred to iemCImpl_swapgs. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1085
1086
/** Opcode 0x0f 0x01 /7, mod=3, R/M=1. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP - not implemented yet; complains loudly and bails out. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1094
1095
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 decoder.  The /reg field selects the instruction; for several
     * values the mod=3 (register) encodings select additional instructions
     * via the R/M field instead of the base memory form.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt; mod=3: VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt; mod=3: monitor/mwait. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt; mod=3: xgetbv/xsetbv. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt; mod=3: AMD-V/SVM instructions (all eight R/M values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg; mod=3: swapgs/rdtscp. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1172
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source selector. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit operand sizes share the 64-bit worker. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source selector - decoding completes after the effective
           address calculation. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1274
1275
1276
/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    /* LAR - shares its worker with LSL; fIsLar=true. */
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1283
1284
/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    /* LSL - shares its worker with LAR; fIsLar=false. */
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1291
1292
/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    /* SYSCALL - fully deferred to iemCImpl_syscall. */
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1300
1301
/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    /* CLTS - fully deferred to iemCImpl_clts. */
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1309
1310
/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    /* SYSRET - fully deferred to iemCImpl_sysret. */
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1318
1319
/** Opcode 0x0f 0x08. INVD - not implemented yet (stub). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1323
1324
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD - only the CPL-0 check is performed; no caches are actually
       flushed (the instruction is otherwise treated as a NOP for now). */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1337
1338
/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 - architecturally defined invalid opcode. */
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1345
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Requires the 3DNow!-prefetch CPU feature; register operands and the
       absence of the feature both decode to #UD. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* NOTE(review): same mnemonic as /1 - confirm against AMD docs. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Only the effective address is decoded; no memory access is made. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1385
1386
/** Opcode 0x0f 0x0e. FEMMS (3DNow!) - stub. */
FNIEMOP_STUB(iemOp_femms);


/* 3DNow! instruction stubs; the third byte is the 3DNow! suffix dispatched
   by iemOp_3Dnow below.  None are implemented yet. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1462
1463
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! escape: the suffix byte after the operands selects the actual
       instruction.  Requires the 3DNow! CPU feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1505
1506
/* SSE/SSE2 move instruction stubs (0x0f 0x10..0x17); not implemented yet. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1523
1524
1525/** Opcode 0x0f 0x18. */
1526FNIEMOP_DEF(iemOp_prefetch_Grp16)
1527{
1528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1529 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1530 {
1531 IEMOP_HLP_NO_LOCK_PREFIX();
1532 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1533 {
1534 case 4: /* Aliased to /0 for the time being according to AMD. */
1535 case 5: /* Aliased to /0 for the time being according to AMD. */
1536 case 6: /* Aliased to /0 for the time being according to AMD. */
1537 case 7: /* Aliased to /0 for the time being according to AMD. */
1538 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1539 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1540 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1541 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1543 }
1544
1545 IEM_MC_BEGIN(0, 1);
1546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1548 /* Currently a NOP. */
1549 IEM_MC_ADVANCE_RIP();
1550 IEM_MC_END();
1551 return VINF_SUCCESS;
1552 }
1553
1554 return IEMOP_RAISE_INVALID_OPCODE();
1555}
1556
1557
1558/** Opcode 0x0f 0x19..0x1f. */
1559FNIEMOP_DEF(iemOp_nop_Ev)
1560{
1561 IEMOP_HLP_NO_LOCK_PREFIX();
1562 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1563 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1564 {
1565 IEM_MC_BEGIN(0, 0);
1566 IEM_MC_ADVANCE_RIP();
1567 IEM_MC_END();
1568 }
1569 else
1570 {
1571 IEM_MC_BEGIN(0, 1);
1572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 /* Currently a NOP. */
1575 IEM_MC_ADVANCE_RIP();
1576 IEM_MC_END();
1577 }
1578 return VINF_SUCCESS;
1579}
1580
1581
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV Rd,Cd - read a control register into a general register; the
       operand size is forced to the natural width of the mode. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1613
1614
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV Rd,Dd - read a debug register.  REX.R is rejected (no DR8+). */
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): sibling iemOp_mov_Dd_Rd uses
                                   IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX -
                                   confirm whether the same is wanted here. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1628
1629
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV Cd,Rd - write a general register into a control register; mirror
       of iemOp_mov_Rd_Cd above, same CR validity rules. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1661
1662
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV Dd,Rd - write a debug register.  REX.R is rejected (no DR8+). */
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1676
1677
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV Rd,Td - test register read; invalid on the CPUs we emulate. */
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1686
1687
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV Td,Rd - test register write; invalid on the CPUs we emulate. */
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1696
1697
/* SSE/SSE2 stubs for 0x0f 0x28..0x2f; not implemented yet. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1714
1715
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* WRMSR - fully deferred to iemCImpl_wrmsr. */
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1723
1724
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* RDTSC - fully deferred to iemCImpl_rdtsc. */
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1732
1733
/** Opcode 0x0f 0x32. (The old comment said 0x33, but RDMSR is 0x0f 0x32.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* RDMSR - fully deferred to iemCImpl_rdmsr. */
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1741
1742
/** Opcode 0x0f 0x33. (Was mislabelled 0x34; RDPMC is 0x0f 0x33.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1757
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes: in the 32-bit operand-size cases the high half of the destination
 * register is cleared even when the condition is false (the IEM_MC_ELSE
 * branch); in the memory forms the source is always fetched before the
 * condition is evaluated, so a faulting operand faults regardless of the
 * condition.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1858
1859
1860
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); /* move if OF=1 */
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); /* move if OF=0 */
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); /* move if CF=1 */
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); /* move if CF=0 */
}
1891
1892
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); /* move if ZF=1 */
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); /* move if ZF=0 */
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=1 or ZF=1 */
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); /* move if CF=0 and ZF=0 */
}
1923
1924
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); /* move if SF=1 */
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); /* move if SF=0 */
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); /* move if PF=1 */
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); /* move if PF=0 */
}
1955
1956
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); /* move if SF!=OF */
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); /* move if SF==OF */
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=1 or SF!=OF */
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); /* move if ZF=0 and SF==OF */
}

#undef CMOV_X
1989
/* SSE/SSE2 arithmetic and logical stubs for 0x0f 0x50..0x5f; not implemented yet. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2022
2023
2024/**
2025 * Common worker for SSE2 and MMX instructions on the forms:
2026 * pxxxx xmm1, xmm2/mem128
2027 * pxxxx mm1, mm2/mem32
2028 *
2029 * The 2nd operand is the first half of a register, which in the memory case
2030 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
2031 * memory accessed for MMX.
2032 *
2033 * Exceptions type 4.
2034 */
2035FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2036{
2037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2038 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2039 {
2040 case IEM_OP_PRF_SIZE_OP: /* SSE */
2041 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2042 {
2043 /*
2044 * Register, register.
2045 */
2046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2047 IEM_MC_BEGIN(2, 0);
2048 IEM_MC_ARG(uint128_t *, pDst, 0);
2049 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2050 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2051 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2052 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2053 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2054 IEM_MC_ADVANCE_RIP();
2055 IEM_MC_END();
2056 }
2057 else
2058 {
2059 /*
2060 * Register, memory.
2061 */
2062 IEM_MC_BEGIN(2, 2);
2063 IEM_MC_ARG(uint128_t *, pDst, 0);
2064 IEM_MC_LOCAL(uint64_t, uSrc);
2065 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2067
2068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2070 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2071 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2072
2073 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2074 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2075
2076 IEM_MC_ADVANCE_RIP();
2077 IEM_MC_END();
2078 }
2079 return VINF_SUCCESS;
2080
2081 case 0: /* MMX */
2082 if (!pImpl->pfnU64)
2083 return IEMOP_RAISE_INVALID_OPCODE();
2084 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2085 {
2086 /*
2087 * Register, register.
2088 */
2089 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2090 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2092 IEM_MC_BEGIN(2, 0);
2093 IEM_MC_ARG(uint64_t *, pDst, 0);
2094 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2095 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2096 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2097 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2098 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2099 IEM_MC_ADVANCE_RIP();
2100 IEM_MC_END();
2101 }
2102 else
2103 {
2104 /*
2105 * Register, memory.
2106 */
2107 IEM_MC_BEGIN(2, 2);
2108 IEM_MC_ARG(uint64_t *, pDst, 0);
2109 IEM_MC_LOCAL(uint32_t, uSrc);
2110 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2112
2113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2115 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2116 IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2117
2118 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2119 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2120
2121 IEM_MC_ADVANCE_RIP();
2122 IEM_MC_END();
2123 }
2124 return VINF_SUCCESS;
2125
2126 default:
2127 return IEMOP_RAISE_INVALID_OPCODE();
2128 }
2129}
2130
2131
/** Opcode 0x0f 0x60.
 * punpcklbw Pq,Qd / punpcklbw Vdq,Wdq -- dispatches MMX vs. SSE and operand
 * fetching to the shared low-half worker. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61.
 * punpcklwd Pq,Qd / punpcklwd Vdq,Wdq. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62.
 * punpckldq Pq,Qd / punpckldq Vdq,Wdq. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2154
2155
/* Opcodes 0x0f 0x63 thru 0x67 (pack/compare) -- still decoder stubs. */

/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2166
2167
2168/**
2169 * Common worker for SSE2 and MMX instructions on the forms:
2170 * pxxxx xmm1, xmm2/mem128
2171 * pxxxx mm1, mm2/mem64
2172 *
2173 * The 2nd operand is the second half of a register, which in the memory case
2174 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2175 * where it may read the full 128 bits or only the upper 64 bits.
2176 *
2177 * Exceptions type 4.
2178 */
2179FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2180{
2181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2182 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2183 {
2184 case IEM_OP_PRF_SIZE_OP: /* SSE */
2185 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2186 {
2187 /*
2188 * Register, register.
2189 */
2190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2191 IEM_MC_BEGIN(2, 0);
2192 IEM_MC_ARG(uint128_t *, pDst, 0);
2193 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2194 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2195 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2196 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2197 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2198 IEM_MC_ADVANCE_RIP();
2199 IEM_MC_END();
2200 }
2201 else
2202 {
2203 /*
2204 * Register, memory.
2205 */
2206 IEM_MC_BEGIN(2, 2);
2207 IEM_MC_ARG(uint128_t *, pDst, 0);
2208 IEM_MC_LOCAL(uint128_t, uSrc);
2209 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2211
2212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2215 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2216
2217 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2218 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2219
2220 IEM_MC_ADVANCE_RIP();
2221 IEM_MC_END();
2222 }
2223 return VINF_SUCCESS;
2224
2225 case 0: /* MMX */
2226 if (!pImpl->pfnU64)
2227 return IEMOP_RAISE_INVALID_OPCODE();
2228 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2229 {
2230 /*
2231 * Register, register.
2232 */
2233 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2234 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236 IEM_MC_BEGIN(2, 0);
2237 IEM_MC_ARG(uint64_t *, pDst, 0);
2238 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2239 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2240 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2241 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2242 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2243 IEM_MC_ADVANCE_RIP();
2244 IEM_MC_END();
2245 }
2246 else
2247 {
2248 /*
2249 * Register, memory.
2250 */
2251 IEM_MC_BEGIN(2, 2);
2252 IEM_MC_ARG(uint64_t *, pDst, 0);
2253 IEM_MC_LOCAL(uint64_t, uSrc);
2254 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2256
2257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2259 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2260 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2261
2262 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2263 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2264
2265 IEM_MC_ADVANCE_RIP();
2266 IEM_MC_END();
2267 }
2268 return VINF_SUCCESS;
2269
2270 default:
2271 return IEMOP_RAISE_INVALID_OPCODE();
2272 }
2273}
2274
2275
/** Opcode 0x0f 0x68.
 * punpckhbw Pq,Qq / punpckhbw Vdq,Wdq -- dispatches to the shared
 * high-half worker. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69.
 * punpckhwd Pq,Qd / punpckhwd Vdq,Wdq. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a.
 * punpckhdq Pq,Qd / punpckhdq Vdq,Wdq. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
/* NOTE(review): '_packssdq_' in the symbol below looks like a typo for
   '_packssdw_'; left unchanged since the opcode table references it. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c.
 * punpcklqdq Vdq,Wdq -- SSE2 only; the MMX form is presumably rejected by
 * the worker's NULL pfnU64 check -- TODO confirm the table entry. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d.
 * punpckhqdq Vdq,Wdq -- SSE2 only; see the 0x6c note. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2317
2318
/** Opcode 0x0f 0x6e.
 * MOVD/MOVQ: load a 32-bit (or, with REX.W, 64-bit) value from a general
 * register or memory into an MMX register or (with 0x66 prefix) an XMM
 * register.  The XMM destination is zero-extended to the full 128 bits. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: full 64-bit general register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2419
2420
/** Opcode 0x0f 0x6f.
 * MOVQ Pq,Qq (MMX, no prefix), MOVDQA Vdq,Wdq (0x66 prefix, alignment
 * checked) and MOVDQU Vdq,Wdq (0xf3 prefix, unaligned OK). */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned variants share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* movdqa checks 16-byte alignment, movdqu does not. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2514
2515
/** Opcode 0x0f 0x70.  The immediate here is evil!
 * PSHUFW (MMX ext, no prefix), PSHUFD (0x66), PSHUFLW (0xf2) and
 * PSHUFHW (0xf3).  "Evil" because the shuffle-order immediate byte comes
 * after the ModR/M displacement bytes, so in the memory forms it can only
 * be fetched after the effective address has been decoded.
 * NOTE(review): '_pshuflq_' in the function name looks like a typo for
 * '_pshuflw_'; left unchanged since the opcode table references it. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share decode structure; pick the worker. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the displacement, hence the late fetch. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the displacement, hence the late fetch. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2640
2641
/* Group 12 (0x0f 0x71) register-form workers -- all still decoder stubs. */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2659
2660
2661/** Opcode 0x0f 0x71. */
2662FNIEMOP_DEF(iemOp_Grp12)
2663{
2664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2665 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2666 return IEMOP_RAISE_INVALID_OPCODE();
2667 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2668 {
2669 case 0: case 1: case 3: case 5: case 7:
2670 return IEMOP_RAISE_INVALID_OPCODE();
2671 case 2:
2672 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2673 {
2674 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2675 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2676 default: return IEMOP_RAISE_INVALID_OPCODE();
2677 }
2678 case 4:
2679 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2680 {
2681 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2682 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2683 default: return IEMOP_RAISE_INVALID_OPCODE();
2684 }
2685 case 6:
2686 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2687 {
2688 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2689 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2690 default: return IEMOP_RAISE_INVALID_OPCODE();
2691 }
2692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2693 }
2694}
2695
2696
/* Group 13 (0x0f 0x72) register-form workers -- all still decoder stubs. */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2714
2715
2716/** Opcode 0x0f 0x72. */
2717FNIEMOP_DEF(iemOp_Grp13)
2718{
2719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2720 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2721 return IEMOP_RAISE_INVALID_OPCODE();
2722 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2723 {
2724 case 0: case 1: case 3: case 5: case 7:
2725 return IEMOP_RAISE_INVALID_OPCODE();
2726 case 2:
2727 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2728 {
2729 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2730 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2731 default: return IEMOP_RAISE_INVALID_OPCODE();
2732 }
2733 case 4:
2734 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2735 {
2736 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2737 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2738 default: return IEMOP_RAISE_INVALID_OPCODE();
2739 }
2740 case 6:
2741 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2742 {
2743 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2744 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2745 default: return IEMOP_RAISE_INVALID_OPCODE();
2746 }
2747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2748 }
2749}
2750
2751
/* Group 14 (0x0f 0x73) register-form workers -- all still decoder stubs.
   Note: psrldq/pslldq only exist in the 0x66-prefixed (SSE) form. */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2769
2770
2771/** Opcode 0x0f 0x73. */
2772FNIEMOP_DEF(iemOp_Grp14)
2773{
2774 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2775 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2776 return IEMOP_RAISE_INVALID_OPCODE();
2777 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2778 {
2779 case 0: case 1: case 4: case 5:
2780 return IEMOP_RAISE_INVALID_OPCODE();
2781 case 2:
2782 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2783 {
2784 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2785 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2786 default: return IEMOP_RAISE_INVALID_OPCODE();
2787 }
2788 case 3:
2789 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2790 {
2791 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2792 default: return IEMOP_RAISE_INVALID_OPCODE();
2793 }
2794 case 6:
2795 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2796 {
2797 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2798 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2799 default: return IEMOP_RAISE_INVALID_OPCODE();
2800 }
2801 case 7:
2802 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2803 {
2804 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2805 default: return IEMOP_RAISE_INVALID_OPCODE();
2806 }
2807 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2808 }
2809}
2810
2811
2812/**
2813 * Common worker for SSE2 and MMX instructions on the forms:
2814 * pxxx mm1, mm2/mem64
2815 * pxxx xmm1, xmm2/mem128
2816 *
2817 * Proper alignment of the 128-bit operand is enforced.
2818 * Exceptions type 4. SSE2 and MMX cpuid checks.
2819 */
2820FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2821{
2822 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2823 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2824 {
2825 case IEM_OP_PRF_SIZE_OP: /* SSE */
2826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2827 {
2828 /*
2829 * Register, register.
2830 */
2831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2832 IEM_MC_BEGIN(2, 0);
2833 IEM_MC_ARG(uint128_t *, pDst, 0);
2834 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2835 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2836 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2837 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2838 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2839 IEM_MC_ADVANCE_RIP();
2840 IEM_MC_END();
2841 }
2842 else
2843 {
2844 /*
2845 * Register, memory.
2846 */
2847 IEM_MC_BEGIN(2, 2);
2848 IEM_MC_ARG(uint128_t *, pDst, 0);
2849 IEM_MC_LOCAL(uint128_t, uSrc);
2850 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2852
2853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2855 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2856 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2857
2858 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2859 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2860
2861 IEM_MC_ADVANCE_RIP();
2862 IEM_MC_END();
2863 }
2864 return VINF_SUCCESS;
2865
2866 case 0: /* MMX */
2867 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2868 {
2869 /*
2870 * Register, register.
2871 */
2872 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2873 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2875 IEM_MC_BEGIN(2, 0);
2876 IEM_MC_ARG(uint64_t *, pDst, 0);
2877 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2878 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2879 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2880 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2881 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2882 IEM_MC_ADVANCE_RIP();
2883 IEM_MC_END();
2884 }
2885 else
2886 {
2887 /*
2888 * Register, memory.
2889 */
2890 IEM_MC_BEGIN(2, 2);
2891 IEM_MC_ARG(uint64_t *, pDst, 0);
2892 IEM_MC_LOCAL(uint64_t, uSrc);
2893 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2895
2896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2898 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2899 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2900
2901 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2902 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2903
2904 IEM_MC_ADVANCE_RIP();
2905 IEM_MC_END();
2906 }
2907 return VINF_SUCCESS;
2908
2909 default:
2910 return IEMOP_RAISE_INVALID_OPCODE();
2911 }
2912}
2913
2914
/** Opcode 0x0f 0x74.
 * pcmpeqb Pq,Qq / pcmpeqb Vdq,Wdq -- dispatches to the full-width worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}


/** Opcode 0x0f 0x75.
 * pcmpeqw Pq,Qq / pcmpeqw Vdq,Wdq. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}


/** Opcode 0x0f 0x76.
 * pcmpeqd Pq,Qq / pcmpeqd Vdq,Wdq.
 * NOTE(review): 'pcmped' in the function name looks like a typo for
 * 'pcmpeqd'; left unchanged since the opcode table references it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}


/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2949
2950
2951/** Opcode 0x0f 0x7e. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    /*
     * MOVD/MOVQ Ey,Pd/Vy (0x0f 0x7e): store the low 32/64 bits of an MMX or
     * XMM register to a general register or memory.  The mandatory prefix
     * selects the register file: 0x66 -> SSE (XMM source), none -> MMX;
     * REPZ/REPNZ combinations fall to the default case and raise #UD.
     * REX.W selects the 64-bit (qword) form, otherwise the low dword is moved.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* 64-bit: qword from XMM low half to 64-bit greg. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* 32-bit: low dword of XMM to 32-bit greg (upper half zeroed by the store helper's greg semantics). */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX -- note: MMX register index takes no REX extension. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            /* Other mandatory-prefix combinations (F3h MOVQ Vq,Wq not handled here) -> #UD. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3057
3058
3059/** Opcode 0x0f 0x7f. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    /*
     * 0x0f 0x7f: store form.  0x66 -> MOVDQA Wdq,Vdq (aligned 128-bit store),
     * F3 -> MOVDQU Wdq,Vdq (unaligned 128-bit store), no prefix -> MOVQ Qq,Pq
     * (MMX 64-bit store).  Anything else raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned variants share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.  Alignment is irrelevant here.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.  MOVDQA uses the alignment-checking store.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3153
3154
3155
3156/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when OF=1. */
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3190
3191
3192/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when OF=0 (note inverted arms). */
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3226
3227
3228/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when CF=1. */
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3262
3263
3264/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when CF=0 (inverted arms). */
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3298
3299
3300/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when ZF=1. */
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3334
3335
3336/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when ZF=0 (inverted arms). */
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3370
3371
3372/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when CF=1 or ZF=1. */
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3406
3407
3408/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when CF=0 and ZF=0 (inverted arms). */
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3442
3443
3444/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when SF=1. */
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3478
3479
3480/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when SF=0 (inverted arms). */
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3514
3515
3516/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when PF=1. */
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3550
3551
3552/** Opcode 0x0f 0x8b. */
3553FNIEMOP_DEF(iemOp_jnp_Jv)
3554{
3555 IEMOP_MNEMONIC("jo Jv");
3556 IEMOP_HLP_MIN_386();
3557 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3558 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3559 {
3560 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3561 IEMOP_HLP_NO_LOCK_PREFIX();
3562
3563 IEM_MC_BEGIN(0, 0);
3564 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3565 IEM_MC_ADVANCE_RIP();
3566 } IEM_MC_ELSE() {
3567 IEM_MC_REL_JMP_S16(i16Imm);
3568 } IEM_MC_ENDIF();
3569 IEM_MC_END();
3570 }
3571 else
3572 {
3573 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3574 IEMOP_HLP_NO_LOCK_PREFIX();
3575
3576 IEM_MC_BEGIN(0, 0);
3577 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3578 IEM_MC_ADVANCE_RIP();
3579 } IEM_MC_ELSE() {
3580 IEM_MC_REL_JMP_S32(i32Imm);
3581 } IEM_MC_ENDIF();
3582 IEM_MC_END();
3583 }
3584 return VINF_SUCCESS;
3585}
3586
3587
3588/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when SF != OF (signed less). */
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3622
3623
3624/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when SF == OF (inverted arms). */
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3658
3659
3660/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when ZF=1 or SF != OF. */
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3694
3695
3696/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    /* Near Jcc rel16/rel32: branch taken when ZF=0 and SF == OF (inverted arms). */
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3730
3731
3732/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    /* SETO r/m8: writes 1 when OF=1, else 0, to the byte register or memory operand. */
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3771
3772
3773/** Opcode 0x0f 0x91. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    /* SETNO r/m8: writes 1 when OF=0, else 0 (note inverted store values). */
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3812
3813
3814/** Opcode 0x0f 0x92. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    /* SETC/SETB r/m8: writes 1 when CF=1, else 0. */
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3853
3854
3855/** Opcode 0x0f 0x93. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    /* SETNC/SETAE r/m8: writes 1 when CF=0, else 0 (inverted store values). */
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3894
3895
3896/** Opcode 0x0f 0x94. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    /* SETE/SETZ r/m8: writes 1 when ZF=1, else 0. */
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3935
3936
3937/** Opcode 0x0f 0x95. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    /* SETNE/SETNZ r/m8: writes 1 when ZF=0, else 0 (inverted store values). */
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3976
3977
3978/** Opcode 0x0f 0x96. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    /* SETBE/SETNA r/m8: writes 1 when CF=1 or ZF=1, else 0. */
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4017
4018
4019/** Opcode 0x0f 0x97. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    /* SETNBE/SETA r/m8: writes 1 when CF=0 and ZF=0, else 0 (inverted store values). */
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4058
4059
4060/** Opcode 0x0f 0x98. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    /* SETS r/m8: writes 1 when SF=1, else 0. */
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4099
4100
4101/** Opcode 0x0f 0x99. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    /* SETNS r/m8: writes 1 when SF=0, else 0 (inverted store values). */
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4140
4141
4142/** Opcode 0x0f 0x9a. */
4143FNIEMOP_DEF(iemOp_setp_Eb)
4144{
4145 IEMOP_MNEMONIC("setnp Eb");
4146 IEMOP_HLP_MIN_386();
4147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4148 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4149
4150 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4151 * any way. AMD says it's "unused", whatever that means. We're
4152 * ignoring for now. */
4153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4154 {
4155 /* register target */
4156 IEM_MC_BEGIN(0, 0);
4157 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4158 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4159 } IEM_MC_ELSE() {
4160 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4161 } IEM_MC_ENDIF();
4162 IEM_MC_ADVANCE_RIP();
4163 IEM_MC_END();
4164 }
4165 else
4166 {
4167 /* memory target */
4168 IEM_MC_BEGIN(0, 1);
4169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4171 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4172 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4173 } IEM_MC_ELSE() {
4174 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4175 } IEM_MC_ENDIF();
4176 IEM_MC_ADVANCE_RIP();
4177 IEM_MC_END();
4178 }
4179 return VINF_SUCCESS;
4180}
4181
4182
/** Opcode 0x0f 0x9b - SETNP Eb: store 1 in the destination byte if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4222
4223
/** Opcode 0x0f 0x9c - SETL Eb: store 1 in the destination byte if SF!=OF, else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4263
4264
/** Opcode 0x0f 0x9d - SETNL/SETGE Eb: store 1 in the destination byte if SF==OF, else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4304
4305
/** Opcode 0x0f 0x9e - SETLE Eb: store 1 in the destination byte if ZF=1 or SF!=OF, else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4345
4346
/** Opcode 0x0f 0x9f - SETNLE/SETG Eb: store 1 in the destination byte if ZF=0 and SF==OF, else 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4386
4387
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the given segment register onto the stack using the current
 * effective operand size.  ES/CS/SS/DS pushes are invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT), while FS/GS remain pushable with a default 64-bit
 * operand size there.
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Note: dedicated sreg-push micro-op, not the plain U32 push. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4430
4431
/** Opcode 0x0f 0xa0 - PUSH FS; defers to the common segment-push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4440
4441
/** Opcode 0x0f 0xa1 - POP FS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4450
4451
/** Opcode 0x0f 0xa2 - CPUID; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4460
4461
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * For a register destination the bit offset is simply masked to the operand
 * width (0xf/0x1f/0x3f).  For a memory destination the signed bit offset in
 * the source register first adjusts the effective address by whole
 * words/dwords/qwords (arithmetic shift right by 4/5/6, then scaled back up)
 * before the remaining low bits select the bit within that unit, matching
 * the x86 bit-string addressing semantics.
 *
 * @param   pImpl   The instruction implementation table (bt/bts/btr/btc);
 *                  pfnLockedU16 is NULL for BT, which has no LOCK form.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,        pu16Dst,                0);
                IEM_MC_ARG(uint16_t,          u16Src,                 1);
                IEM_MC_ARG(uint32_t *,        pEFlags,                2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,        pu32Dst,                0);
                IEM_MC_ARG(uint32_t,          u32Src,                 1);
                IEM_MC_ARG(uint32_t *,        pEFlags,                2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,        pu64Dst,                0);
                IEM_MC_ARG(uint64_t,          u64Src,                 1);
                IEM_MC_ARG(uint32_t *,        pEFlags,                2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* NOTE(review): fAccess is computed but NOREF'd below; the mapping
           always uses IEM_ACCESS_DATA_RW, even for BT which only reads.
           Presumably a leftover/todo — confirm before relying on it. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        NOREF(fAccess);

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int16_t,               i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                /* i16AddrAdj: signed word index = bit offset >> 4, scaled by 2 bytes. */
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,               i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                /* i32AddrAdj: signed dword index = bit offset >> 5, scaled by 4 bytes. */
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int64_t,               i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                /* i64AddrAdj: signed qword index = bit offset >> 6, scaled by 8 bytes. */
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4638
4639
4640/** Opcode 0x0f 0xa3. */
4641FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4642{
4643 IEMOP_MNEMONIC("bt Gv,Gv");
4644 IEMOP_HLP_MIN_386();
4645 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4646}
4647
4648
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate byte count.  For the memory form
 * the immediate is fetched after the effective address is calculated (note
 * the cbImm=1 argument to IEM_MC_CALC_RM_EFF_ADDR so RIP-relative addressing
 * accounts for the trailing immediate).
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4793
4794
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from the CL register
 * (fetched via X86_GREG_xCX, low byte).
 *
 * @param   pImpl   The shld/shrd implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4938
4939
4940
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib; defers to the common immediate-count worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4948
4949
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL; defers to the common CL-count worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4957
4958
/** Opcode 0x0f 0xa8 - PUSH GS; defers to the common segment-push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4967
4968
/** Opcode 0x0f 0xa9 - POP GS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4977
4978
/** Opcode 0x0f 0xaa - RSM (resume from system management mode); decoder stub, not implemented yet. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
4982
4983
/** Opcode 0x0f 0xab - BTS Ev,Gv; defers to the common bit-test worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4991
4992
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib; defers to the common immediate-count worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5000
5001
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL; defers to the common CL-count worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5009
5010
/** Opcode 0x0f 0xae mem/0 - FXSAVE m512; raises \#UD if the guest CPU lacks
 *  FXSAVE/FXRSTOR support, otherwise deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5029
5030
/** Opcode 0x0f 0xae mem/1 - FXRSTOR m512; raises \#UD if the guest CPU lacks
 *  FXSAVE/FXRSTOR support, otherwise deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5049
5050
/** Opcode 0x0f 0xae mem/2 - LDMXCSR; decoder stub, not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - STMXCSR; decoder stub, not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - XSAVE; stub that raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - XRSTOR; stub that raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - XSAVEOPT; stub that raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - CLFLUSH; decoder stub, not implemented yet. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5068
5069
/** Opcode 0x0f 0xae 11b/5 - LFENCE; \#UD unless the guest CPU has SSE2.
 *  Uses the real LFENCE when the host supports SSE2, otherwise an
 *  alternative memory-fence implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5087
5088
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 (CPUID.01H:EDX.SSE2); #UD if the guest lacks it. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real mfence instruction on capable hosts, else a generic fence. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5106
5107
5108/** Opcode 0x0f 0xae 11b/7. */
5109FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5110{
5111 IEMOP_MNEMONIC("sfence");
5112 IEMOP_HLP_NO_LOCK_PREFIX();
5113 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5114 return IEMOP_RAISE_INVALID_OPCODE();
5115
5116 IEM_MC_BEGIN(0, 0);
5117 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5118 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5119 else
5120 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5121 IEM_MC_ADVANCE_RIP();
5122 IEM_MC_END();
5123 return VINF_SUCCESS;
5124}
5125
5126
/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE - decoded but raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE - decoded but raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE - decoded but raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE - decoded but raises \#UD (FSGSBASE not implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5138
5139
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  The memory forms dispatch purely on ModRM.reg;
     * the register (mod=11b) forms additionally depend on the legacy
     * prefixes (no prefix vs. F3 for the FSGSBASE group).
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms - prefix sensitive. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefixes: the fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: the FSGSBASE group. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default: /* any other prefix combination is undefined. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5199
5200
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF/ZF/AF/PF are architecturally undefined after IMUL; only CF/OF are defined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Two-operand IMUL shares the generic Gv,Ev binary-operator decoder. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5209
5210
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG r/m8,r8: compares AL with the destination; on match the source
     * register is stored in the destination, otherwise AL is loaded from it.
     * The assembly worker receives AL by reference so it can update it.
     */
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the guest register refs. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte read/write, work on a local AL
           copy, and commit memory/EFLAGS/AL only after the operation. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5269
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG r/m,r for 16/32/64-bit operands: compares rAX with the
     * destination; on match the source register is stored, otherwise rAX is
     * loaded from the destination.  On 32-bit hosts (RT_ARCH_X86) the 64-bit
     * source is passed by reference since a uint64_t by-value argument is
     * awkward for the assembly worker there.
     */
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero the high halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map r/w, operate on a local rAX copy, then
           commit memory, EFLAGS and the possibly-updated rAX. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5463
5464
/**
 * Common worker for LSS/LFS/LGS (0x0f 0xb2/0xb4/0xb5): loads a far pointer
 * from memory into the given segment register and general register.
 *
 * The memory operand layout is offset-first followed by a 16-bit selector;
 * the selector displacement is therefore 2/4/8 bytes depending on the
 * effective operand size.  The actual register loading (including all
 * protected-mode checks) is done by iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 * @param   bRm         The ModRM byte (must denote a memory operand).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5526
5527
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LSS only takes a memory operand; the register form is #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5538
5539
5540/** Opcode 0x0f 0xb3. */
5541FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5542{
5543 IEMOP_MNEMONIC("btr Ev,Gv");
5544 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5545}
5546
5547
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LFS only takes a memory operand; the register form is #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5558
5559
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LGS only takes a memory operand; the register form is #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5570
5571
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    /*
     * MOVZX Gv,Eb: zero-extend a byte (register or memory) into a 16/32/64-bit
     * general register, selected by the effective operand size.
     */
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5662
5663
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    /*
     * MOVZX Gv,Ew: zero-extend a word into a 32/64-bit general register.
     * Only the 64-bit case differs; 16-bit operand size is treated as 32-bit.
     */
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5730
5731
/** Opcode 0x0f 0xb8. POPCNT (F3 prefix) / JMPE - not yet implemented (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5734
5735
/** Opcode 0x0f 0xb9. Group 10 - always raises \#UD (the UD1 opcode). */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5742
5743
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /*
     * Group 8: BT/BTS/BTR/BTC with an immediate bit offset (Ev,Ib).
     * ModRM.reg selects the operation; /0../3 are undefined.  Unlike the
     * Gv forms there is no displacement adjustment - the bit offset is
     * simply masked to the operand width.
     */
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit offset modulo 16 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); /* bit offset modulo 32 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); /* bit offset modulo 64 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; the writing forms (with a locked worker) need RW access.
           LOCK is only permitted on the writing forms. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = account for the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = account for the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = account for the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5906
5907
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    /* Shares the common Ev,Gv bit-operation decoder with BT/BTS/BTR. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5915
5916
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is architecturally defined after BSF; the rest are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5925
5926
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Only ZF is architecturally defined after BSR; the rest are undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5935
5936
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /*
     * MOVSX Gv,Eb: sign-extend a byte (register or memory) into a 16/32/64-bit
     * general register, selected by the effective operand size.
     */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6027
6028
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a word source (register or memory) into the
       destination general register. Only two result widths exist: 32-bit
       (used for both 16- and 32-bit effective operand size, see todo below)
       and 64-bit. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: sign-extend word to dword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: sign-extend word to qword. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6095
6096
6097/** Opcode 0x0f 0xc0. */
6098FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6099{
6100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6101 IEMOP_HLP_MIN_486();
6102 IEMOP_MNEMONIC("xadd Eb,Gb");
6103
6104 /*
6105 * If rm is denoting a register, no more instruction bytes.
6106 */
6107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6108 {
6109 IEMOP_HLP_NO_LOCK_PREFIX();
6110
6111 IEM_MC_BEGIN(3, 0);
6112 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6113 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6115
6116 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6117 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6118 IEM_MC_REF_EFLAGS(pEFlags);
6119 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6120
6121 IEM_MC_ADVANCE_RIP();
6122 IEM_MC_END();
6123 }
6124 else
6125 {
6126 /*
6127 * We're accessing memory.
6128 */
6129 IEM_MC_BEGIN(3, 3);
6130 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6131 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6132 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6133 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6135
6136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6137 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6138 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6139 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6140 IEM_MC_FETCH_EFLAGS(EFlags);
6141 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6142 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6143 else
6144 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6145
6146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6147 IEM_MC_COMMIT_EFLAGS(EFlags);
6148 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
6149 IEM_MC_ADVANCE_RIP();
6150 IEM_MC_END();
6151 return VINF_SUCCESS;
6152 }
6153 return VINF_SUCCESS;
6154}
6155
6156
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange-and-add for word/dword/qword operand sizes.
       Same structure as the byte form (0x0f 0xc0): register destinations
       reject LOCK; memory destinations pick the locked or unlocked
       assembly worker based on the LOCK prefix. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GREG writes in 64-bit mode zero the high dword;
                   both operands were written through references, so clear
                   the high halves explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Each case maps the destination RW, snapshots the source register
           into a local, runs the (possibly locked) worker, commits the
           memory + eflags, then writes the old destination value back into
           the source register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,         0);
                IEM_MC_ARG(uint16_t *, pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,         0);
                IEM_MC_ARG(uint32_t *, pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,         0);
                IEM_MC_ARG(uint64_t *, pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6309
/* Opcodes 0x0f 0xc2..0xc6 — not implemented yet; FNIEMOP_STUB declares a
   placeholder decoder entry (see the macro's definition for the exact
   behaviour when hit). */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6324
6325
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with the memory qword; on match store
       ECX:EBX, otherwise load the memory value into EDX:EAX. ZF reports
       the outcome; the worker sets it, and the register writeback below is
       only done when ZF is clear (mismatch). */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();  /* decoding must be complete before the RW mapping */
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Only the low 32 bits of RAX/RDX are fetched into the comparand pair. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Likewise for the RBX/RCX replacement pair. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6370
6371
/* Group 9 sub-encodings not implemented yet; FNIEMOP_UD_STUB_1 declares
   placeholder entries (see the macro's definition for the behaviour). */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6389
6390
6391/** Opcode 0x0f 0xc7. */
6392FNIEMOP_DEF(iemOp_Grp9)
6393{
6394 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6396 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6397 {
6398 case 0: case 2: case 3: case 4: case 5:
6399 return IEMOP_RAISE_INVALID_OPCODE();
6400 case 1:
6401 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6402 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6403 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6404 return IEMOP_RAISE_INVALID_OPCODE();
6405 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6406 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6407 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6408 case 6:
6409 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6410 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6411 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6412 {
6413 case 0:
6414 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6415 case IEM_OP_PRF_SIZE_OP:
6416 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6417 case IEM_OP_PRF_REPZ:
6418 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6419 default:
6420 return IEMOP_RAISE_INVALID_OPCODE();
6421 }
6422 case 7:
6423 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6424 {
6425 case 0:
6426 case IEM_OP_PRF_REPZ:
6427 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6428 default:
6429 return IEMOP_RAISE_INVALID_OPCODE();
6430 }
6431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6432 }
6433}
6434
6435
/**
 * Common 'bswap register' helper.
 *
 * Byte-swaps the general register given by @a iReg at the current effective
 * operand size.  The 16-bit case deliberately uses a 32-bit reference
 * without clearing the high dword (see inline comment); the 32-bit case
 * clears it, matching 32-bit GREG write semantics in long mode.
 *
 * @param   iReg    The general register index (including any REX extension),
 *                  supplied by the per-opcode wrappers below.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6475
6476
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX / r8 — REX.B selects the high register bank. */
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6487
6488
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX / r9 — see iemOp_bswap_rAX_r8 for the REX.B note. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6496
6497
6498/** Opcode 0x0f 0xca. */
6499FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6500{
6501 IEMOP_MNEMONIC("bswap rDX/r9");
6502 IEMOP_HLP_MIN_486();
6503 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6504}
6505
6506
6507/** Opcode 0x0f 0xcb. */
6508FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6509{
6510 IEMOP_MNEMONIC("bswap rBX/r9");
6511 IEMOP_HLP_MIN_486();
6512 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6513}
6514
6515
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP / r12 — see iemOp_bswap_rAX_r8 for the REX.B note. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6523
6524
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP / r13 — see iemOp_bswap_rAX_r8 for the REX.B note. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6532
6533
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI / r14 — see iemOp_bswap_rAX_r8 for the REX.B note. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6541
6542
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI / r15 — see iemOp_bswap_rAX_r8 for the REX.B note. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6550
6551
6552
/* Opcodes 0x0f 0xd0..0xd6 — not implemented yet (FNIEMOP_STUB placeholders). */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6567
6568
6569/** Opcode 0x0f 0xd7. */
6570FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6571{
6572 /* Docs says register only. */
6573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6574 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6575 return IEMOP_RAISE_INVALID_OPCODE();
6576
6577 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6578 /** @todo testcase: Check that the instruction implicitly clears the high
6579 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6580 * and opcode modifications are made to work with the whole width (not
6581 * just 128). */
6582 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6583 {
6584 case IEM_OP_PRF_SIZE_OP: /* SSE */
6585 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6586 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6587 IEM_MC_BEGIN(2, 0);
6588 IEM_MC_ARG(uint64_t *, pDst, 0);
6589 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6590 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6591 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6592 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6593 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6594 IEM_MC_ADVANCE_RIP();
6595 IEM_MC_END();
6596 return VINF_SUCCESS;
6597
6598 case 0: /* MMX */
6599 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6600 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6601 IEM_MC_BEGIN(2, 0);
6602 IEM_MC_ARG(uint64_t *, pDst, 0);
6603 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6604 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6605 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6606 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6607 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6608 IEM_MC_ADVANCE_RIP();
6609 IEM_MC_END();
6610 return VINF_SUCCESS;
6611
6612 default:
6613 return IEMOP_RAISE_INVALID_OPCODE();
6614 }
6615}
6616
6617
/* Opcodes 0x0f 0xd8..0xee — not implemented yet (FNIEMOP_STUB placeholders). */

/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6664
6665
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: forwarded to the common MMX/SSE2 full-width binary-op helper
       with the pxor assembly implementation. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6672
6673
/* Opcodes 0x0f 0xf0..0xfe — not implemented yet (FNIEMOP_STUB placeholders). */

/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6704
6705
6706const PFNIEMOP g_apfnTwoByteMap[256] =
6707{
6708 /* 0x00 */ iemOp_Grp6,
6709 /* 0x01 */ iemOp_Grp7,
6710 /* 0x02 */ iemOp_lar_Gv_Ew,
6711 /* 0x03 */ iemOp_lsl_Gv_Ew,
6712 /* 0x04 */ iemOp_Invalid,
6713 /* 0x05 */ iemOp_syscall,
6714 /* 0x06 */ iemOp_clts,
6715 /* 0x07 */ iemOp_sysret,
6716 /* 0x08 */ iemOp_invd,
6717 /* 0x09 */ iemOp_wbinvd,
6718 /* 0x0a */ iemOp_Invalid,
6719 /* 0x0b */ iemOp_ud2,
6720 /* 0x0c */ iemOp_Invalid,
6721 /* 0x0d */ iemOp_nop_Ev_GrpP,
6722 /* 0x0e */ iemOp_femms,
6723 /* 0x0f */ iemOp_3Dnow,
6724 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6725 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6726 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6727 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6728 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6729 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6730 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6731 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6732 /* 0x18 */ iemOp_prefetch_Grp16,
6733 /* 0x19 */ iemOp_nop_Ev,
6734 /* 0x1a */ iemOp_nop_Ev,
6735 /* 0x1b */ iemOp_nop_Ev,
6736 /* 0x1c */ iemOp_nop_Ev,
6737 /* 0x1d */ iemOp_nop_Ev,
6738 /* 0x1e */ iemOp_nop_Ev,
6739 /* 0x1f */ iemOp_nop_Ev,
6740 /* 0x20 */ iemOp_mov_Rd_Cd,
6741 /* 0x21 */ iemOp_mov_Rd_Dd,
6742 /* 0x22 */ iemOp_mov_Cd_Rd,
6743 /* 0x23 */ iemOp_mov_Dd_Rd,
6744 /* 0x24 */ iemOp_mov_Rd_Td,
6745 /* 0x25 */ iemOp_Invalid,
6746 /* 0x26 */ iemOp_mov_Td_Rd,
6747 /* 0x27 */ iemOp_Invalid,
6748 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6749 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6750 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6751 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6752 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6753 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6754 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6755 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6756 /* 0x30 */ iemOp_wrmsr,
6757 /* 0x31 */ iemOp_rdtsc,
6758 /* 0x32 */ iemOp_rdmsr,
6759 /* 0x33 */ iemOp_rdpmc,
6760 /* 0x34 */ iemOp_sysenter,
6761 /* 0x35 */ iemOp_sysexit,
6762 /* 0x36 */ iemOp_Invalid,
6763 /* 0x37 */ iemOp_getsec,
6764 /* 0x38 */ iemOp_3byte_Esc_A4,
6765 /* 0x39 */ iemOp_Invalid,
6766 /* 0x3a */ iemOp_3byte_Esc_A5,
6767 /* 0x3b */ iemOp_Invalid,
6768 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6769 /* 0x3d */ iemOp_Invalid,
6770 /* 0x3e */ iemOp_Invalid,
6771 /* 0x3f */ iemOp_Invalid,
6772 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6773 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6774 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6775 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6776 /* 0x44 */ iemOp_cmove_Gv_Ev,
6777 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6778 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6779 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6780 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6781 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6782 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6783 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6784 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6785 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6786 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6787 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6788 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6789 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6790 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6791 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6792 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6793 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6794 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6795 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6796 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6797 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6798 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6799 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6800 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6801 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6802 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6803 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6804 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6805 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6806 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6807 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6808 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6809 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6810 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6811 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6812 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6813 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6814 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6815 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6816 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6817 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6818 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6819 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6820 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6821 /* 0x71 */ iemOp_Grp12,
6822 /* 0x72 */ iemOp_Grp13,
6823 /* 0x73 */ iemOp_Grp14,
6824 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6825 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6826 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6827 /* 0x77 */ iemOp_emms,
6828 /* 0x78 */ iemOp_vmread_AmdGrp17,
6829 /* 0x79 */ iemOp_vmwrite,
6830 /* 0x7a */ iemOp_Invalid,
6831 /* 0x7b */ iemOp_Invalid,
6832 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6833 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6834 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6835 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6836 /* 0x80 */ iemOp_jo_Jv,
6837 /* 0x81 */ iemOp_jno_Jv,
6838 /* 0x82 */ iemOp_jc_Jv,
6839 /* 0x83 */ iemOp_jnc_Jv,
6840 /* 0x84 */ iemOp_je_Jv,
6841 /* 0x85 */ iemOp_jne_Jv,
6842 /* 0x86 */ iemOp_jbe_Jv,
6843 /* 0x87 */ iemOp_jnbe_Jv,
6844 /* 0x88 */ iemOp_js_Jv,
6845 /* 0x89 */ iemOp_jns_Jv,
6846 /* 0x8a */ iemOp_jp_Jv,
6847 /* 0x8b */ iemOp_jnp_Jv,
6848 /* 0x8c */ iemOp_jl_Jv,
6849 /* 0x8d */ iemOp_jnl_Jv,
6850 /* 0x8e */ iemOp_jle_Jv,
6851 /* 0x8f */ iemOp_jnle_Jv,
6852 /* 0x90 */ iemOp_seto_Eb,
6853 /* 0x91 */ iemOp_setno_Eb,
6854 /* 0x92 */ iemOp_setc_Eb,
6855 /* 0x93 */ iemOp_setnc_Eb,
6856 /* 0x94 */ iemOp_sete_Eb,
6857 /* 0x95 */ iemOp_setne_Eb,
6858 /* 0x96 */ iemOp_setbe_Eb,
6859 /* 0x97 */ iemOp_setnbe_Eb,
6860 /* 0x98 */ iemOp_sets_Eb,
6861 /* 0x99 */ iemOp_setns_Eb,
6862 /* 0x9a */ iemOp_setp_Eb,
6863 /* 0x9b */ iemOp_setnp_Eb,
6864 /* 0x9c */ iemOp_setl_Eb,
6865 /* 0x9d */ iemOp_setnl_Eb,
6866 /* 0x9e */ iemOp_setle_Eb,
6867 /* 0x9f */ iemOp_setnle_Eb,
6868 /* 0xa0 */ iemOp_push_fs,
6869 /* 0xa1 */ iemOp_pop_fs,
6870 /* 0xa2 */ iemOp_cpuid,
6871 /* 0xa3 */ iemOp_bt_Ev_Gv,
6872 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6873 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6874 /* 0xa6 */ iemOp_Invalid,
6875 /* 0xa7 */ iemOp_Invalid,
6876 /* 0xa8 */ iemOp_push_gs,
6877 /* 0xa9 */ iemOp_pop_gs,
6878 /* 0xaa */ iemOp_rsm,
6879 /* 0xab */ iemOp_bts_Ev_Gv,
6880 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6881 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6882 /* 0xae */ iemOp_Grp15,
6883 /* 0xaf */ iemOp_imul_Gv_Ev,
6884 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6885 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6886 /* 0xb2 */ iemOp_lss_Gv_Mp,
6887 /* 0xb3 */ iemOp_btr_Ev_Gv,
6888 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6889 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6890 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6891 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6892 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6893 /* 0xb9 */ iemOp_Grp10,
6894 /* 0xba */ iemOp_Grp8,
6895 /* 0xbb */ iemOp_btc_Ev_Gv,
6896 /* 0xbc */ iemOp_bsf_Gv_Ev,
6897 /* 0xbd */ iemOp_bsr_Gv_Ev,
6898 /* 0xbe */ iemOp_movsx_Gv_Eb,
6899 /* 0xbf */ iemOp_movsx_Gv_Ew,
6900 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6901 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6902 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6903 /* 0xc3 */ iemOp_movnti_My_Gy,
6904 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6905 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6906 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6907 /* 0xc7 */ iemOp_Grp9,
6908 /* 0xc8 */ iemOp_bswap_rAX_r8,
6909 /* 0xc9 */ iemOp_bswap_rCX_r9,
6910 /* 0xca */ iemOp_bswap_rDX_r10,
6911 /* 0xcb */ iemOp_bswap_rBX_r11,
6912 /* 0xcc */ iemOp_bswap_rSP_r12,
6913 /* 0xcd */ iemOp_bswap_rBP_r13,
6914 /* 0xce */ iemOp_bswap_rSI_r14,
6915 /* 0xcf */ iemOp_bswap_rDI_r15,
6916 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6917 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6918 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6919 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6920 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6921 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6922 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6923 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6924 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6925 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6926 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6927 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6928 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6929 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6930 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6931 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6932 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6933 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6934 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6935 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6936 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6937 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6938 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6939 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6940 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6941 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6942 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6943 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6944 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6945 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6946 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6947 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6948 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6949 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6950 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6951 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6952 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6953 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6954 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6955 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6956 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6957 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6958 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6959 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6960 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6961 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6962 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6963 /* 0xff */ iemOp_Invalid
6964};
6965
6966/** @} */
6967
6968
6969/** @name One byte opcodes.
6970 *
6971 * @{
6972 */
6973
/** Opcode 0x00. ADD Eb,Gb - add byte register into byte reg/mem. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. ADD Ev,Gv - add word/dword/qword register into reg/mem. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. ADD Gb,Eb - add byte reg/mem into byte register. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. ADD Gv,Ev - add reg/mem into register. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. ADD AL,Ib - add immediate byte into AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. ADD rAX,Iz - add immediate (sign-extended to op size) into rAX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7020
7021
/** Opcode 0x06. PUSH ES. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. POP ES - invalid in 64-bit mode; defers to the C implementation. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7038
7039
/** Opcode 0x08. OR Eb,Gb - AF is left undefined by the hardware, hence the
 *  verification exemption. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7047
7048
7049/** Opcode 0x09. */
7050FNIEMOP_DEF(iemOp_or_Ev_Gv)
7051{
7052 IEMOP_MNEMONIC("or Ev,Gv ");
7053 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7054 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7055}
7056
7057
/** Opcode 0x0a. OR Gb,Eb - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. OR Gv,Ev - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. OR AL,Ib - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. OR rAX,Iz - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7092
7093
/** Opcode 0x0e. PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7100
7101
/** Opcode 0x0f. Two-byte opcode escape: fetches the second opcode byte and
 *  dispatches through g_apfnTwoByteMap. Requires a 286 or later. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7110
/** Opcode 0x10. ADC Eb,Gb - add with carry, byte reg into reg/mem. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. ADC Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. ADC Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. ADC Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. ADC AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. ADC rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7157
7158
/** Opcode 0x16. PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. POP SS - invalid in 64-bit mode; defers to the C implementation. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7175
7176
/** Opcode 0x18. SBB Eb,Gb - subtract with borrow, byte reg from reg/mem. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. SBB Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. SBB Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. SBB Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. SBB AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. SBB rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7223
7224
/** Opcode 0x1e. PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. POP DS - invalid in 64-bit mode; defers to the C implementation. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7241
7242
/** Opcode 0x20. AND Eb,Gb - AF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. AND Ev,Gv - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. AND Gb,Eb - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. AND Gv,Ev - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. AND AL,Ib - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. AND rAX,Iz - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7295
7296
/** Opcode 0x26. ES segment-override prefix: records the prefix, sets the
 *  effective segment and decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7307
7308
/** Opcode 0x27. DAA - decimal adjust AL after addition. Invalid in 64-bit
 *  mode; OF is left undefined by the hardware. Deferred to the C impl. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7318
7319
/** Opcode 0x28. SUB Eb,Gb - subtract byte register from byte reg/mem. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7366
7367
/** Opcode 0x2e. CS segment-override prefix: records the prefix, sets the
 *  effective segment and decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7378
7379
/** Opcode 0x2f. DAS - decimal adjust AL after subtraction. Invalid in 64-bit
 *  mode; OF is left undefined by the hardware. Deferred to the C impl. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7389
7390
/** Opcode 0x30. XOR Eb,Gb - AF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. XOR Ev,Gv - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. XOR Gb,Eb - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. XOR Gv,Ev - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. XOR AL,Ib - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. XOR rAX,Iz - AF undefined by hardware. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7443
7444
/** Opcode 0x36. SS segment-override prefix: records the prefix, sets the
 *  effective segment and decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7455
7456
/** Opcode 0x37. AAA - ASCII adjust AL after addition; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
7459
7460
/** Opcode 0x38. CMP Eb,Gb - compare (subtract without storing the result). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. CMP Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7509
7510
/** Opcode 0x3e. DS segment-override prefix: records the prefix, sets the
 *  effective segment and decodes the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7521
7522
/** Opcode 0x3f. AAS - ASCII adjust AL after subtraction; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
7525
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size, takes a reference to the general
 * register and calls the appropriately sized assembly worker together with a
 * reference to EFLAGS. The 32-bit case clears the high half of the 64-bit
 * register, as the architecture requires for 32-bit GPR writes.
 *
 * @param   pImpl   The instruction implementation table (u16/u32/u64 workers).
 * @param   iReg    The general register being modified (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable - all IEMMODE values are handled above. */
    return VINF_SUCCESS;
}
7570
7571
/** Opcode 0x40. REX prefix in 64-bit mode; INC eAX otherwise. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. REX.B prefix in 64-bit mode; INC eCX otherwise. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. REX.X prefix in 64-bit mode; INC eDX otherwise. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. REX.BX prefix in 64-bit mode; INC eBX otherwise. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. REX.R prefix in 64-bit mode; INC eSP otherwise. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. REX.RB prefix in 64-bit mode; INC eBP otherwise. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. REX.RX prefix in 64-bit mode; INC eSI otherwise. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. REX.RBX prefix in 64-bit mode; INC eDI otherwise. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7743
7744
/** Opcode 0x48. REX.W prefix in 64-bit mode (recalculates the effective
 *  operand size); DEC eAX otherwise. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. REX.BW prefix in 64-bit mode; DEC eCX otherwise. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. REX.XW prefix in 64-bit mode; DEC eDX otherwise. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. REX.BXW prefix in 64-bit mode; DEC eBX otherwise. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. REX.RW prefix in 64-bit mode; DEC eSP otherwise. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. REX.RBW prefix in 64-bit mode; DEC eBP otherwise. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. REX.RXW prefix in 64-bit mode; DEC eSI otherwise. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. REX.RBXW prefix in 64-bit mode; DEC eDI otherwise. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7923
7924
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the REX.B extension is applied to @a iReg and the default
 * operand size is forced to 64-bit (a 66h prefix selects 16-bit; there is no
 * 32-bit push in 64-bit mode). Then the register value is fetched and pushed
 * at the effective operand size.
 *
 * @param   iReg    The general register to push (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7970
7971
/** Opcode 0x50. PUSH rAX. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8002
8003
8004/** Opcode 0x54. */
8005FNIEMOP_DEF(iemOp_push_eSP)
8006{
8007 IEMOP_MNEMONIC("push rSP");
8008 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8009 {
8010 IEM_MC_BEGIN(0, 1);
8011 IEM_MC_LOCAL(uint16_t, u16Value);
8012 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8013 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8014 IEM_MC_PUSH_U16(u16Value);
8015 IEM_MC_ADVANCE_RIP();
8016 IEM_MC_END();
8017 }
8018 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8019}
8020
8021
/** Opcode 0x55 - push BP/eBP/rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8028
8029
/** Opcode 0x56 - push SI/eSI/rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8036
8037
/** Opcode 0x57 - push DI/eDI/rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8044
8045
/**
 * Common 'pop register' helper (opcodes 0x58..0x5f).
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is 64-bit (66h selects 16-bit; there is no 32-bit pop).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;                 /* REX.B extends the register index (r8..r15). */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;  /* POP defaults to 64-bit operand size in long mode. */
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* One MC block per effective operand size; pops straight into a
       reference to the destination register (pointer declared as a local). */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8092
8093
/** Opcode 0x58 - pop AX/eAX/rAX. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8100
8101
/** Opcode 0x59 - pop CX/eCX/rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8108
8109
/** Opcode 0x5a - pop DX/eDX/rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8116
8117
/** Opcode 0x5b - pop BX/eBX/rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8124
8125
/** Opcode 0x5c - pop SP/eSP/rSP.
 *
 * Special-cased because the destination is the stack pointer itself: the
 * popped value must replace SP *after* the read, so the common helper's
 * pop-by-reference scheme cannot be used for xSP.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            /* With REX.B this is really 'pop r12', which the common helper handles fine. */
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Pop into a local first, then store it to xSP. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8173
8174
/** Opcode 0x5d - pop BP/eBP/rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8181
8182
/** Opcode 0x5e - pop SI/eSI/rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8189
8190
/** Opcode 0x5f - pop DI/eDI/rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8197
8198
/** Opcode 0x60 - pusha/pushad.
 * 186+ only; invalid in 64-bit mode.  Deferred to C implementations. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8210
8211
/** Opcode 0x61 - popa/popad.
 * 186+ only; invalid in 64-bit mode.  Deferred to C implementations. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8223
8224
/** Opcode 0x62 - bound Gv,Ma (186+); reused as the EVEX prefix on AVX-512
 *  capable CPUs.  Not implemented yet (stub raises "not implemented"). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//  IEMOP_HLP_MIN_186();
8228
8229
/** Opcode 0x63 - non-64-bit modes: arpl Ew,Gw.
 * Adjusts the RPL field of the destination selector; 286+, protected mode only. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write via mapped destination. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8279
8280
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source into the
         * 64-bit destination.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8322
8323
/** Opcode 0x64 - FS segment override prefix (386+).
 * Records the prefix, then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8336
8337
/** Opcode 0x65 - GS segment override prefix (386+).
 * Records the prefix, then decodes and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8350
8351
/** Opcode 0x66 - operand-size override prefix (386+).
 * Records the prefix, recalculates the effective operand size, then decodes
 * and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8364
8365
/** Opcode 0x67 - address-size override prefix (386+).
 * Toggles the effective address mode relative to the default, then decodes
 * and dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break; /* 67h in long mode selects 32-bit addressing. */
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8384
8385
/** Opcode 0x68 - push Iz (word/dword immediate; 186+).
 * In 64-bit mode a dword immediate is sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is still 32 bits, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8430
8431
/** Opcode 0x69 - imul Gv,Ev,Iz (three-operand signed multiply; 186+).
 * Fetches Ev (register or memory), multiplies by the immediate, and stores
 * the truncated result in Gv.  SF/ZF/AF/PF are left undefined by hardware. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the 2 below = bytes of trailing immediate
                   (needed for RIP-relative addressing). */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 = bytes of trailing immediate. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 = bytes of trailing immediate. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8591
8592
/** Opcode 0x6a - push Ib (sign-extended byte immediate; 186+). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The signed i8Imm is implicitly sign-extended to the push width. */
    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8619
8620
/** Opcode 0x6b - imul Gv,Ev,Ib (three-operand signed multiply, byte
 * immediate sign-extended to the operand size; 186+).
 * SF/ZF/AF/PF are left undefined by hardware. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the 1 below = bytes of trailing immediate
                   (needed for RIP-relative addressing). */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 = bytes of trailing immediate. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 = bytes of trailing immediate. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8774
8775
/** Opcode 0x6c - ins Yb,DX (byte string input from port; 186+).
 * Dispatches to a C implementation selected by REP prefix and address size. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8804
8805
/** Opcode 0x6d - ins Yv,DX (word/dword string input from port; 186+).
 * Dispatches on REP prefix, operand size (64-bit folds to 32-bit op size)
 * and address size.  All inner cases return; the breaks are unreachable. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8866
8867
/** Opcode 0x6e - outs DX,Yb (byte string output to port; 186+).
 * Dispatches to a C implementation selected by REP prefix and address size;
 * the effective segment is passed since OUTS honours segment overrides. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8896
8897
/** Opcode 0x6f - outs DX,Yv (word/dword string output to port; 186+).
 * Dispatches on REP prefix, operand size (64-bit folds to 32-bit op size)
 * and address size; the effective segment is passed since OUTS honours
 * segment overrides.  All inner cases return; the breaks are unreachable. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* no 64-bit port I/O; treated as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8958
8959
/** Opcode 0x70 - jo Jb: jump short if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8977
8978
/** Opcode 0x71 - jno Jb: jump short if OF=0 (condition tested inverted). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8996
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9014
9015
/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if CF=0 (condition tested inverted). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9033
9034
/** Opcode 0x74 - je/jz Jb: jump short if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9052
9053
/** Opcode 0x75. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    /* Jump short if not equal/not zero: branch taken when ZF is clear. */
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP(); /* ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9071
9072
/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    /* Jump short if below or equal (unsigned): branch taken when CF or ZF is set. */
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9090
9091
/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    /* Jump short if above (unsigned): branch taken when both CF and ZF are clear. */
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP(); /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* both clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9109
9110
/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    /* Jump short if sign: branch taken when SF is set. */
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9128
9129
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    /* Jump short if not sign: branch taken when SF is clear. */
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP(); /* SF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9147
9148
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    /* Jump short if parity (even): branch taken when PF is set. */
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9166
9167
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    /* Jump short if no parity (odd): branch taken when PF is clear. */
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP(); /* PF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9185
9186
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    /* Jump short if less (signed): branch taken when SF != OF. */
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9204
9205
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* Jump short if not less / greater-or-equal (signed): branch taken when SF == OF. */
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP(); /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9223
9224
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    /* Jump short if less or equal (signed): branch taken when ZF set or SF != OF. */
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9242
9243
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* Jump short if greater (signed): branch taken when ZF clear and SF == OF. */
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP(); /* ZF set or SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear and SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9261
9262
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Group 1, byte form: the ModR/M reg field selects the operation
       (add/or/adc/sbb/and/sub/xor/cmp).  The packed string is indexed in
       4-byte steps to fetch the mnemonic for the selected op. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, so LOCK is invalid here too. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* One immediate byte follows the ModR/M bytes (the '1' cbImm hint). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9321
9322
9323/** Opcode 0x81. */
9324FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9325{
9326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9327 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
9328 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9329
9330 switch (pIemCpu->enmEffOpSize)
9331 {
9332 case IEMMODE_16BIT:
9333 {
9334 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9335 {
9336 /* register target */
9337 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9338 IEMOP_HLP_NO_LOCK_PREFIX();
9339 IEM_MC_BEGIN(3, 0);
9340 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9341 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9342 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9343
9344 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9345 IEM_MC_REF_EFLAGS(pEFlags);
9346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9347
9348 IEM_MC_ADVANCE_RIP();
9349 IEM_MC_END();
9350 }
9351 else
9352 {
9353 /* memory target */
9354 uint32_t fAccess;
9355 if (pImpl->pfnLockedU16)
9356 fAccess = IEM_ACCESS_DATA_RW;
9357 else
9358 { /* CMP, TEST */
9359 IEMOP_HLP_NO_LOCK_PREFIX();
9360 fAccess = IEM_ACCESS_DATA_R;
9361 }
9362 IEM_MC_BEGIN(3, 2);
9363 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9364 IEM_MC_ARG(uint16_t, u16Src, 1);
9365 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9367
9368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9369 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9370 IEM_MC_ASSIGN(u16Src, u16Imm);
9371 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9372 IEM_MC_FETCH_EFLAGS(EFlags);
9373 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9374 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9375 else
9376 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9377
9378 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9379 IEM_MC_COMMIT_EFLAGS(EFlags);
9380 IEM_MC_ADVANCE_RIP();
9381 IEM_MC_END();
9382 }
9383 break;
9384 }
9385
9386 case IEMMODE_32BIT:
9387 {
9388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9389 {
9390 /* register target */
9391 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9392 IEMOP_HLP_NO_LOCK_PREFIX();
9393 IEM_MC_BEGIN(3, 0);
9394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9395 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
9396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9397
9398 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9399 IEM_MC_REF_EFLAGS(pEFlags);
9400 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9401 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9402
9403 IEM_MC_ADVANCE_RIP();
9404 IEM_MC_END();
9405 }
9406 else
9407 {
9408 /* memory target */
9409 uint32_t fAccess;
9410 if (pImpl->pfnLockedU32)
9411 fAccess = IEM_ACCESS_DATA_RW;
9412 else
9413 { /* CMP, TEST */
9414 IEMOP_HLP_NO_LOCK_PREFIX();
9415 fAccess = IEM_ACCESS_DATA_R;
9416 }
9417 IEM_MC_BEGIN(3, 2);
9418 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9419 IEM_MC_ARG(uint32_t, u32Src, 1);
9420 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9422
9423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9424 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9425 IEM_MC_ASSIGN(u32Src, u32Imm);
9426 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9427 IEM_MC_FETCH_EFLAGS(EFlags);
9428 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9429 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9430 else
9431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9432
9433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9434 IEM_MC_COMMIT_EFLAGS(EFlags);
9435 IEM_MC_ADVANCE_RIP();
9436 IEM_MC_END();
9437 }
9438 break;
9439 }
9440
9441 case IEMMODE_64BIT:
9442 {
9443 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9444 {
9445 /* register target */
9446 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9447 IEMOP_HLP_NO_LOCK_PREFIX();
9448 IEM_MC_BEGIN(3, 0);
9449 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9450 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
9451 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9452
9453 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9454 IEM_MC_REF_EFLAGS(pEFlags);
9455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9456
9457 IEM_MC_ADVANCE_RIP();
9458 IEM_MC_END();
9459 }
9460 else
9461 {
9462 /* memory target */
9463 uint32_t fAccess;
9464 if (pImpl->pfnLockedU64)
9465 fAccess = IEM_ACCESS_DATA_RW;
9466 else
9467 { /* CMP */
9468 IEMOP_HLP_NO_LOCK_PREFIX();
9469 fAccess = IEM_ACCESS_DATA_R;
9470 }
9471 IEM_MC_BEGIN(3, 2);
9472 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9473 IEM_MC_ARG(uint64_t, u64Src, 1);
9474 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9476
9477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9478 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9479 IEM_MC_ASSIGN(u64Src, u64Imm);
9480 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9481 IEM_MC_FETCH_EFLAGS(EFlags);
9482 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9484 else
9485 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9486
9487 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9488 IEM_MC_COMMIT_EFLAGS(EFlags);
9489 IEM_MC_ADVANCE_RIP();
9490 IEM_MC_END();
9491 }
9492 break;
9493 }
9494 }
9495 return VINF_SUCCESS;
9496}
9497
9498
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of 0x80 (Group 1 Eb,Ib) that is invalid in 64-bit mode;
       outside 64-bit mode it simply forwards to the 0x80 handler. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9505
9506
9507/** Opcode 0x83. */
9508FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
9509{
9510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9511 IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
9512 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
9513 to the 386 even if absent in the intel reference manuals and some
9514 3rd party opcode listings. */
9515 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9516
9517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9518 {
9519 /*
9520 * Register target
9521 */
9522 IEMOP_HLP_NO_LOCK_PREFIX();
9523 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9524 switch (pIemCpu->enmEffOpSize)
9525 {
9526 case IEMMODE_16BIT:
9527 {
9528 IEM_MC_BEGIN(3, 0);
9529 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9530 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
9531 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9532
9533 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9534 IEM_MC_REF_EFLAGS(pEFlags);
9535 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9536
9537 IEM_MC_ADVANCE_RIP();
9538 IEM_MC_END();
9539 break;
9540 }
9541
9542 case IEMMODE_32BIT:
9543 {
9544 IEM_MC_BEGIN(3, 0);
9545 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9546 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
9547 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9548
9549 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9550 IEM_MC_REF_EFLAGS(pEFlags);
9551 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9552 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9553
9554 IEM_MC_ADVANCE_RIP();
9555 IEM_MC_END();
9556 break;
9557 }
9558
9559 case IEMMODE_64BIT:
9560 {
9561 IEM_MC_BEGIN(3, 0);
9562 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9563 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
9564 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9565
9566 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
9567 IEM_MC_REF_EFLAGS(pEFlags);
9568 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9569
9570 IEM_MC_ADVANCE_RIP();
9571 IEM_MC_END();
9572 break;
9573 }
9574 }
9575 }
9576 else
9577 {
9578 /*
9579 * Memory target.
9580 */
9581 uint32_t fAccess;
9582 if (pImpl->pfnLockedU16)
9583 fAccess = IEM_ACCESS_DATA_RW;
9584 else
9585 { /* CMP */
9586 IEMOP_HLP_NO_LOCK_PREFIX();
9587 fAccess = IEM_ACCESS_DATA_R;
9588 }
9589
9590 switch (pIemCpu->enmEffOpSize)
9591 {
9592 case IEMMODE_16BIT:
9593 {
9594 IEM_MC_BEGIN(3, 2);
9595 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9596 IEM_MC_ARG(uint16_t, u16Src, 1);
9597 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9599
9600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9601 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9602 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
9603 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9604 IEM_MC_FETCH_EFLAGS(EFlags);
9605 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9606 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9607 else
9608 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9609
9610 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9611 IEM_MC_COMMIT_EFLAGS(EFlags);
9612 IEM_MC_ADVANCE_RIP();
9613 IEM_MC_END();
9614 break;
9615 }
9616
9617 case IEMMODE_32BIT:
9618 {
9619 IEM_MC_BEGIN(3, 2);
9620 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9621 IEM_MC_ARG(uint32_t, u32Src, 1);
9622 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9624
9625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9626 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9627 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
9628 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9629 IEM_MC_FETCH_EFLAGS(EFlags);
9630 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9631 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9632 else
9633 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9634
9635 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9636 IEM_MC_COMMIT_EFLAGS(EFlags);
9637 IEM_MC_ADVANCE_RIP();
9638 IEM_MC_END();
9639 break;
9640 }
9641
9642 case IEMMODE_64BIT:
9643 {
9644 IEM_MC_BEGIN(3, 2);
9645 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9646 IEM_MC_ARG(uint64_t, u64Src, 1);
9647 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9649
9650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9651 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9652 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
9653 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
9654 IEM_MC_FETCH_EFLAGS(EFlags);
9655 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
9656 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9657 else
9658 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9659
9660 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9661 IEM_MC_COMMIT_EFLAGS(EFlags);
9662 IEM_MC_ADVANCE_RIP();
9663 IEM_MC_END();
9664 break;
9665 }
9666 }
9667 }
9668 return VINF_SUCCESS;
9669}
9670
9671
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - AND without writing the result; destination is read-only,
       so the shared rm,r8 binary-op worker handles it via g_iemAImpl_test. */
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9680
9681
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - word/dword/qword variant of 0x84, same worker pattern. */
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9690
9691
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* Exchange byte register with r/m8.  The memory form goes through the
       assembly helper so it can be performed atomically (XCHG with memory is
       implicitly locked on real hardware). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register/register: plain fetch both, store swapped. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9739
9740
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* Exchange Gv with r/m16/32/64, dispatched on the effective operand size.
       The memory forms use the atomic assembly helpers (XCHG with memory is
       implicitly locked on real hardware). */
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: the U32 stores implicitly clear the high dword in 64-bit mode. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The helper wrote through the register reference; clear the
                   high dword explicitly as required for 32-bit GPR writes. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9862
9863
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV r/m8, r8: store byte register to register or memory destination. */
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9902
9903
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV r/m, reg: store Gv to register or memory destination, dispatched
       on the effective operand size (16/32/64). */
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* NOTE(review): unlike iemOp_xchg_Ev_Gv this switch has no
           IEM_NOT_REACHED_DEFAULT_CASE_RET(); consider adding one for
           consistency. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9990
9991
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV r8, r/m8: load byte register from register or memory source. */
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10028
10029
/** Opcode 0x8b. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV reg, r/m: load Gv from register or memory source, dispatched on
       the effective operand size (16/32/64). */
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* NOTE(review): switch has no IEM_NOT_REACHED_DEFAULT_CASE_RET()
           unlike iemOp_xchg_Ev_Gv; consider adding one for consistency. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10116
10117
10118/** Opcode 0x63. */
10119FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10120{
10121 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10122 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10123 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10124 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10125 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10126}
10127
10128
/** Opcode 0x8c. MOV Ev,Sw - store a segment register to a general register
 *  or to memory.  The memory form is always a word store; the register form
 *  follows the operand size (upper bits cleared). */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10201
10202
10203
10204
/** Opcode 0x8d. LEA Gv,M - store the effective address of the memory operand
 *  in a general register, truncated to the effective operand size.  The
 *  register form (mod=3) is undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10249
10250
/** Opcode 0x8e. MOV Sw,Ev - load a segment register from a general register
 *  or memory (word-sized access).  Loading CS is invalid (\#UD); the actual
 *  load goes through iemCImpl_load_SReg for descriptor checks etc. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10304
10305
/** Opcode 0x8f /0. POP Ev - pop a value off the stack into a register or
 *  memory operand.  The memory form is awkward because RSP must be
 *  incremented before the effective address is calculated, so the R/M bytes
 *  are decoded twice (see below). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass only validates and consumes the R/M+SIB/disp bytes; the
       opcode offset is rewound afterwards so the second pass sees them again. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily advance RSP by the operand size (Intel
       semantics for ESP/RSP-relative addressing), recalc, then restore. */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS); /* first pass succeeded, so must this */
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temp, store to memory; RSP is only committed on success. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10407
10408
10409/** Opcode 0x8f. */
10410FNIEMOP_DEF(iemOp_Grp1A)
10411{
10412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10413 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10414 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10415
10416 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10417 /** @todo XOP decoding. */
10418 IEMOP_MNEMONIC("3-byte-xop");
10419 return IEMOP_RAISE_INVALID_OPCODE();
10420}
10421
10422
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the given general register (extended by REX.B) with rAX using two
 * temporaries; the operand width follows the effective operand size.
 *
 * @param   iReg    The base register index (0-7) from the opcode byte.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10472
10473
/** Opcode 0x90. NOP; becomes 'xchg r8,rAX' with REX.B, and PAUSE with a
 *  F3 (rep) prefix - PAUSE is emulated as a plain NOP here. */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10493
10494
/** Opcode 0x91. XCHG rCX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10501
10502
/** Opcode 0x92. XCHG rDX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10509
10510
/** Opcode 0x93. XCHG rBX,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10517
10518
10519/** Opcode 0x94. */
10520FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10521{
10522 IEMOP_MNEMONIC("xchg rSX,rAX");
10523 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10524}
10525
10526
/** Opcode 0x95. XCHG rBP,rAX. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10533
10534
/** Opcode 0x96. XCHG rSI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10541
10542
/** Opcode 0x97. XCHG rDI,rAX. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10549
10550
/** Opcode 0x98. CBW/CWDE/CDQE - sign-extend AL/AX/EAX into AX/EAX/RAX by
 *  testing the top bit of the source half and OR-ing/AND-ing the upper bits. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00)); /* AL negative: set AH to 0xff */
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff)); /* AL positive: clear AH */
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10596
10597
/** Opcode 0x99. CWD/CDQ/CQO - sign-extend rAX into rDX:rAX by filling rDX
 *  with all ones or all zeros depending on the sign bit of rAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10643
10644
/** Opcode 0x9a. CALL Ap - far call with an immediate seg:offset pointer.
 *  Invalid in 64-bit mode; the heavy lifting is deferred to iemCImpl_callf. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10661
10662
/** Opcode 0x9b. WAIT (aka FWAIT) - checks for pending x87 exceptions and
 *  device-not-available conditions, otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10676
10677
/** Opcode 0x9c. PUSHF/PUSHFD/PUSHFQ - deferred to iemCImpl_pushf; defaults
 *  to 64-bit operand size in long mode. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10685
10686
/** Opcode 0x9d. POPF/POPFD/POPFQ - deferred to iemCImpl_popf; defaults to
 *  64-bit operand size in long mode. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10694
10695
/** Opcode 0x9e. SAHF - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF
 *  plus the always-set bit 1).  Raises \#UD in 64-bit mode when the CPU does
 *  not report the LAHF/SAHF capability. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the flag bits SAHF may set, force bit 1, and merge into the
       current EFLAGS with its low byte masked out. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10718
10719
/** Opcode 0x9f. LAHF - load the low byte of EFLAGS into AH.  Raises \#UD in
 *  64-bit mode when the CPU does not report the LAHF/SAHF capability. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10736
10737
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The moffs width follows the effective address size; 16/32-bit offsets are
 * zero-extended to 64 bits.  Note that only block comments may appear inside
 * the continuation lines below, as // would eat the trailing backslash.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10762
/** Opcode 0xa0. MOV AL,Ob - load AL from an absolute moffs8 address. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10783
10784
/** Opcode 0xa1. MOV rAX,Ov - load rAX from an absolute moffs address, sized
 *  by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10830
10831
/** Opcode 0xa2. MOV Ob,AL - store AL to an absolute moffs8 address. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10852
10853
/** Opcode 0xa3. MOV Ov,rAX - store rAX to an absolute moffs address, sized
 *  by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10898
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.  Emits one MOVS
 *  step: load from DS:rSI (or the segment override), store to ES:rDI, then
 *  step both index registers by the value size, direction per EFLAGS.DF.
 *  Only block comments are safe inside the continuation lines. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10917
/** Opcode 0xa4. MOVSB - byte string move.  Uses the C implementation for
 *  REP/REPNE prefixed forms, otherwise a single IEM_MOVS_CASE step. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10951
10952
10953/** Opcode 0xa5. */
10954FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
10955{
10956 IEMOP_HLP_NO_LOCK_PREFIX();
10957
10958 /*
10959 * Use the C implementation if a repeat prefix is encountered.
10960 */
10961 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10962 {
10963 IEMOP_MNEMONIC("rep movs Xv,Yv");
10964 switch (pIemCpu->enmEffOpSize)
10965 {
10966 case IEMMODE_16BIT:
10967 switch (pIemCpu->enmEffAddrMode)
10968 {
10969 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
10970 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
10971 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
10972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10973 }
10974 break;
10975 case IEMMODE_32BIT:
10976 switch (pIemCpu->enmEffAddrMode)
10977 {
10978 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
10979 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
10980 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
10981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10982 }
10983 case IEMMODE_64BIT:
10984 switch (pIemCpu->enmEffAddrMode)
10985 {
10986 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
10987 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
10988 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
10989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10990 }
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10992 }
10993 }
10994 IEMOP_MNEMONIC("movs Xv,Yv");
10995
10996 /*
10997 * Annoying double switch here.
10998 * Using ugly macro for implementing the cases, sharing it with movsb.
10999 */
11000 switch (pIemCpu->enmEffOpSize)
11001 {
11002 case IEMMODE_16BIT:
11003 switch (pIemCpu->enmEffAddrMode)
11004 {
11005 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11006 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11007 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11009 }
11010 break;
11011
11012 case IEMMODE_32BIT:
11013 switch (pIemCpu->enmEffAddrMode)
11014 {
11015 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11016 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11017 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11019 }
11020 break;
11021
11022 case IEMMODE_64BIT:
11023 switch (pIemCpu->enmEffAddrMode)
11024 {
11025 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11026 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11027 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11029 }
11030 break;
11031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11032 }
11033 return VINF_SUCCESS;
11034}
11035
11036#undef IEM_MOVS_CASE
11037
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.  Emits one CMPS
 *  step: fetch from DS:rSI (or segment override) and ES:rDI, compare via
 *  iemAImpl_cmp updating EFLAGS, then step both index registers by the value
 *  size, direction per EFLAGS.DF.  Only block comments are safe inside the
 *  continuation lines. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11064
11065/** Opcode 0xa6. */
11066FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11067{
11068 IEMOP_HLP_NO_LOCK_PREFIX();
11069
11070 /*
11071 * Use the C implementation if a repeat prefix is encountered.
11072 */
11073 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11074 {
11075 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11076 switch (pIemCpu->enmEffAddrMode)
11077 {
11078 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11079 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11080 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11082 }
11083 }
11084 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11085 {
11086 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11087 switch (pIemCpu->enmEffAddrMode)
11088 {
11089 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11090 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11091 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11093 }
11094 }
11095 IEMOP_MNEMONIC("cmps Xb,Yb");
11096
11097 /*
11098 * Sharing case implementation with cmps[wdq] below.
11099 */
11100 switch (pIemCpu->enmEffAddrMode)
11101 {
11102 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11103 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11104 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11106 }
11107 return VINF_SUCCESS;
11108
11109}
11110
11111
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Compares iEffSeg:[xSI] with ES:[xDI] at the effective operand size and
 * updates EFLAGS, stepping both index registers according to EFL.DF.  The
 * REPE/REPNE prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11230
11231#undef IEM_CMPS_CASE
11232
/** Opcode 0xa8 - TEST AL,imm8 (AF is architecturally undefined after TEST). */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11240
11241
/** Opcode 0xa9 - TEST rAX,immz (AF is architecturally undefined after TEST). */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11249
11250
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to emit one
 * non-repeated STOS iteration: stores xAX (at the operand size) to ES:[xDI]
 * and steps xDI by the operand size according to EFL.DF.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
11266
/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL to ES:[xDI] and steps xDI per EFL.DF.  The REP prefixed forms
 * (REPZ and REPNZ behave identically for STOS) are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11300
11301
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Stores rAX (at the effective operand size) to ES:[xDI] and steps xDI per
 * EFL.DF.  REP prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11384
11385#undef IEM_STOS_CASE
11386
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit one
 * non-repeated LODS iteration: loads iEffSeg:[xSI] into xAX (at the operand
 * size) and steps xSI by the operand size according to EFL.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11402
/**
 * Opcode 0xac - LODSB.
 *
 * Loads the byte at iEffSeg:[xSI] into AL and steps xSI per EFL.DF.  REP
 * prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11436
11437
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Loads iEffSeg:[xSI] into rAX (at the effective operand size) and steps xSI
 * per EFL.DF.  REP prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11520
11521#undef IEM_LODS_CASE
11522
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit one
 * non-repeated SCAS iteration: compares xAX against the value at ES:[xDI]
 * (updating EFLAGS via the cmp worker) and steps xDI by the operand size
 * according to EFL.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11544
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL with the byte at ES:[xDI] and steps xDI per EFL.DF.  REPE and
 * REPNE prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11589
11590
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ.
 *
 * Compares rAX (at the effective operand size) with ES:[xDI] and steps xDI
 * per EFL.DF.  REPE/REPNE prefixed forms are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Isn't this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11706
11707#undef IEM_SCAS_CASE
11708
11709/**
11710 * Common 'mov r8, imm8' helper.
11711 */
11712FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11713{
11714 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11715 IEMOP_HLP_NO_LOCK_PREFIX();
11716
11717 IEM_MC_BEGIN(0, 1);
11718 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11719 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11720 IEM_MC_ADVANCE_RIP();
11721 IEM_MC_END();
11722
11723 return VINF_SUCCESS;
11724}
11725
11726
/** Opcode 0xb0 - 'mov AL,Ib'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11733
11734
/** Opcode 0xb1 - 'mov CL,Ib'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11741
11742
/** Opcode 0xb2 - 'mov DL,Ib'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11749
11750
/** Opcode 0xb3 - 'mov BL,Ib'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11757
11758
/** Opcode 0xb4 - 'mov AH,Ib'.  Register index 4 (xSP) selects AH without a
 *  REX prefix; with REX it presumably selects SPL/R12L via the byte-register
 *  store helper - confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11765
11766
/** Opcode 0xb5 - 'mov CH,Ib'.  Register index 5 (xBP) selects CH without a
 *  REX prefix; with REX it presumably selects BPL/R13L via the byte-register
 *  store helper - confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11773
11774
/** Opcode 0xb6 - 'mov DH,Ib'.  Register index 6 (xSI) selects DH without a
 *  REX prefix; with REX it presumably selects SIL/R14L via the byte-register
 *  store helper - confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11781
11782
/** Opcode 0xb7 - 'mov BH,Ib'.  Register index 7 (xDI) selects BH without a
 *  REX prefix; with REX it presumably selects DIL/R15L via the byte-register
 *  store helper - confirm against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11789
11790
11791/**
11792 * Common 'mov regX,immX' helper.
11793 */
11794FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11795{
11796 switch (pIemCpu->enmEffOpSize)
11797 {
11798 case IEMMODE_16BIT:
11799 {
11800 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11801 IEMOP_HLP_NO_LOCK_PREFIX();
11802
11803 IEM_MC_BEGIN(0, 1);
11804 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11805 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11806 IEM_MC_ADVANCE_RIP();
11807 IEM_MC_END();
11808 break;
11809 }
11810
11811 case IEMMODE_32BIT:
11812 {
11813 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11814 IEMOP_HLP_NO_LOCK_PREFIX();
11815
11816 IEM_MC_BEGIN(0, 1);
11817 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11818 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11819 IEM_MC_ADVANCE_RIP();
11820 IEM_MC_END();
11821 break;
11822 }
11823 case IEMMODE_64BIT:
11824 {
11825 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11826 IEMOP_HLP_NO_LOCK_PREFIX();
11827
11828 IEM_MC_BEGIN(0, 1);
11829 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11830 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11831 IEM_MC_ADVANCE_RIP();
11832 IEM_MC_END();
11833 break;
11834 }
11835 }
11836
11837 return VINF_SUCCESS;
11838}
11839
11840
/** Opcode 0xb8 - 'mov rAX,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
11847
11848
/** Opcode 0xb9 - 'mov rCX,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
11855
11856
/** Opcode 0xba - 'mov rDX,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
11863
11864
/** Opcode 0xbb - 'mov rBX,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
11871
11872
/** Opcode 0xbc - 'mov rSP,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
11879
11880
/** Opcode 0xbd - 'mov rBP,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
11887
11888
/** Opcode 0xbe - 'mov rSI,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
11895
11896
/** Opcode 0xbf - 'mov rDI,Iv'; uRexB extends the register index under REX.B. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
11903
11904
/**
 * Opcode 0xc0 - Group 2 'Eb,Ib': rotate/shift a byte register or memory
 * operand by an imm8 count (186+ instruction).
 *
 * The ModR/M reg field selects the operation (/6 is undefined and raises
 * \#UD); OF and AF are architecturally undefined for these.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched after the ModR/M bytes */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11964
11965
11966/** Opcode 0xc1. */
/*
 * Group 2: rotate/shift Ev by an immediate byte count (rol/ror/rcl/rcr/shl/shr/sar Ev,Ib).
 * ModRM.reg selects the operation; /6 is not assigned and raises #UD.
 * Requires at least an 80186 (IEMOP_HLP_MIN_186).  OF and AF are declared
 * undefined for verification purposes below.
 */
11967FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
11968{
11969    IEMOP_HLP_MIN_186();
11970    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11971    PCIEMOPSHIFTSIZES pImpl;
11972    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11973    {
11974        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
11975        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
11976        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
11977        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
11978        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
11979        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
11980        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
11981        case 6: return IEMOP_RAISE_INVALID_OPCODE();
11982        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
11983    }
11984    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
11985
11986    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11987    {
11988        /* register */
11989        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
11990        IEMOP_HLP_NO_LOCK_PREFIX();
11991        switch (pIemCpu->enmEffOpSize)
11992        {
11993            case IEMMODE_16BIT:
11994                IEM_MC_BEGIN(3, 0);
11995                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
11996                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
11997                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
11998                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
11999                IEM_MC_REF_EFLAGS(pEFlags);
12000                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12001                IEM_MC_ADVANCE_RIP();
12002                IEM_MC_END();
12003                return VINF_SUCCESS;
12004
12005            case IEMMODE_32BIT:
12006                IEM_MC_BEGIN(3, 0);
12007                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
12008                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
12009                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
12010                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12011                IEM_MC_REF_EFLAGS(pEFlags);
12012                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
12013                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12014                IEM_MC_ADVANCE_RIP();
12015                IEM_MC_END();
12016                return VINF_SUCCESS;
12017
12018            case IEMMODE_64BIT:
12019                IEM_MC_BEGIN(3, 0);
12020                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
12021                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
12022                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
12023                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12024                IEM_MC_REF_EFLAGS(pEFlags);
12025                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12026                IEM_MC_ADVANCE_RIP();
12027                IEM_MC_END();
12028                return VINF_SUCCESS;
12029
12030            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12031        }
12032    }
12033    else
12034    {
12035        /* memory */
12036        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12037        switch (pIemCpu->enmEffOpSize)
12038        {
12039            case IEMMODE_16BIT:
12040                IEM_MC_BEGIN(3, 2);
12041                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
12042                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12043                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12044                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12045
                /* The trailing 1 presumably tells the EA calculation that one more
                   immediate byte follows (cf. 0xc6/0xc7 passing 1/2/4) -- the Ib is
                   fetched right after.  TODO confirm against IEM_MC_CALC_RM_EFF_ADDR. */
12046                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12047                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12048                IEM_MC_ASSIGN(cShiftArg, cShift);
12049                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12050                IEM_MC_FETCH_EFLAGS(EFlags);
12051                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12052
12053                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12054                IEM_MC_COMMIT_EFLAGS(EFlags);
12055                IEM_MC_ADVANCE_RIP();
12056                IEM_MC_END();
12057                return VINF_SUCCESS;
12058
12059            case IEMMODE_32BIT:
12060                IEM_MC_BEGIN(3, 2);
12061                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
12062                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12063                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12064                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12065
12066                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12067                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12068                IEM_MC_ASSIGN(cShiftArg, cShift);
12069                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12070                IEM_MC_FETCH_EFLAGS(EFlags);
12071                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12072
12073                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12074                IEM_MC_COMMIT_EFLAGS(EFlags);
12075                IEM_MC_ADVANCE_RIP();
12076                IEM_MC_END();
12077                return VINF_SUCCESS;
12078
12079            case IEMMODE_64BIT:
12080                IEM_MC_BEGIN(3, 2);
12081                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
12082                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12083                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12084                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12085
12086                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12087                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12088                IEM_MC_ASSIGN(cShiftArg, cShift);
12089                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12090                IEM_MC_FETCH_EFLAGS(EFlags);
12091                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12092
12093                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12094                IEM_MC_COMMIT_EFLAGS(EFlags);
12095                IEM_MC_ADVANCE_RIP();
12096                IEM_MC_END();
12097                return VINF_SUCCESS;
12098
12099            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12100        }
12101    }
12102}
12103
12104
12105/** Opcode 0xc2. */
/*
 * Near return, popping an additional u16Imm bytes off the stack.
 * Defers to iemCImpl_retn; operand size defaults to 64-bit in long mode
 * (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 */
12106FNIEMOP_DEF(iemOp_retn_Iw)
12107{
12108    IEMOP_MNEMONIC("retn Iw");
12109    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12110    IEMOP_HLP_NO_LOCK_PREFIX();
12111    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12112    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
12113}
12114
12115
12116/** Opcode 0xc3. */
/*
 * Plain near return (no stack adjustment); same CIMPL worker as 0xc2 with a
 * zero pop count.
 */
12117FNIEMOP_DEF(iemOp_retn)
12118{
12119    IEMOP_MNEMONIC("retn");
12120    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12121    IEMOP_HLP_NO_LOCK_PREFIX();
12122    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
12123}
12124
12125
12126/** Opcode 0xc4. */
/*
 * LES Gv,Mp -- or the 2-byte VEX prefix in 64-bit mode / with MOD=3.
 * VEX decoding is not implemented yet; such encodings currently raise #UD
 * (see the todo below).
 */
12127FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12128{
12129    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12130    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
12131        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
12132    {
12133        IEMOP_MNEMONIC("2-byte-vex");
12134        /* The LES instruction is invalid 64-bit mode. In legacy and
12135           compatability mode it is invalid with MOD=3.
12136           The use as a VEX prefix is made possible by assigning the inverted
12137           REX.R to the top MOD bit, and the top bit in the inverted register
12138           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
12139           to accessing registers 0..7 in this VEX form. */
12140        /** @todo VEX: Just use new tables for it. */
12141        return IEMOP_RAISE_INVALID_OPCODE();
12142    }
12143    IEMOP_MNEMONIC("les Gv,Mp");
12144    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12145}
12146
12147
12148/** Opcode 0xc5. */
/*
 * LDS Gv,Mp -- or the 3-byte VEX prefix (64-bit mode, or MOD=3 outside
 * real/v86 mode).  The VEX path currently consumes the remaining prefix and
 * opcode bytes and then raises #UD, since VEX decoding is not implemented.
 */
12149FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12150{
12151    /* The LDS instruction is invalid 64-bit mode. In legacy and
12152       compatability mode it is invalid with MOD=3.
12153       The use as a VEX prefix is made possible by assigning the inverted
12154       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
12155       outside of 64-bit mode. VEX is not available in real or v86 mode. */
12156    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12157    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
12158    {
12159        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12160        {
12161            IEMOP_MNEMONIC("lds Gv,Mp");
12162            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12163        }
12164        IEMOP_HLP_NO_REAL_OR_V86_MODE();
12165    }
12166
12167    IEMOP_MNEMONIC("3-byte-vex");
12168    /** @todo Test when exctly the VEX conformance checks kick in during
12169     *        instruction decoding and fetching (using \#PF). */
12170    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
12171    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
12172    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12173#if 0 /* will make sense of this next week... */
    /* NOTE(review): IEM_OP_PRF_REPZ appears twice in the mask below (dead code
       under #if 0) -- one of them was probably meant to be something else. */
12174    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12175        &&
12176        )
12177    {
12178
12179    }
12180#endif
12181
12182    /** @todo VEX: Just use new tables for it. */
12183    return IEMOP_RAISE_INVALID_OPCODE();
12184}
12185
12186
12187/** Opcode 0xc6. */
/*
 * Group 11: mov Eb,Ib is the only assigned encoding (/0); all other reg
 * values raise #UD.  The immediate byte is fetched after the effective
 * address has been calculated in the memory form.
 */
12188FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12189{
12190    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12191    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12192    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12193        return IEMOP_RAISE_INVALID_OPCODE();
12194    IEMOP_MNEMONIC("mov Eb,Ib");
12195
12196    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12197    {
12198        /* register access */
12199        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12200        IEM_MC_BEGIN(0, 0);
12201        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
12202        IEM_MC_ADVANCE_RIP();
12203        IEM_MC_END();
12204    }
12205    else
12206    {
12207        /* memory access. */
12208        IEM_MC_BEGIN(0, 1);
12209        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12210        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12211        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12212        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
12213        IEM_MC_ADVANCE_RIP();
12214        IEM_MC_END();
12215    }
12216    return VINF_SUCCESS;
12217}
12218
12219
12220/** Opcode 0xc7. */
/*
 * Group 11: mov Ev,Iz is the only assigned encoding (/0); other reg values
 * raise #UD.  In 64-bit mode the immediate is a sign-extended 32-bit value
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64); the EA calc is told how many immediate
 * bytes follow (2 or 4) for correct RIP-relative addressing.
 */
12221FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12222{
12223    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12224    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12225    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12226        return IEMOP_RAISE_INVALID_OPCODE();
12227    IEMOP_MNEMONIC("mov Ev,Iz");
12228
12229    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12230    {
12231        /* register access */
12232        switch (pIemCpu->enmEffOpSize)
12233        {
12234            case IEMMODE_16BIT:
12235                IEM_MC_BEGIN(0, 0);
12236                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12237                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
12238                IEM_MC_ADVANCE_RIP();
12239                IEM_MC_END();
12240                return VINF_SUCCESS;
12241
12242            case IEMMODE_32BIT:
12243                IEM_MC_BEGIN(0, 0);
12244                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12245                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
12246                IEM_MC_ADVANCE_RIP();
12247                IEM_MC_END();
12248                return VINF_SUCCESS;
12249
12250            case IEMMODE_64BIT:
12251                IEM_MC_BEGIN(0, 0);
12252                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12253                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
12254                IEM_MC_ADVANCE_RIP();
12255                IEM_MC_END();
12256                return VINF_SUCCESS;
12257
12258            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12259        }
12260    }
12261    else
12262    {
12263        /* memory access. */
12264        switch (pIemCpu->enmEffOpSize)
12265        {
12266            case IEMMODE_16BIT:
12267                IEM_MC_BEGIN(0, 1);
12268                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12269                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12270                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12271                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
12272                IEM_MC_ADVANCE_RIP();
12273                IEM_MC_END();
12274                return VINF_SUCCESS;
12275
12276            case IEMMODE_32BIT:
12277                IEM_MC_BEGIN(0, 1);
12278                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12279                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12280                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12281                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
12282                IEM_MC_ADVANCE_RIP();
12283                IEM_MC_END();
12284                return VINF_SUCCESS;
12285
12286            case IEMMODE_64BIT:
12287                IEM_MC_BEGIN(0, 1);
12288                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12289                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12290                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12291                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
12292                IEM_MC_ADVANCE_RIP();
12293                IEM_MC_END();
12294                return VINF_SUCCESS;
12295
12296            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12297        }
12298    }
12299}
12300
12301
12302
12303
12304/** Opcode 0xc8. */
/*
 * ENTER Iw,Ib: create a stack frame of cbFrame bytes with u8NestingLevel
 * nesting levels.  186+ instruction; the heavy lifting is deferred to
 * iemCImpl_enter.
 */
12305FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12306{
12307    IEMOP_MNEMONIC("enter Iw,Ib");
12308    IEMOP_HLP_MIN_186();
12309    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12310    IEMOP_HLP_NO_LOCK_PREFIX();
12311    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12312    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12313    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
12314}
12315
12316
12317/** Opcode 0xc9. */
12318FNIEMOP_DEF(iemOp_leave)
12319{
12320 IEMOP_MNEMONIC("retn");
12321 IEMOP_HLP_MIN_186();
12322 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12323 IEMOP_HLP_NO_LOCK_PREFIX();
12324 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12325}
12326
12327
12328/** Opcode 0xca. */
/*
 * Far return, popping an additional u16Imm bytes off the stack; defers to
 * iemCImpl_retf.
 */
12329FNIEMOP_DEF(iemOp_retf_Iw)
12330{
12331    IEMOP_MNEMONIC("retf Iw");
12332    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12333    IEMOP_HLP_NO_LOCK_PREFIX();
12334    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12335    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
12336}
12337
12338
12338/** Opcode 0xcb. */
/*
 * Plain far return; same CIMPL worker as 0xca with a zero pop count.
 */
12339FNIEMOP_DEF(iemOp_retf)
12340{
12341    IEMOP_MNEMONIC("retf");
12342    IEMOP_HLP_NO_LOCK_PREFIX();
12343    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12344    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
12345}
12347
12348
12348/** Opcode 0xcc. */
/*
 * INT3 breakpoint: raises #BP with fIsBpInstr=true so the CIMPL worker can
 * distinguish it from a software "int 3" (0xcd 0x03).
 */
12349FNIEMOP_DEF(iemOp_int_3)
12350{
12351    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12352    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12353}
12355
12356
12356/** Opcode 0xcd. */
/*
 * INT Ib: software interrupt with an arbitrary vector; fIsBpInstr=false even
 * for vector 3, unlike opcode 0xcc.
 */
12357FNIEMOP_DEF(iemOp_int_Ib)
12358{
12359    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12360    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12361    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12362}
12364
12365
12366/** Opcode 0xce. */
/*
 * INTO: invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  Unconditionally calls
 * iemCImpl_int with vector X86_XCPT_OF; the raise-only-if-OF-set check is
 * presumably performed inside the CIMPL worker -- not visible here, confirm
 * against iemCImpl_int.
 */
12367FNIEMOP_DEF(iemOp_into)
12368{
12369    IEMOP_MNEMONIC("into");
12370    IEMOP_HLP_NO_64BIT();
12371
12372    IEM_MC_BEGIN(2, 0);
12373    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
12374    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
12375    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12376    IEM_MC_END();
12377    return VINF_SUCCESS;
12378}
12379
12380
12381/** Opcode 0xcf. */
/*
 * IRET: interrupt return; all mode/privilege handling lives in iemCImpl_iret.
 */
12382FNIEMOP_DEF(iemOp_iret)
12383{
12384    IEMOP_MNEMONIC("iret");
12385    IEMOP_HLP_NO_LOCK_PREFIX();
12386    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
12387}
12388
12389
12390/** Opcode 0xd0. */
/*
 * Group 2: rotate/shift Eb by a constant count of 1.
 * ModRM.reg selects the operation; /6 raises #UD.  OF and AF are declared
 * undefined for verification.
 */
12391FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12392{
12393    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12394    PCIEMOPSHIFTSIZES pImpl;
12395    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12396    {
12397        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
12398        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
12399        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
12400        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
12401        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
12402        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
12403        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
12404        case 6: return IEMOP_RAISE_INVALID_OPCODE();
12405        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12406    }
12407    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12408
12409    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12410    {
12411        /* register */
12412        IEMOP_HLP_NO_LOCK_PREFIX();
12413        IEM_MC_BEGIN(3, 0);
12414        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
12415        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
12416        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
12417        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12418        IEM_MC_REF_EFLAGS(pEFlags);
12419        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12420        IEM_MC_ADVANCE_RIP();
12421        IEM_MC_END();
12422    }
12423    else
12424    {
12425        /* memory */
12426        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12427        IEM_MC_BEGIN(3, 2);
12428        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
12429        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
12430        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
12431        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
12432
12433        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12434        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12435        IEM_MC_FETCH_EFLAGS(EFlags);
12436        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12437
12438        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12439        IEM_MC_COMMIT_EFLAGS(EFlags);
12440        IEM_MC_ADVANCE_RIP();
12441        IEM_MC_END();
12442    }
12443    return VINF_SUCCESS;
12444}
12445
12446
12447
12448/** Opcode 0xd1. */
/*
 * Group 2: rotate/shift Ev by a constant count of 1.
 * Same structure as 0xd0 but for word/dword/qword operands; /6 raises #UD.
 * OF and AF are declared undefined for verification.
 */
12449FNIEMOP_DEF(iemOp_Grp2_Ev_1)
12450{
12451    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12452    PCIEMOPSHIFTSIZES pImpl;
12453    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12454    {
12455        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
12456        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
12457        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
12458        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
12459        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
12460        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
12461        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
12462        case 6: return IEMOP_RAISE_INVALID_OPCODE();
12463        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12464    }
12465    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12466
12467    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12468    {
12469        /* register */
12470        IEMOP_HLP_NO_LOCK_PREFIX();
12471        switch (pIemCpu->enmEffOpSize)
12472        {
12473            case IEMMODE_16BIT:
12474                IEM_MC_BEGIN(3, 0);
12475                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
12476                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
12477                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
12478                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12479                IEM_MC_REF_EFLAGS(pEFlags);
12480                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12481                IEM_MC_ADVANCE_RIP();
12482                IEM_MC_END();
12483                return VINF_SUCCESS;
12484
12485            case IEMMODE_32BIT:
12486                IEM_MC_BEGIN(3, 0);
12487                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
12488                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
12489                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
12490                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12491                IEM_MC_REF_EFLAGS(pEFlags);
12492                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
12493                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12494                IEM_MC_ADVANCE_RIP();
12495                IEM_MC_END();
12496                return VINF_SUCCESS;
12497
12498            case IEMMODE_64BIT:
12499                IEM_MC_BEGIN(3, 0);
12500                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
12501                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
12502                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
12503                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12504                IEM_MC_REF_EFLAGS(pEFlags);
12505                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12506                IEM_MC_ADVANCE_RIP();
12507                IEM_MC_END();
12508                return VINF_SUCCESS;
12509
12510            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12511        }
12512    }
12513    else
12514    {
12515        /* memory */
12516        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12517        switch (pIemCpu->enmEffOpSize)
12518        {
12519            case IEMMODE_16BIT:
12520                IEM_MC_BEGIN(3, 2);
12521                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
12522                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
12523                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
12524                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
12525
12526                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12527                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12528                IEM_MC_FETCH_EFLAGS(EFlags);
12529                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12530
12531                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12532                IEM_MC_COMMIT_EFLAGS(EFlags);
12533                IEM_MC_ADVANCE_RIP();
12534                IEM_MC_END();
12535                return VINF_SUCCESS;
12536
12537            case IEMMODE_32BIT:
12538                IEM_MC_BEGIN(3, 2);
12539                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
12540                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
12541                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
12542                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
12543
12544                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12545                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12546                IEM_MC_FETCH_EFLAGS(EFlags);
12547                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12548
12549                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12550                IEM_MC_COMMIT_EFLAGS(EFlags);
12551                IEM_MC_ADVANCE_RIP();
12552                IEM_MC_END();
12553                return VINF_SUCCESS;
12554
12555            case IEMMODE_64BIT:
12556                IEM_MC_BEGIN(3, 2);
12557                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
12558                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
12559                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
12560                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
12561
12562                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12563                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12564                IEM_MC_FETCH_EFLAGS(EFlags);
12565                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12566
12567                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12568                IEM_MC_COMMIT_EFLAGS(EFlags);
12569                IEM_MC_ADVANCE_RIP();
12570                IEM_MC_END();
12571                return VINF_SUCCESS;
12572
12573            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12574        }
12575    }
12576}
12577
12578
12579/** Opcode 0xd2. */
/*
 * Group 2: rotate/shift Eb by the count in CL.
 * ModRM.reg selects the operation; /6 raises #UD.  OF and AF are declared
 * undefined for verification.
 */
12580FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
12581{
12582    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12583    PCIEMOPSHIFTSIZES pImpl;
12584    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12585    {
12586        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
12587        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
12588        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
12589        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
12590        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
12591        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
12592        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
12593        case 6: return IEMOP_RAISE_INVALID_OPCODE();
12594        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
12595    }
12596    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12597
12598    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12599    {
12600        /* register */
12601        IEMOP_HLP_NO_LOCK_PREFIX();
12602        IEM_MC_BEGIN(3, 0);
12603        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
12604        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
12605        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
12606        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            /* Shift count comes from CL (low byte of xCX). */
12607        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12608        IEM_MC_REF_EFLAGS(pEFlags);
12609        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12610        IEM_MC_ADVANCE_RIP();
12611        IEM_MC_END();
12612    }
12613    else
12614    {
12615        /* memory */
12616        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12617        IEM_MC_BEGIN(3, 2);
12618        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
12619        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
12620        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12621        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
12622
12623        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12624        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12625        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12626        IEM_MC_FETCH_EFLAGS(EFlags);
12627        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12628
12629        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12630        IEM_MC_COMMIT_EFLAGS(EFlags);
12631        IEM_MC_ADVANCE_RIP();
12632        IEM_MC_END();
12633    }
12634    return VINF_SUCCESS;
12635}
12636
12637
12638/** Opcode 0xd3. */
/*
 * Group 2: rotate/shift Ev by the count in CL.
 * Same structure as 0xd2 but for word/dword/qword operands; /6 raises #UD.
 * OF and AF are declared undefined for verification.
 */
12639FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
12640{
12641    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12642    PCIEMOPSHIFTSIZES pImpl;
12643    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12644    {
12645        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
12646        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
12647        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
12648        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
12649        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
12650        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
12651        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
12652        case 6: return IEMOP_RAISE_INVALID_OPCODE();
12653        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12654    }
12655    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12656
12657    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12658    {
12659        /* register */
12660        IEMOP_HLP_NO_LOCK_PREFIX();
12661        switch (pIemCpu->enmEffOpSize)
12662        {
12663            case IEMMODE_16BIT:
12664                IEM_MC_BEGIN(3, 0);
12665                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
12666                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
12667                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
12668                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* Shift count comes from CL (low byte of xCX). */
12669                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12670                IEM_MC_REF_EFLAGS(pEFlags);
12671                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12672                IEM_MC_ADVANCE_RIP();
12673                IEM_MC_END();
12674                return VINF_SUCCESS;
12675
12676            case IEMMODE_32BIT:
12677                IEM_MC_BEGIN(3, 0);
12678                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
12679                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
12680                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
12681                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12682                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12683                IEM_MC_REF_EFLAGS(pEFlags);
12684                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
12685                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12686                IEM_MC_ADVANCE_RIP();
12687                IEM_MC_END();
12688                return VINF_SUCCESS;
12689
12690            case IEMMODE_64BIT:
12691                IEM_MC_BEGIN(3, 0);
12692                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
12693                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
12694                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
12695                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12696                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12697                IEM_MC_REF_EFLAGS(pEFlags);
12698                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12699                IEM_MC_ADVANCE_RIP();
12700                IEM_MC_END();
12701                return VINF_SUCCESS;
12702
12703            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12704        }
12705    }
12706    else
12707    {
12708        /* memory */
12709        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12710        switch (pIemCpu->enmEffOpSize)
12711        {
12712            case IEMMODE_16BIT:
12713                IEM_MC_BEGIN(3, 2);
12714                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
12715                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12716                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12717                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12718
12719                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12720                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12721                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12722                IEM_MC_FETCH_EFLAGS(EFlags);
12723                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12724
12725                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12726                IEM_MC_COMMIT_EFLAGS(EFlags);
12727                IEM_MC_ADVANCE_RIP();
12728                IEM_MC_END();
12729                return VINF_SUCCESS;
12730
12731            case IEMMODE_32BIT:
12732                IEM_MC_BEGIN(3, 2);
12733                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
12734                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12735                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12736                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12737
12738                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12739                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12740                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12741                IEM_MC_FETCH_EFLAGS(EFlags);
12742                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12743
12744                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12745                IEM_MC_COMMIT_EFLAGS(EFlags);
12746                IEM_MC_ADVANCE_RIP();
12747                IEM_MC_END();
12748                return VINF_SUCCESS;
12749
12750            case IEMMODE_64BIT:
12751                IEM_MC_BEGIN(3, 2);
12752                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
12753                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
12754                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
12755                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
12756
12757                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12758                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12759                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12760                IEM_MC_FETCH_EFLAGS(EFlags);
12761                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12762
12763                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12764                IEM_MC_COMMIT_EFLAGS(EFlags);
12765                IEM_MC_ADVANCE_RIP();
12766                IEM_MC_END();
12767                return VINF_SUCCESS;
12768
12769            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12770        }
12771    }
12772}
12773
12774/** Opcode 0xd4. */
/*
 * AAM Ib: ASCII adjust AX after multiply; the immediate is the divisor
 * (normally 0x0a).  A zero immediate raises #DE before deferring to the
 * CIMPL worker.  Invalid in 64-bit mode.
 */
12775FNIEMOP_DEF(iemOp_aam_Ib)
12776{
12777    IEMOP_MNEMONIC("aam Ib");
12778    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12779    IEMOP_HLP_NO_LOCK_PREFIX();
12780    IEMOP_HLP_NO_64BIT();
12781    if (!bImm)
12782        return IEMOP_RAISE_DIVIDE_ERROR();
12783    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
12784}
12785
12786
12786/** Opcode 0xd5. */
/*
 * AAD Ib: ASCII adjust AX before divide; the immediate is the multiplier
 * (normally 0x0a).  No #DE check needed (it multiplies, unlike AAM).
 * Invalid in 64-bit mode.
 */
12787FNIEMOP_DEF(iemOp_aad_Ib)
12788{
12789    IEMOP_MNEMONIC("aad Ib");
12790    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12791    IEMOP_HLP_NO_LOCK_PREFIX();
12792    IEMOP_HLP_NO_64BIT();
12793    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
12794}
12796
12797
12798/** Opcode 0xd6. */
12799FNIEMOP_DEF(iemOp_salc)
12800{
12801 IEMOP_MNEMONIC("salc");
12802 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12803 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12805 IEMOP_HLP_NO_64BIT();
12806
12807 IEM_MC_BEGIN(0, 0);
12808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12809 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12810 } IEM_MC_ELSE() {
12811 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12812 } IEM_MC_ENDIF();
12813 IEM_MC_ADVANCE_RIP();
12814 IEM_MC_END();
12815 return VINF_SUCCESS;
12816}
12817
12818
12819/** Opcode 0xd7. */
/*
 * XLAT: AL = [xBX + zero-extended AL] in the effective data segment.
 * Switches on the effective address mode to pick the 16/32/64-bit address
 * arithmetic; the result is always written back to AL.
 */
12820FNIEMOP_DEF(iemOp_xlat)
12821{
12822    IEMOP_MNEMONIC("xlat");
12823    IEMOP_HLP_NO_LOCK_PREFIX();
12824    switch (pIemCpu->enmEffAddrMode)
12825    {
12826        case IEMMODE_16BIT:
12827            IEM_MC_BEGIN(2, 0);
12828            IEM_MC_LOCAL(uint8_t,  u8Tmp);
12829            IEM_MC_LOCAL(uint16_t, u16Addr);
12830            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
12831            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
12832            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
12833            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12834            IEM_MC_ADVANCE_RIP();
12835            IEM_MC_END();
12836            return VINF_SUCCESS;
12837
12838        case IEMMODE_32BIT:
12839            IEM_MC_BEGIN(2, 0);
12840            IEM_MC_LOCAL(uint8_t,  u8Tmp);
12841            IEM_MC_LOCAL(uint32_t, u32Addr);
12842            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
12843            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
12844            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
12845            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12846            IEM_MC_ADVANCE_RIP();
12847            IEM_MC_END();
12848            return VINF_SUCCESS;
12849
12850        case IEMMODE_64BIT:
12851            IEM_MC_BEGIN(2, 0);
12852            IEM_MC_LOCAL(uint8_t,  u8Tmp);
12853            IEM_MC_LOCAL(uint64_t, u64Addr);
12854            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
12855            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
12856            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
12857            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12858            IEM_MC_ADVANCE_RIP();
12859            IEM_MC_END();
12860            return VINF_SUCCESS;
12861
12862        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12863    }
12864}
12865
12866
12867/**
12868 * Common worker for FPU instructions working on ST0 and STn, and storing the
12869 * result in ST0.
12870 *
 * Raises #NM/#MF as appropriate, and records FPU stack underflow if either
 * ST0 or STn is empty.
 *
 * @param   bRm         The ModRM byte; the low three bits select STn.
12871 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
12872 */
12873FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12874{
12875    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12876
12877    IEM_MC_BEGIN(3, 1);
12878    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
12879    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
12880    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
12881    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);
12882
12883    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12884    IEM_MC_MAYBE_RAISE_FPU_XCPT();
12885    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12886        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12887        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12888    IEM_MC_ELSE()
12889        IEM_MC_FPU_STACK_UNDERFLOW(0);
12890    IEM_MC_ENDIF();
12891    IEM_MC_USED_FPU();
12892    IEM_MC_ADVANCE_RIP();
12893
12894    IEM_MC_END();
12895    return VINF_SUCCESS;
12896}
12897
12898
12899/**
12900 * Common worker for FPU instructions working on ST0 and STn, and only affecting
12901 * flags.
12902 *
 * Unlike iemOpHlpFpu_st0_stN this only updates FSW; no register result is
 * stored.  On stack underflow, UINT8_MAX means no destination register.
 *
 * @param   bRm         The ModRM byte; the low three bits select STn.
12903 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
12904 */
12905FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12906{
12907    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12908
12909    IEM_MC_BEGIN(3, 1);
12910    IEM_MC_LOCAL(uint16_t,              u16Fsw);
12911    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
12912    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
12913    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);
12914
12915    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12916    IEM_MC_MAYBE_RAISE_FPU_XCPT();
12917    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12918        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12919        IEM_MC_UPDATE_FSW(u16Fsw);
12920    IEM_MC_ELSE()
12921        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
12922    IEM_MC_ENDIF();
12923    IEM_MC_USED_FPU();
12924    IEM_MC_ADVANCE_RIP();
12925
12926    IEM_MC_END();
12927    return VINF_SUCCESS;
12928}
12929
12930
12931/**
12932 * Common worker for FPU instructions working on ST0 and STn, only affecting
12933 * flags, and popping when done.
12934 *
12935 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12936 */
12937FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12938{
12939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12940
12941 IEM_MC_BEGIN(3, 1);
12942 IEM_MC_LOCAL(uint16_t, u16Fsw);
12943 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12944 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12945 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12946
12947 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12948 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12949 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12950 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12951 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
12952 IEM_MC_ELSE()
12953 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
12954 IEM_MC_ENDIF();
12955 IEM_MC_USED_FPU();
12956 IEM_MC_ADVANCE_RIP();
12957
12958 IEM_MC_END();
12959 return VINF_SUCCESS;
12960}
12961
12962
/** Opcode 0xd8 11/0. FADD ST(0),ST(i): add ST(i) to ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12969
12970
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i): multiply ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12977
12978
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i): compare, updates FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12985
12986
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i): compare (FSW only), then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12993
12994
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i): subtract ST(i) from ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13001
13002
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i): reverse subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13009
13010
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i): divide ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13017
13018
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i): reverse divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13025
13026
13027/**
13028 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13029 * the result in ST0.
13030 *
13031 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13032 */
13033FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13034{
13035 IEM_MC_BEGIN(3, 3);
13036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13037 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13038 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13039 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13040 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13041 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13042
13043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13045
13046 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13047 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13048 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13049
13050 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13051 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13052 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13053 IEM_MC_ELSE()
13054 IEM_MC_FPU_STACK_UNDERFLOW(0);
13055 IEM_MC_ENDIF();
13056 IEM_MC_USED_FPU();
13057 IEM_MC_ADVANCE_RIP();
13058
13059 IEM_MC_END();
13060 return VINF_SUCCESS;
13061}
13062
13063
/** Opcode 0xd8 !11/0. FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13070
13071
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13078
13079
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real: compare against a 32-bit float in
 *  memory, updating FSW only (with memory-operand FPU pointer info). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13112
13113
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real: same as FCOM m32r but pops the
 *  stack afterwards (uses the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13146
13147
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13154
13155
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real (reverse subtract). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13162
13163
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13170
13171
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real (reverse divide). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13178
13179
/** Opcode 0xd8.
 * First x87 escape opcode: dispatches on ModR/M.  Register forms (mod==3)
 * operate on ST0/STn; memory forms operate on ST0 and an m32real operand.
 * Records the FPU opcode offset before fetching the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* offOpcode currently points just past 0xd8; back up one for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (m32real operand). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13217
13218
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert a 32-bit float to 80-bit and push it.  Checks that the
 * next stack slot (ST7 relative to the new top) is free, else signals a push
 * overflow with the memory-operand FPU pointers.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13251
13252
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: store ST0 to memory as a 32-bit float (no pop).  The memory is
 * mapped for write up front; on empty ST0 a default negative QNaN is written
 * only when the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13287
13288
/** Opcode 0xd9 !11/3
 * FSTP m32real: same as FST m32real but pops the stack afterwards
 * (uses the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13323
13324
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment; the 14 vs 28 byte layout is
 * selected by the effective operand size, which is passed to the C impl. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13341
13342
13343/** Opcode 0xd9 !11/5 */
13344FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13345{
13346 IEMOP_MNEMONIC("fldcw m2byte");
13347 IEM_MC_BEGIN(1, 1);
13348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13349 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13352 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13353 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13354 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13355 IEM_MC_END();
13356 return VINF_SUCCESS;
13357}
13358
13359
13360/** Opcode 0xd9 !11/6 */
13361FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13362{
13363 IEMOP_MNEMONIC("fstenv m14/m28byte");
13364 IEM_MC_BEGIN(3, 0);
13365 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13366 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13367 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13370 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13371 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13372 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13373 IEM_MC_END();
13374 return VINF_SUCCESS;
13375}
13376
13377
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13394
13395
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except update the FPU opcode/instruction pointers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13413
13414
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the stack; signals push underflow if
 * the source register is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13440
13441
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange ST(0) and ST(i); C1 is set in the resulting FSW.
 * If either register is empty, the underflow handling is delegated to
 * iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13470
13471
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST(0) to ST(i) and pop.  The i==0 case is special-cased
 * below since copying ST0 to itself reduces to just popping. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (with underflow check). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13514
13515
13516/**
13517 * Common worker for FPU instructions working on ST0 and replaces it with the
13518 * result, i.e. unary operators.
13519 *
13520 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13521 */
13522FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13523{
13524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13525
13526 IEM_MC_BEGIN(2, 1);
13527 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13528 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13529 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13530
13531 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13532 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13533 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13534 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13535 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13536 IEM_MC_ELSE()
13537 IEM_MC_FPU_STACK_UNDERFLOW(0);
13538 IEM_MC_ENDIF();
13539 IEM_MC_USED_FPU();
13540 IEM_MC_ADVANCE_RIP();
13541
13542 IEM_MC_END();
13543 return VINF_SUCCESS;
13544}
13545
13546
/** Opcode 0xd9 0xe0. FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13553
13554
/** Opcode 0xd9 0xe1. FABS: absolute value of ST(0). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13561
13562
13563/**
13564 * Common worker for FPU instructions working on ST0 and only returns FSW.
13565 *
13566 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13567 */
13568FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13569{
13570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13571
13572 IEM_MC_BEGIN(2, 1);
13573 IEM_MC_LOCAL(uint16_t, u16Fsw);
13574 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13575 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13576
13577 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13578 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13579 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13580 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13581 IEM_MC_UPDATE_FSW(u16Fsw);
13582 IEM_MC_ELSE()
13583 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13584 IEM_MC_ENDIF();
13585 IEM_MC_USED_FPU();
13586 IEM_MC_ADVANCE_RIP();
13587
13588 IEM_MC_END();
13589 return VINF_SUCCESS;
13590}
13591
13592
/** Opcode 0xd9 0xe4. FTST: compare ST(0) against 0.0, updates FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13599
13600
/** Opcode 0xd9 0xe5. FXAM: classify ST(0) into the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13607
13608
13609/**
13610 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13611 *
13612 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13613 */
13614FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13615{
13616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13617
13618 IEM_MC_BEGIN(1, 1);
13619 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13620 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13621
13622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13624 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13625 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13626 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13627 IEM_MC_ELSE()
13628 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13629 IEM_MC_ENDIF();
13630 IEM_MC_USED_FPU();
13631 IEM_MC_ADVANCE_RIP();
13632
13633 IEM_MC_END();
13634 return VINF_SUCCESS;
13635}
13636
13637
/** Opcode 0xd9 0xe8. FLD1: push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13644
13645
/** Opcode 0xd9 0xe9. FLDL2T: push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13652
13653
/** Opcode 0xd9 0xea. FLDL2E: push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13660
/** Opcode 0xd9 0xeb. FLDPI: push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13667
13668
/** Opcode 0xd9 0xec. FLDLG2: push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13675
/** Opcode 0xd9 0xed. FLDLN2: push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13682
13683
/** Opcode 0xd9 0xee. FLDZ: push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13690
13691
/** Opcode 0xd9 0xf0. F2XM1: ST(0) = 2^ST(0) - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13698
13699
/** Opcode 0xd9 0xf1.
 * NOTE(review): 0xd9 0xf1 is FYL2X; 'fylx2' looks like a typo.  FYL2X is
 * architecturally a two-operand instruction (ST(1) = ST(1)*log2(ST(0)),
 * then pop), but this routes through the unary replace-ST0-no-pop helper
 * iemOpHlpFpu_st0.  Verify against iemAImpl_fyl2x_r80 whether the ST1/pop
 * handling happens in the assembly worker - otherwise this is a bug. */
FNIEMOP_DEF(iemOp_fylx2)
{
    IEMOP_MNEMONIC("fylx2 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
}
13706
13707
13708/**
13709 * Common worker for FPU instructions working on ST0 and having two outputs, one
13710 * replacing ST0 and one pushed onto the stack.
13711 *
13712 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13713 */
13714FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13715{
13716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13717
13718 IEM_MC_BEGIN(2, 1);
13719 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13720 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13721 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13722
13723 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13724 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13725 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13726 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13727 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13728 IEM_MC_ELSE()
13729 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13730 IEM_MC_ENDIF();
13731 IEM_MC_USED_FPU();
13732 IEM_MC_ADVANCE_RIP();
13733
13734 IEM_MC_END();
13735 return VINF_SUCCESS;
13736}
13737
13738
/** Opcode 0xd9 0xf2. FPTAN: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13745
13746
13747/**
13748 * Common worker for FPU instructions working on STn and ST0, storing the result
13749 * in STn, and popping the stack unless IE, DE or ZE was raised.
13750 *
13751 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13752 */
13753FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13754{
13755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13756
13757 IEM_MC_BEGIN(3, 1);
13758 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13759 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13760 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13761 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13762
13763 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13764 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13765
13766 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13767 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13768 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13769 IEM_MC_ELSE()
13770 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13771 IEM_MC_ENDIF();
13772 IEM_MC_USED_FPU();
13773 IEM_MC_ADVANCE_RIP();
13774
13775 IEM_MC_END();
13776 return VINF_SUCCESS;
13777}
13778
13779
/** Opcode 0xd9 0xf3. FPATAN: result in ST(1) (literal index 1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13786
13787
/** Opcode 0xd9 0xf4. FXTRACT: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13794
13795
/** Opcode 0xd9 0xf5. FPREM1: ST(0) by ST(1) (literal index 1), result in ST(0), no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13802
13803
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack top pointer; no registers are read or
 * written and no arithmetic exceptions are generated. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13826
13827
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack top pointer; mirror image of FDECSTP. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13850
13851
/** Opcode 0xd9 0xf8. FPREM: ST(0) by ST(1) (literal index 1), result in ST(0), no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13858
13859
/** Opcode 0xd9 0xf9. FYL2XP1: result in ST(1) (literal index 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13866
13867
/** Opcode 0xd9 0xfa. FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13874
13875
/** Opcode 0xd9 0xfb. FSINCOS: replaces ST(0) and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13882
13883
/** Opcode 0xd9 0xfc. FRNDINT: round ST(0) to integer in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13890
13891
/** Opcode 0xd9 0xfd. FSCALE: ST(0) scaled by ST(1) (literal index 1), result in ST(0), no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13898
13899
/** Opcode 0xd9 0xfe. FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13906
13907
/** Opcode 0xd9 0xff. FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13914
13915
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form ModR/M bytes 0xe0..0xff, indexed by
 * (bRm - 0xe0).  Invalid encodings map to iemOp_Invalid.
 * NOTE(review): entry 0xf1 references iemOp_fylx2, which appears to be a
 * misspelling of FYL2X - see the note on that function. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
13952
13953
/** Opcode 0xd9. Escape group F1 decoder: dispatches on the modrm byte. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the FPU opcode byte sits (one byte back from the modrm). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3): dispatch on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg 4..7 => modrm 0xe0..0xff, handled via the lookup table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: reg field selects the operation. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13995
13996
/** Opcode 0xda 11/0. FCMOVB - copy ST(n) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14023
14024
/** Opcode 0xda 11/1. FCMOVE - copy ST(n) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14051
14052
/** Opcode 0xda 11/2. FCMOVBE - copy ST(n) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14079
14080
/** Opcode 0xda 11/3. FCMOVU - copy ST(n) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14107
14108
14109/**
14110 * Common worker for FPU instructions working on ST0 and STn, only affecting
14111 * flags, and popping twice when done.
14112 *
14113 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14114 */
14115FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14116{
14117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14118
14119 IEM_MC_BEGIN(3, 1);
14120 IEM_MC_LOCAL(uint16_t, u16Fsw);
14121 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14122 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14123 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14124
14125 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14126 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14127 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14128 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14129 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14130 IEM_MC_ELSE()
14131 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14132 IEM_MC_ENDIF();
14133 IEM_MC_USED_FPU();
14134 IEM_MC_ADVANCE_RIP();
14135
14136 IEM_MC_END();
14137 return VINF_SUCCESS;
14138}
14139
14140
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14147
14148
14149/**
14150 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14151 * the result in ST0.
14152 *
14153 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14154 */
14155FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14156{
14157 IEM_MC_BEGIN(3, 3);
14158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14159 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14160 IEM_MC_LOCAL(int32_t, i32Val2);
14161 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14162 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14163 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14164
14165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14167
14168 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14169 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14170 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14171
14172 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14173 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14174 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14175 IEM_MC_ELSE()
14176 IEM_MC_FPU_STACK_UNDERFLOW(0);
14177 IEM_MC_ENDIF();
14178 IEM_MC_USED_FPU();
14179 IEM_MC_ADVANCE_RIP();
14180
14181 IEM_MC_END();
14182 return VINF_SUCCESS;
14183}
14184
14185
/** Opcode 0xda !11/0. FIADD - add m32i to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14192
14193
/** Opcode 0xda !11/1. FIMUL - multiply ST(0) by m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14200
14201
/** Opcode 0xda !11/2. FICOM - compare ST(0) with m32i (flags only, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only FSW is updated; the comparison has no register result. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14234
14235
/** Opcode 0xda !11/3. FICOMP - compare ST(0) with m32i, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison worker as FICOM; the _THEN_POP variants pop the stack. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14268
14269
/** Opcode 0xda !11/4. FISUB - subtract m32i from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14276
14277
/** Opcode 0xda !11/5. FISUBR - reverse subtract: ST(0) := m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14284
14285
/** Opcode 0xda !11/6. FIDIV - divide ST(0) by m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14292
14293
/** Opcode 0xda !11/7. FIDIVR - reverse divide: ST(0) := m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14300
14301
/** Opcode 0xda. Escape group F2 decoder: FCMOVcc / FUCOMPP (reg form) and the
 *  m32i integer arithmetic instructions (mem form). */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode byte sits (one byte back from the modrm). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only modrm 0xe9 is valid in the reg=5 column (FUCOMPP). */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14341
14342
/** Opcode 0xdb !11/0. FILD - load m32i and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push targets ST(7) pre-push; it must be empty or we overflow the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14374
14375
/** Opcode 0xdb !11/1. FISTTP - store ST(0) as m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14410
14411
/** Opcode 0xdb !11/2. FIST - store ST(0) as m32i (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14446
14447
14448/** Opcode 0xdb !11/3. */
14449FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14450{
14451 IEMOP_MNEMONIC("fisttp m32i");
14452 IEM_MC_BEGIN(3, 2);
14453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14454 IEM_MC_LOCAL(uint16_t, u16Fsw);
14455 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14456 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14457 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14458
14459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14461 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14462 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14463
14464 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14465 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14466 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14467 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14468 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14469 IEM_MC_ELSE()
14470 IEM_MC_IF_FCW_IM()
14471 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14472 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14473 IEM_MC_ENDIF();
14474 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14475 IEM_MC_ENDIF();
14476 IEM_MC_USED_FPU();
14477 IEM_MC_ADVANCE_RIP();
14478
14479 IEM_MC_END();
14480 return VINF_SUCCESS;
14481}
14482
14483
/** Opcode 0xdb !11/5. FLD - load m80r and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push targets ST(7) pre-push; it must be empty or we overflow the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14515
14516
/** Opcode 0xdb !11/7. FSTP - store ST(0) as m80r, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with IM masked, write a negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14551
14552
/** Opcode 0xdb 11/0. FCMOVNB - copy ST(n) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14579
14580
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(n) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14607
14608
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(n) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(n) and ST(0) must be non-empty; otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14635
14636
14637/** Opcode 0xdb 11/3. */
14638FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14639{
14640 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14642
14643 IEM_MC_BEGIN(0, 1);
14644 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14645
14646 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14647 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14648
14649 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14650 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14651 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14652 IEM_MC_ENDIF();
14653 IEM_MC_UPDATE_FPU_OPCODE_IP();
14654 IEM_MC_ELSE()
14655 IEM_MC_FPU_STACK_UNDERFLOW(0);
14656 IEM_MC_ENDIF();
14657 IEM_MC_USED_FPU();
14658 IEM_MC_ADVANCE_RIP();
14659
14660 IEM_MC_END();
14661 return VINF_SUCCESS;
14662}
14663
14664
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt enable; ignored on later FPUs
 *  (emulated as a no-op, but #NM checks still apply). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14676
14677
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt disable; ignored on later FPUs
 *  (emulated as a no-op, but #NM checks still apply). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14689
14690
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags without checking for
 *  pending unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14704
14705
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without exception checking;
 *  deferred to a C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14713
14714
/** Opcode 0xdb 0xe4. FNSETPM - 80287 only; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14726
14727
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL only; raises #UD here since newer CPUs
 *  treat it as an invalid opcode (the no-op emulation is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14743
14744
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST(0) with ST(n), set EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    /* Shared C worker for FCOMI/FUCOMI; false = do not pop afterwards. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14751
14752
/** Opcode 0xdb 11/6. FCOMI - compare ST(0) with ST(n), set EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    /* Shared C worker for FCOMI/FUCOMI; false = do not pop afterwards. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14759
14760
/** Opcode 0xdb. Escape group F3 decoder: FCMOVNcc / control ops / FCOMI
 *  (reg form) and m32i / m80r load-store instructions (mem form). */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the FPU opcode byte sits (one byte back from the modrm). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 column: individual control instructions by modrm byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14810
14811
14812/**
14813 * Common worker for FPU instructions working on STn and ST0, and storing the
14814 * result in STn unless IE, DE or ZE was raised.
14815 *
14816 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14817 */
14818FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14819{
14820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14821
14822 IEM_MC_BEGIN(3, 1);
14823 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14824 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14825 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14826 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14827
14828 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14829 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14830
14831 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14832 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14833 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14834 IEM_MC_ELSE()
14835 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14836 IEM_MC_ENDIF();
14837 IEM_MC_USED_FPU();
14838 IEM_MC_ADVANCE_RIP();
14839
14840 IEM_MC_END();
14841 return VINF_SUCCESS;
14842}
14843
14844
/** Opcode 0xdc 11/0. FADD - ST(n) := ST(n) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14851
14852
/** Opcode 0xdc 11/1. FMUL - ST(n) := ST(n) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14859
14860
/** Opcode 0xdc 11/4. FSUBR - reverse subtract into ST(n). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14867
14868
/** Opcode 0xdc 11/5. FSUB - subtract into ST(n). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14875
14876
/** Opcode 0xdc 11/6. FDIVR - reverse divide into ST(n). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14883
14884
/** Opcode 0xdc 11/7. FDIV - divide into ST(n). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14891
14892
14893/**
14894 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14895 * memory operand, and storing the result in ST0.
14896 *
14897 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14898 */
14899FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14900{
14901 IEM_MC_BEGIN(3, 3);
14902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14903 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14904 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14905 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14906 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14907 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14908
14909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14911 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14912 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14913
14914 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14915 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14916 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14917 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14918 IEM_MC_ELSE()
14919 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14920 IEM_MC_ENDIF();
14921 IEM_MC_USED_FPU();
14922 IEM_MC_ADVANCE_RIP();
14923
14924 IEM_MC_END();
14925 return VINF_SUCCESS;
14926}
14927
14928
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    /* FADD m64real -- ST0 += [mem64]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14935
14936
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    /* FMUL m64real -- ST0 *= [mem64]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14943
14944
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    /* Compares ST0 against a 64-bit float memory operand; only FSW is
       updated, no stack register is written and nothing is popped. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* ST0 empty: stack underflow; no particular stack register is the target. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14977
14978
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    /* Same as FCOM m64real (0xdc !11/2) but pops ST0 afterwards; the
       _THEN_POP variants of the FSW/underflow macros do the popping. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15011
15012
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    /* FSUB m64real -- ST0 -= [mem64]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15019
15020
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    /* FSUBR m64real -- ST0 = [mem64] - ST0; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15027
15028
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    /* FDIV m64real -- ST0 /= [mem64]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15035
15036
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    /* FDIVR m64real -- ST0 = [mem64] / ST0; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15043
15044
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the offset of the escape opcode (used when updating the FPU
       opcode/IP state), then dispatch on the ModRM reg field. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: ST(i) is the destination. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 64-bit real operand, ST0 is the destination. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15081
15082
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    /* Loads a 64-bit float from memory, converts it to 80-bit and pushes it
       onto the FPU stack (i.e. into what becomes the new ST0). */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Register 7 (relative to the current top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15114
15115
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    /* Stores ST0 to a 64-bit integer with truncation and pops the stack. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination so the commit can be made conditional on the FSW. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: if the invalid-operation exception is masked, store the
           integer indefinite value; either way raise stack underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15150
15151
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    /* Stores ST0 to a 64-bit float in memory; the stack is not popped. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store negative QNaN; then raise underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15186
15187
15188
15189
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    /* Same as FST m64real (0xdd !11/2) but pops ST0 afterwards. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store negative QNaN; raise underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15224
15225
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    /* Restores the whole FPU state from memory; the heavy lifting is done by
       the C implementation (operand size selects the 94/108 byte layout). */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15242
15243
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    /* Saves the whole FPU state to memory (no pending-exception check, hence
       the FN prefix); defers to the C implementation. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15261
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    /* Stores the FPU status word to a 16-bit memory location. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15285
15286
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Marks the ST(i) tag as empty; the register contents are untouched. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15308
15309
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copies ST0 into ST(i) without affecting the FSW (result FSW is 0). */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15332
15333
15334/** Opcode 0xdd 11/3. */
15335FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15336{
15337 IEMOP_MNEMONIC("fcom st0,stN");
15338 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15339}
15340
15341
15342/** Opcode 0xdd 11/4. */
15343FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15344{
15345 IEMOP_MNEMONIC("fcomp st0,stN");
15346 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15347}
15348
15349
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the offset of the escape opcode (used when updating the FPU
       opcode/IP state), then dispatch on the ModRM reg field. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15386
15387
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* FADDP ST(i),ST0 -- add and pop; shares the ST(i)/ST0-with-pop worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15394
15395
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* FMULP ST(i),ST0 -- multiply and pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15402
15403
15404/** Opcode 0xde 0xd9. */
15405FNIEMOP_DEF(iemOp_fcompp)
15406{
15407 IEMOP_MNEMONIC("fucompp st0,stN");
15408 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15409}
15410
15411
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* FSUBRP ST(i),ST0 -- reverse subtract and pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15418
15419
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* FSUBP ST(i),ST0 -- subtract and pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15426
15427
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* FDIVRP ST(i),ST0 -- reverse divide and pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15434
15435
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* FDIVP ST(i),ST0 -- divide and pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15442
15443
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte (memory form, mod != 3).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        /* Result always lands in ST0 (register 0 relative to top). */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15479
15480
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* FIADD m16int -- ST0 += (int16)[mem]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15487
15488
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* FIMUL m16int -- ST0 *= (int16)[mem]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15495
15496
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    /* Compares ST0 against a 16-bit signed integer memory operand; only the
       FSW is updated, nothing is stored or popped. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15529
15530
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    /* Same as FICOM m16int (0xde !11/2) but pops ST0 afterwards. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15563
15564
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* FISUB m16int -- ST0 -= (int16)[mem]; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15571
15572
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* FISUBR m16int -- ST0 = (int16)[mem] - ST0; result in ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15579
15580
15581/** Opcode 0xde !11/6. */
15582FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15583{
15584 IEMOP_MNEMONIC("fiadd m16i");
15585 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15586}
15587
15588
15589/** Opcode 0xde !11/7. */
15590FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15591{
15592 IEMOP_MNEMONIC("fiadd m16i");
15593 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15594}
15595
15596
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the offset of the escape opcode (used when updating the FPU
       opcode/IP state), then dispatch on the ModRM reg field. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* /3 is only valid for the 0xd9 encoding (FCOMPP); everything
               else in this row is an invalid opcode. */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 16-bit signed integer operand. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15635
15636
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free ST(i) and increment the stack top, i.e. ffree + fincstp. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15658
15659
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Stores the FPU status word in AX. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15675
15676
15677/** Opcode 0xdf 11/5. */
15678FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15679{
15680 IEMOP_MNEMONIC("fcomip st0,stN");
15681 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15682}
15683
15684
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* FCOMIP ST0,ST(i) -- ordered compare into EFLAGS, then pop (fPop=true). */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15691
15692
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    /* Loads a 16-bit signed integer, converts it to 80-bit float and pushes
       it onto the FPU stack. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 (relative to the current top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15724
15725
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    /* Stores ST0 to a 16-bit integer with truncation and pops the stack. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15760
15761
15762/** Opcode 0xdf !11/2. */
15763FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15764{
15765 IEMOP_MNEMONIC("fistp m16i");
15766 IEM_MC_BEGIN(3, 2);
15767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15768 IEM_MC_LOCAL(uint16_t, u16Fsw);
15769 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15770 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15771 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15772
15773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15775 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15776 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15777
15778 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15779 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15780 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15781 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15782 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15783 IEM_MC_ELSE()
15784 IEM_MC_IF_FCW_IM()
15785 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15786 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15787 IEM_MC_ENDIF();
15788 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15789 IEM_MC_ENDIF();
15790 IEM_MC_USED_FPU();
15791 IEM_MC_ADVANCE_RIP();
15792
15793 IEM_MC_END();
15794 return VINF_SUCCESS;
15795}
15796
15797
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    /* Same as FIST m16int (0xdf !11/2) but pops ST0 afterwards. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15832
15833
/** Opcode 0xdf !11/4: FBLD m80bcd - load packed BCD. Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15836
15837
/**
 * Opcode 0xdf !11/5: FILD m64i.
 *
 * Loads a 64-bit signed integer from memory, converts it to 80-bit real and
 * pushes it onto the FPU stack.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push goes into ST(7)'s physical register, so that one must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15869
15870
/** Opcode 0xdf !11/6: FBSTP m80bcd - store packed BCD and pop. Not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15873
15874
/**
 * Opcode 0xdf !11/7: FISTP m64i.
 *
 * Stores ST(0) to a 64-bit signed integer in memory and pops the FPU stack.
 *
 * @param   bRm     The ModR/M byte (memory form).
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before inspecting the stack top. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with FCW.IM set, store the 64-bit integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15909
15910
/**
 * Opcode 0xdf - ESC 7.
 *
 * Decodes the 0xdf FPU escape byte: register forms (mod==3) dispatch on the
 * reg field of ModR/M, memory forms dispatch to the m16i/m64i/m80bcd workers.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* Only DF E0 encodes FNSTSW AX; the rest of /4 is invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15948
15949
/**
 * Opcode 0xe0: LOOPNE/LOOPNZ Jb.
 *
 * Decrements the counter register and takes the short branch while the
 * counter is non-zero AND ZF is clear.  The counter width (CX/ECX/RCX) is
 * selected by the effective *address* size, not the operand size.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15996
15997
/**
 * Opcode 0xe1: LOOPE/LOOPZ Jb.
 *
 * Decrements the counter register and takes the short branch while the
 * counter is non-zero AND ZF is set.  Counter width follows the effective
 * address size (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16044
16045
/**
 * Opcode 0xe2: LOOP Jb.
 *
 * Decrements the counter register and branches while it is non-zero.  The
 * branch-to-self form (displacement equal to minus the instruction length,
 * i.e. "loop $") is special-cased: instead of iterating, the counter is
 * cleared and execution falls through, completing the delay loop in one go.
 * Counter width follows the effective address size (CX/ECX/RCX).
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not "loop $"? */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Branch to self: finish the whole loop at once. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not "loop $"? */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Branch to self: finish the whole loop at once. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm) /* not "loop $"? */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Branch to self: finish the whole loop at once. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16119
16120
/**
 * Opcode 0xe3: JCXZ/JECXZ/JRCXZ Jb.
 *
 * Branches when the counter register is zero; the counter width follows the
 * effective address size.  (The mnemonic string is always "jecxz" here even
 * for the 16/64-bit counter forms.)
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted sense: non-zero falls through, zero jumps. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16164
16165
16166/** Opcode 0xe4 */
16167FNIEMOP_DEF(iemOp_in_AL_Ib)
16168{
16169 IEMOP_MNEMONIC("in eAX,Ib");
16170 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16171 IEMOP_HLP_NO_LOCK_PREFIX();
16172 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16173}
16174
16175
/**
 * Opcode 0xe5: IN eAX, Ib.
 *
 * Reads a word or dword (by effective operand size) from the immediate 8-bit
 * I/O port into AX/EAX; deferred to the iemCImpl_in C implementation.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16184
16185
/**
 * Opcode 0xe6: OUT Ib, AL.
 *
 * Writes AL to the immediate 8-bit I/O port; deferred to iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16194
16195
/**
 * Opcode 0xe7: OUT Ib, eAX.
 *
 * Writes AX/EAX (by effective operand size) to the immediate 8-bit I/O port;
 * deferred to iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16204
16205
/**
 * Opcode 0xe8: CALL Jv - near relative call.
 *
 * Fetches the relative displacement per the effective operand size and defers
 * the push/branch to the size-specific C implementation.  In 64-bit mode the
 * displacement is a sign-extended 32-bit value.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16234
16235
/**
 * Opcode 0xe9: JMP Jv - near relative jump.
 *
 * 16-bit operand size uses a 16-bit displacement; 32-bit and 64-bit modes
 * share the 32-bit displacement form (sign-extended by IEM_MC_REL_JMP_S32).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16265
16266
/**
 * Opcode 0xea: JMP Ap - direct far jump (ptr16:16 / ptr16:32).
 *
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT).  The selector:offset pair is
 * decoded here and the actual control transfer is done by iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16283
16284
/**
 * Opcode 0xeb: JMP Jb - short relative jump (8-bit displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16298
16299
/**
 * Opcode 0xec: IN AL, DX.
 *
 * Reads one byte from the I/O port in DX into AL; deferred to
 * iemCImpl_in_eAX_DX with an access size of 1.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16307
16308
/**
 * Opcode 0xed: IN eAX, DX.
 *
 * Reads a word or dword (by effective operand size) from the I/O port in DX
 * into AX/EAX; deferred to iemCImpl_in_eAX_DX.
 *
 * NOTE(review): the function name drops the "in_" prefix used by its siblings
 * (cf. iemOp_in_AL_DX); it is referenced by name from the opcode dispatch
 * table, so renaming would need a coordinated change there.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16316
16317
/**
 * Opcode 0xee: OUT DX, AL.
 *
 * Writes AL to the I/O port in DX; deferred to iemCImpl_out_DX_eAX with an
 * access size of 1.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16325
16326
/**
 * Opcode 0xef: OUT DX, eAX.
 *
 * Writes AX/EAX (by effective operand size) to the I/O port in DX; deferred
 * to iemCImpl_out_DX_eAX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16334
16335
/**
 * Opcode 0xf0: LOCK prefix.
 *
 * Records the prefix in fPrefixes and recursively decodes the next opcode
 * byte through the one-byte table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16345
16346
/**
 * Opcode 0xf1: INT1 / ICEBP.
 *
 * Dispatches a \#DB via the common software-interrupt C implementation; the
 * fIsBpInstr=false argument distinguishes it from INT3.
 */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16355
16356
/**
 * Opcode 0xf2: REPNE/REPNZ prefix.
 *
 * Clears any earlier REPE prefix (last one wins), records REPNZ, and
 * recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16368
16369
/**
 * Opcode 0xf3: REPE/REPZ prefix.
 *
 * Clears any earlier REPNE prefix (last one wins), records REPZ, and
 * recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16381
16382
/**
 * Opcode 0xf4: HLT - deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16389
16390
/**
 * Opcode 0xf5: CMC - complement (toggle) the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16402
16403
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Register forms use the normal worker; memory forms map the byte read-write
 * and select the locked worker when a LOCK prefix is present.
 *
 * @param bRm    The RM byte.
 * @param pImpl  The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Honour the LOCK prefix by dispatching to the atomic worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16447
16448
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register forms go through the shared iemOpCommonUnaryGReg worker; memory
 * forms are handled here per operand size, with LOCK-prefix dispatch to the
 * locked worker.
 *
 * @param bRm    The RM byte.
 * @param pImpl  The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16527
16528
/**
 * Opcode 0xf6 /0: TEST Eb, Ib.
 *
 * ANDs the r/m8 operand with an imm8 and sets flags; no result is written
 * back, so the memory operand is mapped read-only.  AF is left undefined.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The imm8 follows the ModR/M bytes; the '1' tells the effective
           address calculation how many immediate bytes are still to come. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16576
16577
/**
 * Opcode 0xf7 /0: TEST Ev, Iv.
 *
 * ANDs the r/m operand with an immediate (imm16/imm32, imm32 sign-extended in
 * 64-bit mode) and sets flags.  No result is written back, so memory operands
 * are mapped read-only and the 32-bit register form does not clear the high
 * dword.  AF is left undefined.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* '2' = number of trailing immediate bytes still to fetch. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes: imm32 sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16714
16715
/**
 * Opcode 0xf6 /4, /5, /6 and /7: common worker for MUL/IMUL/DIV/IDIV Eb.
 *
 * The 8-bit forms operate on AX (implicit accumulator).  The assembly worker
 * returns non-zero on a divide error, which is turned into a \#DE here.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit assembly worker to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 signals a divide error from the worker. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16770
16771
16772/** Opcode 0xf7 /4, /5, /6 and /7. */
16773FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
16774{
16775 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16776 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
16777
16778 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16779 {
16780 /* register access */
16781 switch (pIemCpu->enmEffOpSize)
16782 {
16783 case IEMMODE_16BIT:
16784 {
16785 IEMOP_HLP_NO_LOCK_PREFIX();
16786 IEM_MC_BEGIN(4, 1);
16787 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16788 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16789 IEM_MC_ARG(uint16_t, u16Value, 2);
16790 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16791 IEM_MC_LOCAL(int32_t, rc);
16792
16793 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16794 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16795 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16796 IEM_MC_REF_EFLAGS(pEFlags);
16797 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16798 IEM_MC_IF_LOCAL_IS_Z(rc) {
16799 IEM_MC_ADVANCE_RIP();
16800 } IEM_MC_ELSE() {
16801 IEM_MC_RAISE_DIVIDE_ERROR();
16802 } IEM_MC_ENDIF();
16803
16804 IEM_MC_END();
16805 return VINF_SUCCESS;
16806 }
16807
16808 case IEMMODE_32BIT:
16809 {
16810 IEMOP_HLP_NO_LOCK_PREFIX();
16811 IEM_MC_BEGIN(4, 1);
16812 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16813 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16814 IEM_MC_ARG(uint32_t, u32Value, 2);
16815 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16816 IEM_MC_LOCAL(int32_t, rc);
16817
16818 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16819 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16820 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16821 IEM_MC_REF_EFLAGS(pEFlags);
16822 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16823 IEM_MC_IF_LOCAL_IS_Z(rc) {
16824 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16825 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16826 IEM_MC_ADVANCE_RIP();
16827 } IEM_MC_ELSE() {
16828 IEM_MC_RAISE_DIVIDE_ERROR();
16829 } IEM_MC_ENDIF();
16830
16831 IEM_MC_END();
16832 return VINF_SUCCESS;
16833 }
16834
16835 case IEMMODE_64BIT:
16836 {
16837 IEMOP_HLP_NO_LOCK_PREFIX();
16838 IEM_MC_BEGIN(4, 1);
16839 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16840 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16841 IEM_MC_ARG(uint64_t, u64Value, 2);
16842 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16843 IEM_MC_LOCAL(int32_t, rc);
16844
16845 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16846 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16847 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16848 IEM_MC_REF_EFLAGS(pEFlags);
16849 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16850 IEM_MC_IF_LOCAL_IS_Z(rc) {
16851 IEM_MC_ADVANCE_RIP();
16852 } IEM_MC_ELSE() {
16853 IEM_MC_RAISE_DIVIDE_ERROR();
16854 } IEM_MC_ENDIF();
16855
16856 IEM_MC_END();
16857 return VINF_SUCCESS;
16858 }
16859
16860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16861 }
16862 }
16863 else
16864 {
16865 /* memory access. */
16866 switch (pIemCpu->enmEffOpSize)
16867 {
16868 case IEMMODE_16BIT:
16869 {
16870 IEMOP_HLP_NO_LOCK_PREFIX();
16871 IEM_MC_BEGIN(4, 2);
16872 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16873 IEM_MC_ARG(uint16_t *, pu16DX, 1);
16874 IEM_MC_ARG(uint16_t, u16Value, 2);
16875 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16877 IEM_MC_LOCAL(int32_t, rc);
16878
16879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16880 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
16881 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16882 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
16883 IEM_MC_REF_EFLAGS(pEFlags);
16884 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
16885 IEM_MC_IF_LOCAL_IS_Z(rc) {
16886 IEM_MC_ADVANCE_RIP();
16887 } IEM_MC_ELSE() {
16888 IEM_MC_RAISE_DIVIDE_ERROR();
16889 } IEM_MC_ENDIF();
16890
16891 IEM_MC_END();
16892 return VINF_SUCCESS;
16893 }
16894
16895 case IEMMODE_32BIT:
16896 {
16897 IEMOP_HLP_NO_LOCK_PREFIX();
16898 IEM_MC_BEGIN(4, 2);
16899 IEM_MC_ARG(uint32_t *, pu32AX, 0);
16900 IEM_MC_ARG(uint32_t *, pu32DX, 1);
16901 IEM_MC_ARG(uint32_t, u32Value, 2);
16902 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16904 IEM_MC_LOCAL(int32_t, rc);
16905
16906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16907 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
16908 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
16909 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
16910 IEM_MC_REF_EFLAGS(pEFlags);
16911 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
16912 IEM_MC_IF_LOCAL_IS_Z(rc) {
16913 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
16914 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
16915 IEM_MC_ADVANCE_RIP();
16916 } IEM_MC_ELSE() {
16917 IEM_MC_RAISE_DIVIDE_ERROR();
16918 } IEM_MC_ENDIF();
16919
16920 IEM_MC_END();
16921 return VINF_SUCCESS;
16922 }
16923
16924 case IEMMODE_64BIT:
16925 {
16926 IEMOP_HLP_NO_LOCK_PREFIX();
16927 IEM_MC_BEGIN(4, 2);
16928 IEM_MC_ARG(uint64_t *, pu64AX, 0);
16929 IEM_MC_ARG(uint64_t *, pu64DX, 1);
16930 IEM_MC_ARG(uint64_t, u64Value, 2);
16931 IEM_MC_ARG(uint32_t *, pEFlags, 3);
16932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16933 IEM_MC_LOCAL(int32_t, rc);
16934
16935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16936 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
16937 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
16938 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
16939 IEM_MC_REF_EFLAGS(pEFlags);
16940 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
16941 IEM_MC_IF_LOCAL_IS_Z(rc) {
16942 IEM_MC_ADVANCE_RIP();
16943 } IEM_MC_ELSE() {
16944 IEM_MC_RAISE_DIVIDE_ERROR();
16945 } IEM_MC_ENDIF();
16946
16947 IEM_MC_END();
16948 return VINF_SUCCESS;
16949 }
16950
16951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16952 }
16953 }
16954}
16955
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3 (byte variant): the instruction is selected by the reg field of
       the ModR/M byte; the r/m field encodes the Eb operand and is forwarded
       to the per-instruction worker. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* SF, ZF, AF and PF are undefined after mul; tell the verifier so. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16992
16993
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3 (word/dword/qword variant): instruction selected by the ModR/M
       reg field; the r/m field encodes the Ev operand.  The mul/div workers
       take a multi-size function table (g_iemAImpl_*) rather than a single
       function pointer like the byte variant above. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are undefined after mul; tell the verifier so. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17030
17031
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* clc only clears CF; no other flags are touched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17043
17044
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* stc only sets CF; no other flags are touched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17056
17057
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Privilege/IOPL checks and the actual IF clearing live in the C worker. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17065
17066
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Privilege/IOPL checks and interrupt shadowing live in the C worker. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17073
17074
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* cld only clears DF; no other flags are touched. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17086
17087
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* std only sets DF; no other flags are touched. */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17099
17100
17101/** Opcode 0xfe. */
17102FNIEMOP_DEF(iemOp_Grp4)
17103{
17104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17105 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17106 {
17107 case 0:
17108 IEMOP_MNEMONIC("inc Ev");
17109 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17110 case 1:
17111 IEMOP_MNEMONIC("dec Ev");
17112 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17113 default:
17114 IEMOP_MNEMONIC("grp4-ud");
17115 return IEMOP_RAISE_INVALID_OPCODE();
17116 }
17117}
17118
17119
/**
 * Opcode 0xff /2.
 *
 * Near indirect call: the new RIP is read from the register or memory operand
 * selected by the ModR/M byte and handed to the iemCImpl_call_NN worker,
 * which pushes the return address and performs the branch.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. (Comment fixed; this is
           the mod != 3 path - the register comment above was a copy-paste.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17201
17202typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17203
17204FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17205{
17206 /* Registers? How?? */
17207 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17208 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17209
17210 /* Far pointer loaded from memory. */
17211 switch (pIemCpu->enmEffOpSize)
17212 {
17213 case IEMMODE_16BIT:
17214 IEM_MC_BEGIN(3, 1);
17215 IEM_MC_ARG(uint16_t, u16Sel, 0);
17216 IEM_MC_ARG(uint16_t, offSeg, 1);
17217 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17221 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17222 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17223 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17224 IEM_MC_END();
17225 return VINF_SUCCESS;
17226
17227 case IEMMODE_64BIT:
17228 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17229 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17230 * and call far qword [rsp] encodings. */
17231 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17232 {
17233 IEM_MC_BEGIN(3, 1);
17234 IEM_MC_ARG(uint16_t, u16Sel, 0);
17235 IEM_MC_ARG(uint64_t, offSeg, 1);
17236 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17240 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17241 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17242 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17243 IEM_MC_END();
17244 return VINF_SUCCESS;
17245 }
17246 /* AMD falls thru. */
17247
17248 case IEMMODE_32BIT:
17249 IEM_MC_BEGIN(3, 1);
17250 IEM_MC_ARG(uint16_t, u16Sel, 0);
17251 IEM_MC_ARG(uint32_t, offSeg, 1);
17252 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17256 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17257 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17258 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17259 IEM_MC_END();
17260 return VINF_SUCCESS;
17261
17262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17263 }
17264}
17265
17266
/**
 * Opcode 0xff /3.
 *
 * Far indirect call; all decoding and operand loading is done by the shared
 * far-branch helper, with iemCImpl_callf doing the actual branch.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17276
17277
/**
 * Opcode 0xff /4.
 *
 * Near indirect jump: the new RIP is read from the register or memory operand
 * selected by the ModR/M byte and installed via IEM_MC_SET_RIP_UNN.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17359
17360
/**
 * Opcode 0xff /5.
 *
 * Far indirect jump; all decoding and operand loading is done by the shared
 * far-branch helper, with iemCImpl_FarJmp doing the actual branch.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17370
17371
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev: register operands are delegated to the common push-GReg worker;
 * memory operands are fetched here and pushed with IEM_MC_PUSH_UNN.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17425
17426
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: the instruction is selected by the reg field of the ModR/M
       byte; reg value 7 is an undefined opcode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17455
17456
17457
/**
 * The one-byte opcode dispatch table (0x00 thru 0xff), indexed by the first
 * opcode byte.  Group opcodes (0x80-0x83, 0xc0/0xc1, 0xd0-0xd3, 0xf6/0xf7,
 * 0xfe, 0xff) dispatch further on the ModR/M reg field in their handlers.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17525
17526
17527/** @} */
17528
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette