VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60994

Last change on this file since 60994 was 60994, checked in by vboxsync, 9 years ago

testing

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 599.0 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60994 2016-05-17 06:01:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModRM byte itself; the Eb,Gb operand order (rm is the
 * destination, reg the source) is fixed here.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source comes from the reg field, destination from the rm field
           (both REX extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Map read-only when there is no locked variant: pfnLockedU8 is NULL
           for CMP/TEST, which never write the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModRM byte itself; Ev,Gv operand order (rm is the destination,
 * reg the source), switching on the effective operand size.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* NOTE(review): no IEM_NOT_REACHED_DEFAULT_CASE_RET here, unlike
           iemOpHlpBinaryOperator_rAX_Iz — relies on enmEffOpSize always being
           one of the three modes; confirm intentional. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write its destination, so don't zero-extend
                   the high half of the 64-bit register for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* NOTE(review): pfnLockedU8 is used as the has-locked-variant indicator
           for all operand sizes — presumably the locked workers are all present
           or all NULL (CMP/TEST); confirm. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModRM byte itself; Gb,Eb operand order (reg is the destination,
 * rm the source), so no LOCK prefix is ever valid and the memory operand is
 * only read.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is the rm field, destination the reg field (REX extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Plain read of the memory source; no mapping/commit needed since the
           destination is a register. */
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Decodes the ModRM byte itself; Gv,Ev operand order (reg is the destination,
 * rm the source), switching on the effective operand size.  The memory operand
 * is read-only, so no LOCK prefix is valid.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes clear the high half of the 64-bit register.
                   NOTE(review): unconditional here, unlike the rm_rv worker
                   which skips it for TEST — presumably TEST never uses this
                   Gv,Ev path; confirm. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * The immediate is fetched here; AL is always the destination, so there is no
 * ModRM byte and LOCK is never valid.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate size follows the effective operand size; in 64-bit mode the
 * immediate is a sign-extended dword, per the usual Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so don't zero-extend the
               high half of RAX for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    /* Shared handler for invalid opcodes: just raise the invalid-opcode
       exception. */
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 * SLDT - store the LDTR selector to a register (Rv, operand-size sensitive)
 * or to a 16-bit memory location (Mw). */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand
           size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1.
 * STR - store the task register selector to a register (Rv) or a 16-bit
 * memory location (Mw); mirrors the SLDT handler above. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        /* Register destination: width follows the effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand
           size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2.
 * LLDT - load the LDTR from a 16-bit register or memory operand; the heavy
 * lifting (privilege/selector checks) is done in iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check before reading the operand for the memory form. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3.
 * LTR - load the task register from a 16-bit register or memory operand;
 * privilege and selector validation happen in iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check before reading the operand for the memory form. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4.
 * VERR - verify a segment for reading; thin wrapper over the common VerX
 * worker with fWrite = false. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00.
 * Group 6 dispatcher - routes on the ModRM reg field (/0../7); /6 and /7 are
 * undefined and raise \#UD. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
787
788
/** Opcode 0x0f 0x01 /0.
 * SGDT - store the GDTR to memory; the actual store is done by
 * iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0 (VMCALL).
 * Not implemented: log a stub complaint and raise an invalid-opcode
 * exception. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0 (VMLAUNCH).
 * Not implemented: log a stub complaint and raise an invalid-opcode
 * exception. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0 (VMRESUME).
 * Not implemented: log a stub complaint and raise an invalid-opcode
 * exception. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0 (VMXOFF).
 * Not implemented: log a stub complaint and raise an invalid-opcode
 * exception. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1.
 * SIDT - store the IDTR to memory; the actual store is done by
 * iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1 (MONITOR).
 * Defers entirely to iemCImpl_monitor, passing the effective segment for the
 * address in rAX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1 (MWAIT).
 * Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2.
 * LGDT - load the GDTR from memory; the load and checks are done by
 * iemCImpl_lgdt.
 * NOTE(review): no IEMOP_HLP_MIN_286 here, unlike the sgdt/sidt handlers -
 * confirm whether that is intentional. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0 (XGETBV).
 * Raises \#UD unless the guest CPU reports XSAVE support; otherwise defers to
 * iemCImpl_xgetbv. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1 (XSETBV).
 * Raises \#UD unless the guest CPU reports XSAVE support; otherwise defers to
 * iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3.
 * LIDT - load the IDTR from memory; the load and checks are done by
 * iemCImpl_lidt. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    /* In 64-bit mode the operand size is forced to 64-bit (open-coded
       equivalent of IEMOP_HLP_64BIT_OP_SIZE). */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/* AMD-V (SVM) instructions, 0x0f 0x01 0xd8..0xdf - all stubbed to raise an
   invalid-opcode exception. */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: take the new MSW value straight from the GPR. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory form: fetch a 16-bit word from the effective address. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS; /* Privilege/PE checks are done by iemCImpl_lmsw. */
}
1058
1059
/** Opcode 0x0f 0x01 /7, memory form (register forms decode as swapgs/rdtscp). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    /* The effective address is only calculated, never dereferenced; the
       TLB invalidation itself is handled by iemCImpl_invlpg. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7, register form rm=0 (i.e. 0x0f 0x01 0xf8 - swapgs). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT(); /* Restricted to 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7, register form rm=1 (i.e. 0x0f 0x01 0xf9 - rdtscp). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet - complain and bail. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher: primary dispatch on the ModRM reg field; for
     * /0../3 and /7 the register forms (mod == 3) encode additional
     * instructions selected by the rm field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* sgdt (mem) / VMX instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* sidt (mem) / monitor+mwait (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* lgdt (mem) / xgetbv+xsetbv (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* lidt (mem) / AMD SVM instructions (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* rm is 3 bits; all 8 values handled above. */
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* invlpg (mem) / swapgs+rdtscp (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * Note: the old comment said "0x0f 0x00 /3", which was wrong - these are
 * standalone two-byte opcodes, not group members.
 *
 * @param   fIsLar      true for LAR, false for LSL; passed on to the
 *                      iemCImpl_LarLsl_u16/u64 workers as a constant arg.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: the selector comes from a 16-bit GPR. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the 64-bit worker; it only writes
               the bits appropriate for the effective operand size. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: the selector is a 16-bit word from memory. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02 - lar Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /* fIsLar */);
}


/** Opcode 0x0f 0x03 - lsl Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /* fIsLar */);
}
1289
1290
/** Opcode 0x0f 0x05 - syscall. All work deferred to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06 - clts (clear CR0.TS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07 - sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08 - invd (not implemented yet). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();


/** Opcode 0x0f 0x09 - wbinvd. Decoded and CPL-checked, otherwise a no-op. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /* Ring-0 only. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b - ud2, the architecturally defined invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid - prefetch takes a memory operand. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Calculate the address (can fault) but don't touch memory. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e - femms (3DNow!); not implemented yet. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! instructions (0x0f 0x0f + trailing opcode byte) - all stubs.
 * The trailing byte is decoded by iemOp_3Dnow below.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f - 3DNow! escape; the real opcode is the trailing byte. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
/** Opcode 0x0f 0x18 - group 16: SSE prefetch hints (memory forms only). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Calculate the address (can fault) but don't touch memory. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register forms are invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1554
1555
1556/** Opcode 0x0f 0x19..0x1f. */
1557FNIEMOP_DEF(iemOp_nop_Ev)
1558{
1559 IEMOP_HLP_NO_LOCK_PREFIX();
1560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1562 {
1563 IEM_MC_BEGIN(0, 0);
1564 IEM_MC_ADVANCE_RIP();
1565 IEM_MC_END();
1566 }
1567 else
1568 {
1569 IEM_MC_BEGIN(0, 1);
1570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1572 /* Currently a NOP. */
1573 IEM_MC_ADVANCE_RIP();
1574 IEM_MC_END();
1575 }
1576 return VINF_SUCCESS;
1577}
1578
1579
/** Opcode 0x0f 0x20 - mov Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
/** Opcode 0x0f 0x21 - mov Rd,Dd (read debug register; mod is ignored). */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* NOTE(review): 0x0f 0x23 uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX
       here - verify whether the two should be aligned. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R selects DR8+, which don't exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1626
1627
/** Opcode 0x0f 0x22 - mov Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23 - mov Dd,Rd (write debug register; mod is ignored). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R selects DR8+, which don't exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24 - mov Rd,Td (test registers; long gone, raises #UD). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26 - mov Td,Rd (test registers; long gone, raises #UD). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
/** Opcode 0x0f 0x28 - movaps/movapd Vps,Wps (66h prefix selects movapd). */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        /* SSE for movaps, SSE2 for movapd. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        /* Aligned access - raises #GP on a misaligned address. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1741
1742
/** Opcode 0x0f 0x29 - movaps/movapd Wps,Vps (store direction). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        /* SSE for movaps, SSE2 for movapd. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        /* Aligned access - raises #GP on a misaligned address. */
        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1788
1789
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT


/** Opcode 0x0f 0x2b. */
#if 0
/* NOTE(review): this disabled draft has the data direction reversed - it
   fetches from memory and stores to the XMM register, but MOVNTPS/MOVNTPD
   store the register to memory.  Fix before enabling. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
#endif
1830
1831
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1840
1841
/** Opcode 0x0f 0x30 - wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31 - rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32 - rdmsr.  (The old comment said 0x33, which is rdpmc.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1867
1868
/** Opcode 0x0f 0x33 - rdpmc.  (The old comment said 0x34, which is sysenter.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1883
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Register forms fetch Ev from a GPR; memory forms fetch it from the
 * effective address.  The 32-bit variants clear the high half of the
 * destination even when the condition is false (standard AMD64 semantics);
 * 16/64-bit variants leave the destination untouched on a false condition.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1984
1985
1986
1987/** Opcode 0x0f 0x40. */
1988FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1989{
1990 IEMOP_MNEMONIC("cmovo Gv,Ev");
1991 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1992}
1993
1994
1995/** Opcode 0x0f 0x41. */
1996FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
1997{
1998 IEMOP_MNEMONIC("cmovno Gv,Ev");
1999 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2000}
2001
2002
2003/** Opcode 0x0f 0x42. */
2004FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2005{
2006 IEMOP_MNEMONIC("cmovc Gv,Ev");
2007 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2008}
2009
2010
2011/** Opcode 0x0f 0x43. */
2012FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2013{
2014 IEMOP_MNEMONIC("cmovnc Gv,Ev");
2015 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2016}
2017
2018
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    /* CMOVE/CMOVZ: move when the zero flag (ZF) is set. */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2025
2026
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    /* CMOVNE/CMOVNZ: move when the zero flag (ZF) is clear. */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2033
2034
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    /* CMOVBE/CMOVNA: move when CF or ZF is set (below or equal). */
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2041
2042
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    /* CMOVNBE/CMOVA: move when both CF and ZF are clear (above). */
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2049
2050
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    /* CMOVS: move when the sign flag (SF) is set. */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2057
2058
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    /* CMOVNS: move when the sign flag (SF) is clear. */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2065
2066
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    /* CMOVP/CMOVPE: move when the parity flag (PF) is set. */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2073
2074
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    /* CMOVNP/CMOVPO: move when the parity flag (PF) is clear. */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2081
2082
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    /* CMOVL/CMOVNGE: move when SF != OF (signed less). */
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2089
2090
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    /* CMOVNL/CMOVGE: move when SF == OF (signed greater or equal). */
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2097
2098
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    /* CMOVLE/CMOVNG: move when ZF is set or SF != OF (signed less or equal). */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2105
2106
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    /* CMOVNLE/CMOVG: move when ZF is clear and SF == OF (signed greater). */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2113
#undef CMOV_X

/* NOTE(review): the following FNIEMOP_STUB entries are unimplemented opcodes
   0x0f 0x50..0x5f (SSE/SSE2 packed/scalar FP and conversions). */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2148
2149
2150/**
2151 * Common worker for SSE2 and MMX instructions on the forms:
2152 * pxxxx xmm1, xmm2/mem128
2153 * pxxxx mm1, mm2/mem32
2154 *
2155 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
2158 *
2159 * Exceptions type 4.
2160 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size prefix (0x66) selects the SSE (XMM) form; no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1); /* only the low qword of the source is used */
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 64-bit load, but with 128-bit alignment check (exception type 4). */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX form for this instruction (e.g. punpcklqdq) */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1); /* only the low dword of the source is used */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2256
2257
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Interleave low bytes; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2264
2265
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Interleave low words; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2272
2273
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Interleave low dwords; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2280
2281
2282/** Opcode 0x0f 0x63. */
/* NOTE(review): unimplemented opcodes 0x0f 0x63..0x67 (pack/compare). */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2292
2293
2294/**
2295 * Common worker for SSE2 and MMX instructions on the forms:
2296 * pxxxx xmm1, xmm2/mem128
2297 * pxxxx mm1, mm2/mem64
2298 *
2299 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
2302 *
2303 * Exceptions type 4.
2304 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size prefix (0x66) selects the SSE (XMM) form; no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* no MMX form for this instruction (e.g. punpckhqdq) */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2400
2401
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* Interleave high bytes; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2408
2409
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* Interleave high words; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2416
2417
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* Interleave high dwords; dispatched to MMX or SSE form by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2424
/** Opcode 0x0f 0x6b. NOTE(review): unimplemented stub. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2427
2428
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2 only; the worker rejects the no-prefix (MMX) form via the NULL pfnU64. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2435
2436
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2 only; the worker rejects the no-prefix (MMX) form via the NULL pfnU64. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2443
2444
/** Opcode 0x0f 0x6e. movd/movq from a general register or memory into an
 *  XMM (0x66 prefix) or MMX (no prefix) register; REX.W selects the 64-bit
 *  form, otherwise the 32-bit value is zero-extended into the destination. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2545
2546
/** Opcode 0x0f 0x6f. movq Pq,Qq (MMX), movdqa Vdq,Wdq (0x66) and
 *  movdqu Vdq,Wdq (0xf3) — register/memory load into MMX or XMM register. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* movdqa enforces 16-byte alignment; movdqu does not. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2640
2641
/** Opcode 0x0f 0x70. The immediate here is evil!
 *  Dispatches pshufd (0x66), pshuflw (0xf2), pshufhw (0xf3) and pshufw (no
 *  prefix, MMX/SSE-ext); the shuffle-order immediate (Ib) follows the ModRM
 *  bytes, so in the memory forms it must be fetched AFTER the effective
 *  address calculation. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Select the SSE worker according to which prefix is present. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the ModRM/SIB/displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the ModRM/SIB/displacement bytes. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2766
2767
/** Opcode 0x0f 0x71 11/2. NOTE(review): Grp12 workers are unimplemented stubs. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2785
2786
/** Opcode 0x0f 0x71. Group 12: immediate-count word shifts (psrlw/psraw/psllw);
 *  only the register (mod=3) encoding is valid. Dispatches on the ModRM reg
 *  field, then on the operand-size prefix (MMX vs SSE form). */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2821
2822
/** Opcode 0x0f 0x72 11/2. NOTE(review): Grp13 workers are unimplemented stubs. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2840
2841
/** Opcode 0x0f 0x72. Group 13: immediate-count dword shifts (psrld/psrad/pslld);
 *  only the register (mod=3) encoding is valid. Dispatches on the ModRM reg
 *  field, then on the operand-size prefix (MMX vs SSE form). */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2876
2877
/** Opcode 0x0f 0x73 11/2. NOTE(review): Grp14 workers are unimplemented stubs. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2895
2896
/** Opcode 0x0f 0x73. Group 14: immediate-count qword shifts (psrlq/psllq) and
 *  the SSE2-only byte shifts (psrldq/pslldq, reg 3/7, 0x66 prefix required);
 *  only the register (mod=3) encoding is valid. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2936
2937
2938/**
2939 * Common worker for SSE2 and MMX instructions on the forms:
2940 * pxxx mm1, mm2/mem64
2941 * pxxx xmm1, xmm2/mem128
2942 *
2943 * Proper alignment of the 128-bit operand is enforced.
2944 * Exceptions type 4. SSE2 and MMX cpuid checks.
2945 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size prefix (0x66) selects the SSE2 (XMM) form; no prefix the MMX form. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* 16-byte aligned access enforced (exception type 4). */
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3039
3040
/** Opcode 0x0f 0x74 - PCMPEQB; dispatches to the common MMX/SSE2 worker
 *  with the pcmpeqb implementation table. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3047
3048
/** Opcode 0x0f 0x75 - PCMPEQW; dispatches to the common MMX/SSE2 worker
 *  with the pcmpeqw implementation table. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3055
3056
/** Opcode 0x0f 0x76 - PCMPEQD; dispatches to the common MMX/SSE2 worker
 *  with the pcmpeqd implementation table.
 *  NOTE(review): the function name says "pcmped" (missing 'q') — typo kept
 *  since the opcode dispatch table elsewhere references this identifier. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3063
3064
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);                               /* not implemented yet */
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);                 /* UD stub - treated as invalid opcode (per macro name) */
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);                         /* UD stub - treated as invalid opcode (per macro name) */
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);     /* not implemented yet */
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);     /* not implemented yet */
3075
3076
/** Opcode 0x0f 0x7e - MOVD/MOVQ Ed/q,Pd/q (MMX) and MOVD/MOVQ Ed/q,Vd/q (SSE).
 *  Stores the low 32 or 64 bits (REX.W selects 64) of an MMX or XMM register
 *  to a general register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: 64-bit store from the low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: 32-bit store from the low dword of the XMM register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                /* NOTE(review): here the SSE2 exception check precedes effective
                 *  address calculation / decode completion, whereas the common
                 *  MMX/SSE2 worker above does it the other way around — confirm
                 *  the intended ordering. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3183
3184
/** Opcode 0x0f 0x7f - MOVQ Qq,Pq (MMX), MOVDQA Wdq,Vdq (66h) and
 *  MOVDQU Wdq,Vdq (F3h). Store form: moves from the MMX/XMM register
 *  operand to the rm operand. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.  (Alignment is irrelevant here.)
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* movdqa enforces 16-byte alignment on the store; movdqu does not. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3279
3280
3281
/** Opcode 0x0f 0x80 - JO Jv: jump near if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                    /* Jcc rel16/32 requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* Jcc defaults to 64-bit operand size in long mode. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {     /* taken when OF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3316
3317
/** Opcode 0x0f 0x81 - JNO Jv: jump near if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {     /* OF set: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3352
3353
/** Opcode 0x0f 0x82 - JC/JB/JNAE Jv: jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {     /* taken when CF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3388
3389
/** Opcode 0x0f 0x83 - JNC/JNB/JAE Jv: jump near if not carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {     /* CF set: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3424
3425
/** Opcode 0x0f 0x84 - JE/JZ Jv: jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {     /* taken when ZF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3460
3461
/** Opcode 0x0f 0x85 - JNE/JNZ Jv: jump near if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {     /* ZF set: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3496
3497
/** Opcode 0x0f 0x86 - JBE/JNA Jv: jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {   /* taken when CF or ZF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3532
3533
/** Opcode 0x0f 0x87 - JNBE/JA Jv: jump near if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {   /* CF or ZF set: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3568
3569
/** Opcode 0x0f 0x88 - JS Jv: jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {     /* taken when SF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3604
3605
/** Opcode 0x0f 0x89 - JNS Jv: jump near if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {     /* SF set: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3640
3641
/** Opcode 0x0f 0x8a - JP/JPE Jv: jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {     /* taken when PF is set */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3676
3677
3678/** Opcode 0x0f 0x8b. */
3679FNIEMOP_DEF(iemOp_jnp_Jv)
3680{
3681 IEMOP_MNEMONIC("jo Jv");
3682 IEMOP_HLP_MIN_386();
3683 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3684 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3685 {
3686 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3687 IEMOP_HLP_NO_LOCK_PREFIX();
3688
3689 IEM_MC_BEGIN(0, 0);
3690 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3691 IEM_MC_ADVANCE_RIP();
3692 } IEM_MC_ELSE() {
3693 IEM_MC_REL_JMP_S16(i16Imm);
3694 } IEM_MC_ENDIF();
3695 IEM_MC_END();
3696 }
3697 else
3698 {
3699 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3700 IEMOP_HLP_NO_LOCK_PREFIX();
3701
3702 IEM_MC_BEGIN(0, 0);
3703 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3704 IEM_MC_ADVANCE_RIP();
3705 } IEM_MC_ELSE() {
3706 IEM_MC_REL_JMP_S32(i32Imm);
3707 } IEM_MC_ENDIF();
3708 IEM_MC_END();
3709 }
3710 return VINF_SUCCESS;
3711}
3712
3713
/** Opcode 0x0f 0x8c - JL/JNGE Jv: jump near if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {     /* taken when SF != OF */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3748
3749
/** Opcode 0x0f 0x8d - JNL/JGE Jv: jump near if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {     /* SF != OF: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3784
3785
/** Opcode 0x0f 0x8e - JLE/JNG Jv: jump near if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {  /* taken when ZF set or SF != OF */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3820
3821
/** Opcode 0x0f 0x8f - JNLE/JG Jv: jump near if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {  /* ZF set or SF != OF: not taken */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3856
3857
/** Opcode 0x0f 0x90 - SETO Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();                /* SETcc requires a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1 when OF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1 when OF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3897
3898
/** Opcode 0x0f 0x91 - SETNO Eb: set byte to 1 if not overflow (OF=0), else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 0 when OF is set, 1 otherwise (inverted condition) */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 0 when OF is set, 1 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3938
3939
/** Opcode 0x0f 0x92 - SETC/SETB/SETNAE Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1 when CF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1 when CF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3979
3980
/** Opcode 0x0f 0x93 - SETNC/SETNB/SETAE Eb: set byte to 1 if not carry (CF=0), else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 0 when CF is set, 1 otherwise (inverted condition) */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 0 when CF is set, 1 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4020
4021
/** Opcode 0x0f 0x94 - SETE/SETZ Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1 when ZF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1 when ZF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4061
4062
/** Opcode 0x0f 0x95 - SETNE/SETNZ Eb: set byte to 1 if not equal/not zero (ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 0 when ZF is set, 1 otherwise (inverted condition) */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 0 when ZF is set, 1 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4102
4103
/** Opcode 0x0f 0x96 - SETBE/SETNA Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: store 1 when CF or ZF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target: store 1 when CF or ZF is set, 0 otherwise */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4143
4144
/** Opcode 0x0f 0x97 - setnbe/seta Eb: store 1 in the byte destination when
 *  CF=0 and ZF=0 (above, unsigned), otherwise 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Inverted branches: CF or ZF set means "below or equal", so store 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4184
4185
/** Opcode 0x0f 0x98 - sets Eb: store 1 in the byte destination when SF=1,
 *  otherwise 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4225
4226
/** Opcode 0x0f 0x99 - setns Eb: store 1 in the byte destination when SF=0,
 *  otherwise 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Inverted branches: SF set means the condition (SF=0) is false. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4266
4267
4268/** Opcode 0x0f 0x9a. */
4269FNIEMOP_DEF(iemOp_setp_Eb)
4270{
4271 IEMOP_MNEMONIC("setnp Eb");
4272 IEMOP_HLP_MIN_386();
4273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4274 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4275
4276 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4277 * any way. AMD says it's "unused", whatever that means. We're
4278 * ignoring for now. */
4279 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4280 {
4281 /* register target */
4282 IEM_MC_BEGIN(0, 0);
4283 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4284 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4285 } IEM_MC_ELSE() {
4286 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4287 } IEM_MC_ENDIF();
4288 IEM_MC_ADVANCE_RIP();
4289 IEM_MC_END();
4290 }
4291 else
4292 {
4293 /* memory target */
4294 IEM_MC_BEGIN(0, 1);
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4297 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4298 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4299 } IEM_MC_ELSE() {
4300 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4301 } IEM_MC_ENDIF();
4302 IEM_MC_ADVANCE_RIP();
4303 IEM_MC_END();
4304 }
4305 return VINF_SUCCESS;
4306}
4307
4308
/** Opcode 0x0f 0x9b - setnp/setpo Eb: store 1 in the byte destination when
 *  PF=0, otherwise 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Inverted branches: PF set means the condition (PF=0) is false. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4348
4349
/** Opcode 0x0f 0x9c - setl/setnge Eb: store 1 in the byte destination when
 *  SF!=OF (less, signed), otherwise 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4389
4390
/** Opcode 0x0f 0x9d - setnl/setge Eb: store 1 in the byte destination when
 *  SF==OF (greater or equal, signed), otherwise 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Inverted branches: SF!=OF means "less", so store 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4430
4431
/** Opcode 0x0f 0x9e - setle/setng Eb: store 1 in the byte destination when
 *  ZF=1 or SF!=OF (less or equal, signed), otherwise 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4471
4472
/** Opcode 0x0f 0x9f - setnle/setg Eb: store 1 in the byte destination when
 *  ZF=0 and SF==OF (greater, signed), otherwise 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     * any way. AMD says it's "unused", whatever that means. We're
     * ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        /* Inverted branches: ZF set or SF!=OF means "less or equal", so store 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4512
4513
4514/**
4515 * Common 'push segment-register' helper.
4516 */
4517FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4518{
4519 IEMOP_HLP_NO_LOCK_PREFIX();
4520 if (iReg < X86_SREG_FS)
4521 IEMOP_HLP_NO_64BIT();
4522 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4523
4524 switch (pIemCpu->enmEffOpSize)
4525 {
4526 case IEMMODE_16BIT:
4527 IEM_MC_BEGIN(0, 1);
4528 IEM_MC_LOCAL(uint16_t, u16Value);
4529 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4530 IEM_MC_PUSH_U16(u16Value);
4531 IEM_MC_ADVANCE_RIP();
4532 IEM_MC_END();
4533 break;
4534
4535 case IEMMODE_32BIT:
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(uint32_t, u32Value);
4538 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4539 IEM_MC_PUSH_U32_SREG(u32Value);
4540 IEM_MC_ADVANCE_RIP();
4541 IEM_MC_END();
4542 break;
4543
4544 case IEMMODE_64BIT:
4545 IEM_MC_BEGIN(0, 1);
4546 IEM_MC_LOCAL(uint64_t, u64Value);
4547 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4548 IEM_MC_PUSH_U64(u64Value);
4549 IEM_MC_ADVANCE_RIP();
4550 IEM_MC_END();
4551 break;
4552 }
4553
4554 return VINF_SUCCESS;
4555}
4556
4557
/** Opcode 0x0f 0xa0 - push fs (delegates to the common SReg push helper). */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4566
4567
/** Opcode 0x0f 0xa1 - pop fs (deferred to the C implementation worker). */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4576
4577
/** Opcode 0x0f 0xa2 - cpuid (deferred to the C implementation worker). */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4586
4587
4588/**
4589 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4590 * iemOp_bts_Ev_Gv.
4591 */
4592FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4593{
4594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4595 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4596
4597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4598 {
4599 /* register destination. */
4600 IEMOP_HLP_NO_LOCK_PREFIX();
4601 switch (pIemCpu->enmEffOpSize)
4602 {
4603 case IEMMODE_16BIT:
4604 IEM_MC_BEGIN(3, 0);
4605 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4606 IEM_MC_ARG(uint16_t, u16Src, 1);
4607 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4608
4609 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4610 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4611 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4612 IEM_MC_REF_EFLAGS(pEFlags);
4613 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4614
4615 IEM_MC_ADVANCE_RIP();
4616 IEM_MC_END();
4617 return VINF_SUCCESS;
4618
4619 case IEMMODE_32BIT:
4620 IEM_MC_BEGIN(3, 0);
4621 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4622 IEM_MC_ARG(uint32_t, u32Src, 1);
4623 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4624
4625 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4626 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4627 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4628 IEM_MC_REF_EFLAGS(pEFlags);
4629 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4630
4631 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4632 IEM_MC_ADVANCE_RIP();
4633 IEM_MC_END();
4634 return VINF_SUCCESS;
4635
4636 case IEMMODE_64BIT:
4637 IEM_MC_BEGIN(3, 0);
4638 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4639 IEM_MC_ARG(uint64_t, u64Src, 1);
4640 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4641
4642 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4643 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4644 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4645 IEM_MC_REF_EFLAGS(pEFlags);
4646 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4647
4648 IEM_MC_ADVANCE_RIP();
4649 IEM_MC_END();
4650 return VINF_SUCCESS;
4651
4652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4653 }
4654 }
4655 else
4656 {
4657 /* memory destination. */
4658
4659 uint32_t fAccess;
4660 if (pImpl->pfnLockedU16)
4661 fAccess = IEM_ACCESS_DATA_RW;
4662 else /* BT */
4663 {
4664 IEMOP_HLP_NO_LOCK_PREFIX();
4665 fAccess = IEM_ACCESS_DATA_R;
4666 }
4667
4668 NOREF(fAccess);
4669
4670 /** @todo test negative bit offsets! */
4671 switch (pIemCpu->enmEffOpSize)
4672 {
4673 case IEMMODE_16BIT:
4674 IEM_MC_BEGIN(3, 2);
4675 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4676 IEM_MC_ARG(uint16_t, u16Src, 1);
4677 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4679 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4680
4681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4682 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4683 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4684 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4685 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4686 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4687 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4688 IEM_MC_FETCH_EFLAGS(EFlags);
4689
4690 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4691 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4692 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4693 else
4694 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4695 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4696
4697 IEM_MC_COMMIT_EFLAGS(EFlags);
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 return VINF_SUCCESS;
4701
4702 case IEMMODE_32BIT:
4703 IEM_MC_BEGIN(3, 2);
4704 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4705 IEM_MC_ARG(uint32_t, u32Src, 1);
4706 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4708 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4709
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4711 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4712 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4713 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4714 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4715 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4716 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4717 IEM_MC_FETCH_EFLAGS(EFlags);
4718
4719 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4720 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4721 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4722 else
4723 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4724 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4725
4726 IEM_MC_COMMIT_EFLAGS(EFlags);
4727 IEM_MC_ADVANCE_RIP();
4728 IEM_MC_END();
4729 return VINF_SUCCESS;
4730
4731 case IEMMODE_64BIT:
4732 IEM_MC_BEGIN(3, 2);
4733 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4734 IEM_MC_ARG(uint64_t, u64Src, 1);
4735 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4737 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4738
4739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4740 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4741 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4742 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4743 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4744 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4745 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4746 IEM_MC_FETCH_EFLAGS(EFlags);
4747
4748 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4749 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4751 else
4752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4754
4755 IEM_MC_COMMIT_EFLAGS(EFlags);
4756 IEM_MC_ADVANCE_RIP();
4757 IEM_MC_END();
4758 return VINF_SUCCESS;
4759
4760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4761 }
4762 }
4763}
4764
4765
4766/** Opcode 0x0f 0xa3. */
4767FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4768{
4769 IEMOP_MNEMONIC("bt Gv,Gv");
4770 IEMOP_HLP_MIN_386();
4771 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4772}
4773
4774
4775/**
4776 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
4777 */
4778FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
4779{
4780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4781 IEMOP_HLP_NO_LOCK_PREFIX();
4782 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4783
4784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4785 {
4786 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4787 IEMOP_HLP_NO_LOCK_PREFIX();
4788
4789 switch (pIemCpu->enmEffOpSize)
4790 {
4791 case IEMMODE_16BIT:
4792 IEM_MC_BEGIN(4, 0);
4793 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4794 IEM_MC_ARG(uint16_t, u16Src, 1);
4795 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4796 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4797
4798 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4799 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4800 IEM_MC_REF_EFLAGS(pEFlags);
4801 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4802
4803 IEM_MC_ADVANCE_RIP();
4804 IEM_MC_END();
4805 return VINF_SUCCESS;
4806
4807 case IEMMODE_32BIT:
4808 IEM_MC_BEGIN(4, 0);
4809 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4810 IEM_MC_ARG(uint32_t, u32Src, 1);
4811 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4812 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4813
4814 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4815 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4816 IEM_MC_REF_EFLAGS(pEFlags);
4817 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4818
4819 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4820 IEM_MC_ADVANCE_RIP();
4821 IEM_MC_END();
4822 return VINF_SUCCESS;
4823
4824 case IEMMODE_64BIT:
4825 IEM_MC_BEGIN(4, 0);
4826 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4827 IEM_MC_ARG(uint64_t, u64Src, 1);
4828 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
4829 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4830
4831 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4832 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4833 IEM_MC_REF_EFLAGS(pEFlags);
4834 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4835
4836 IEM_MC_ADVANCE_RIP();
4837 IEM_MC_END();
4838 return VINF_SUCCESS;
4839
4840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4841 }
4842 }
4843 else
4844 {
4845 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4846
4847 switch (pIemCpu->enmEffOpSize)
4848 {
4849 case IEMMODE_16BIT:
4850 IEM_MC_BEGIN(4, 2);
4851 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4852 IEM_MC_ARG(uint16_t, u16Src, 1);
4853 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4854 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4856
4857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4858 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4859 IEM_MC_ASSIGN(cShiftArg, cShift);
4860 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4861 IEM_MC_FETCH_EFLAGS(EFlags);
4862 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4863 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4864
4865 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4866 IEM_MC_COMMIT_EFLAGS(EFlags);
4867 IEM_MC_ADVANCE_RIP();
4868 IEM_MC_END();
4869 return VINF_SUCCESS;
4870
4871 case IEMMODE_32BIT:
4872 IEM_MC_BEGIN(4, 2);
4873 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4874 IEM_MC_ARG(uint32_t, u32Src, 1);
4875 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4876 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4878
4879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4880 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4881 IEM_MC_ASSIGN(cShiftArg, cShift);
4882 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4883 IEM_MC_FETCH_EFLAGS(EFlags);
4884 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4885 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4886
4887 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4888 IEM_MC_COMMIT_EFLAGS(EFlags);
4889 IEM_MC_ADVANCE_RIP();
4890 IEM_MC_END();
4891 return VINF_SUCCESS;
4892
4893 case IEMMODE_64BIT:
4894 IEM_MC_BEGIN(4, 2);
4895 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4896 IEM_MC_ARG(uint64_t, u64Src, 1);
4897 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4898 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
4899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4900
4901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
4902 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
4903 IEM_MC_ASSIGN(cShiftArg, cShift);
4904 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4905 IEM_MC_FETCH_EFLAGS(EFlags);
4906 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4907 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4908
4909 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4910 IEM_MC_COMMIT_EFLAGS(EFlags);
4911 IEM_MC_ADVANCE_RIP();
4912 IEM_MC_END();
4913 return VINF_SUCCESS;
4914
4915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4916 }
4917 }
4918}
4919
4920
4921/**
4922 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
4923 */
4924FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
4925{
4926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4927 IEMOP_HLP_NO_LOCK_PREFIX();
4928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
4929
4930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4931 {
4932 IEMOP_HLP_NO_LOCK_PREFIX();
4933
4934 switch (pIemCpu->enmEffOpSize)
4935 {
4936 case IEMMODE_16BIT:
4937 IEM_MC_BEGIN(4, 0);
4938 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4939 IEM_MC_ARG(uint16_t, u16Src, 1);
4940 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4941 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4942
4943 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4944 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4945 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4946 IEM_MC_REF_EFLAGS(pEFlags);
4947 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
4948
4949 IEM_MC_ADVANCE_RIP();
4950 IEM_MC_END();
4951 return VINF_SUCCESS;
4952
4953 case IEMMODE_32BIT:
4954 IEM_MC_BEGIN(4, 0);
4955 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4956 IEM_MC_ARG(uint32_t, u32Src, 1);
4957 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4958 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4959
4960 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4961 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4962 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4963 IEM_MC_REF_EFLAGS(pEFlags);
4964 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
4965
4966 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4967 IEM_MC_ADVANCE_RIP();
4968 IEM_MC_END();
4969 return VINF_SUCCESS;
4970
4971 case IEMMODE_64BIT:
4972 IEM_MC_BEGIN(4, 0);
4973 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4974 IEM_MC_ARG(uint64_t, u64Src, 1);
4975 IEM_MC_ARG(uint8_t, cShiftArg, 2);
4976 IEM_MC_ARG(uint32_t *, pEFlags, 3);
4977
4978 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4979 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4980 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
4981 IEM_MC_REF_EFLAGS(pEFlags);
4982 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
4983
4984 IEM_MC_ADVANCE_RIP();
4985 IEM_MC_END();
4986 return VINF_SUCCESS;
4987
4988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4989 }
4990 }
4991 else
4992 {
4993 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4994
4995 switch (pIemCpu->enmEffOpSize)
4996 {
4997 case IEMMODE_16BIT:
4998 IEM_MC_BEGIN(4, 2);
4999 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5000 IEM_MC_ARG(uint16_t, u16Src, 1);
5001 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5002 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5004
5005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5006 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5007 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5008 IEM_MC_FETCH_EFLAGS(EFlags);
5009 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5010 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5011
5012 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5013 IEM_MC_COMMIT_EFLAGS(EFlags);
5014 IEM_MC_ADVANCE_RIP();
5015 IEM_MC_END();
5016 return VINF_SUCCESS;
5017
5018 case IEMMODE_32BIT:
5019 IEM_MC_BEGIN(4, 2);
5020 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5021 IEM_MC_ARG(uint32_t, u32Src, 1);
5022 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5023 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5025
5026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5027 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5028 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5029 IEM_MC_FETCH_EFLAGS(EFlags);
5030 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5031 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5032
5033 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5034 IEM_MC_COMMIT_EFLAGS(EFlags);
5035 IEM_MC_ADVANCE_RIP();
5036 IEM_MC_END();
5037 return VINF_SUCCESS;
5038
5039 case IEMMODE_64BIT:
5040 IEM_MC_BEGIN(4, 2);
5041 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5042 IEM_MC_ARG(uint64_t, u64Src, 1);
5043 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5046
5047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5048 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5049 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5050 IEM_MC_FETCH_EFLAGS(EFlags);
5051 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5052 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5053
5054 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5055 IEM_MC_COMMIT_EFLAGS(EFlags);
5056 IEM_MC_ADVANCE_RIP();
5057 IEM_MC_END();
5058 return VINF_SUCCESS;
5059
5060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5061 }
5062 }
5063}
5064
5065
5066
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - double precision shift left with immediate count;
       defers to the common shld/shrd immediate-count worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5074
5075
/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - double precision shift left with count in CL;
       defers to the common shld/shrd CL-count worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5083
5084
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - pushes the GS segment selector via the common
       segment-register push worker. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5093
5094
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - pops into the GS segment register; segment loading has
       side effects (descriptor checks), so it is done as a C implementation. */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5103
5104
/** Opcode 0x0f 0xaa. */
/* RSM (resume from system management mode) - not implemented yet, stub. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5108
5109
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - bit test and set; defers to the common bit-op worker. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5117
5118
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - double precision shift right with immediate count;
       same worker as SHLD, different assembly implementation table. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5126
5127
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - double precision shift right with count in CL. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5135
5136
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /*
     * FXSAVE m512 - save FPU/MMX/SSE state to memory. Raises \#UD when the
     * guest CPU profile lacks FXSAVE/FXRSTOR support; otherwise defers the
     * heavy lifting to the iemCImpl_fxsave C implementation.
     */
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* Decoding complete only after the effective address bytes. */
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5155
5156
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /*
     * FXRSTOR m512 - restore FPU/MMX/SSE state from memory. Mirrors the
     * FXSAVE decoder above, deferring to iemCImpl_fxrstor.
     */
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* Decoding complete only after the effective address bytes. */
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5175
5176
/* Remaining group 15 memory forms: ldmxcsr/stmxcsr/clflush are not
   implemented yet (stubs); xsave/xrstor/xsaveopt raise #UD. */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5194
5195
5196/** Opcode 0x0f 0xae 11b/5. */
5197FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5198{
5199 IEMOP_MNEMONIC("lfence");
5200 IEMOP_HLP_NO_LOCK_PREFIX();
5201 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5202 return IEMOP_RAISE_INVALID_OPCODE();
5203
5204 IEM_MC_BEGIN(0, 0);
5205 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5206 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5207 else
5208 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5209 IEM_MC_ADVANCE_RIP();
5210 IEM_MC_END();
5211 return VINF_SUCCESS;
5212}
5213
5214
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /*
     * MFENCE - full memory fence. Same guest-SSE2 gate and host-capability
     * fallback scheme as LFENCE above.
     */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5232
5233
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /*
     * SFENCE - store fence. Same guest-SSE2 gate and host-capability
     * fallback scheme as LFENCE/MFENCE above.  (NOTE(review): SFENCE was
     * introduced with SSE1; the fSse2 gate here matches its siblings --
     * confirm intent.)
     */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5251
5252
/* F3-prefixed group 15 register forms (RDFSBASE/RDGSBASE/WRFSBASE/WRGSBASE):
   all raise #UD for now. */

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5264
5265
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  Memory forms (mod != 3) dispatch purely on the
     * reg field; register forms additionally dispatch on the prefix bytes
     * (no prefix -> fence instructions, F3 -> {rd,wr}{fs,gs}base, anything
     * else -> #UD).
     */
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms - prefix selects the sub-table. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5325
5326
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    /* IMUL Gv,Ev - two operand signed multiply; SF/ZF/AF/PF are
       architecturally undefined after this instruction. */
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5335
5336
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    /*
     * CMPXCHG Eb,Gb - byte compare and exchange with AL.  The assembly
     * implementation updates destination, AL (via reference) and EFLAGS;
     * a locked variant is selected when the LOCK prefix is present.
     */
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* 486+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination - operate directly on the register references. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - map the byte R/W, run the helper on a local
           AL copy, then commit memory, EFLAGS and AL unconditionally. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* AL written back whether or not the compare succeeded. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5395
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    /*
     * CMPXCHG Ev,Gv - word/dword/qword compare and exchange with
     * AX/EAX/RAX.  Structured like the byte variant above, replicated per
     * effective operand size.  On 32-bit hosts (RT_ARCH_X86) the 64-bit
     * source is passed by reference to the assembly helper instead of by
     * value.
     */
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* 486+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination - map R/W, run helper against a local
           accumulator copy, then commit memory, EFLAGS and the accumulator. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5589
5590
/**
 * Common worker for LSS/LFS/LGS (and kin): loads a far pointer
 * (offset + 16-bit selector) from memory into a general register and the
 * given segment register.  The selector follows the offset in memory, so
 * its displacement is 2/4/8 bytes depending on the effective operand size.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte (memory form only; caller asserts).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2); /* selector at offset + 2 */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4); /* selector at offset + 4 */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8); /* selector at offset + 8 */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5652
5653
/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    /* LSS Gv,Mp - load far pointer into SS:Gv; register operand form is
       invalid (#UD). */
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5664
5665
5666/** Opcode 0x0f 0xb3. */
5667FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5668{
5669 IEMOP_MNEMONIC("btr Ev,Gv");
5670 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5671}
5672
5673
/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    /* LFS Gv,Mp - load far pointer into FS:Gv; register operand form is
       invalid (#UD). */
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5684
5685
/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    /* LGS Gv,Mp - load far pointer into GS:Gv; register operand form is
       invalid (#UD). */
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5696
5697
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    /*
     * MOVZX Gv,Eb - zero-extend a byte (register or memory) into a
     * 16/32/64-bit general register, one IEM_MC body per operand size.
     */
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5788
5789
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    /*
     * MOVZX Gv,Ew - zero-extend a word into a 32/64-bit general register.
     * 16-bit and 32-bit effective operand sizes share the 32-bit path.
     */
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5856
5857
/** Opcode 0x0f 0xb8. */
/* POPCNT Gv,Ev (F3 prefix) / JMPE - not implemented yet, stub. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5860
5861
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1) - always raises an invalid-opcode exception; logged
       for diagnostic purposes. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5868
5869
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /*
     * Group 8: BT/BTS/BTR/BTC Ev,Ib - bit operations with an immediate bit
     * offset.  The reg field selects the operation (0-3 are invalid); the
     * immediate is masked to the operand width (0x0f/0x1f/0x3f).
     */
    IEMOP_HLP_MIN_386(); /* 386+ instructions. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT (no locked variant) only reads the destination; the others
           need read/write access and honour the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 follows the displacement. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 follows the displacement. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = an imm8 follows the displacement. */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6032
6033
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* Bit test and complement: defers to the common Ev,Gv bit-op worker with
       the btc implementation function table.  Requires a 386 or later. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6041
6042
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* Bit scan forward, implemented via the generic rv,rm binary-operator
       helper.  OF/SF/AF/PF/CF are declared undefined for the verifier since
       their post-instruction values are not architecturally specified. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6051
6052
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* Bit scan reverse; mirrors iemOp_bsf_Gv_Ev above but uses the bsr
       implementation table.  Same undefined-flags declaration. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6061
6062
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb: load a byte (register or memory) sign-extended to the
       effective operand size into the destination general register. */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the byte GREG sign-extended and store it
           into the ModR/M reg-field destination (REX bits OR'ed in). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Same three operand sizes, but the byte comes from the effective
           address computed from ModR/M (no extra immediate bytes, hence 0). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6153
6154
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: load a word (register or memory) sign-extended to 32 or
       64 bits.  Only two cases: non-64-bit modes extend to 32 bits, 64-bit
       operand size extends to 64 bits. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6221
6222
6223/** Opcode 0x0f 0xc0. */
6224FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6225{
6226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6227 IEMOP_HLP_MIN_486();
6228 IEMOP_MNEMONIC("xadd Eb,Gb");
6229
6230 /*
6231 * If rm is denoting a register, no more instruction bytes.
6232 */
6233 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6234 {
6235 IEMOP_HLP_NO_LOCK_PREFIX();
6236
6237 IEM_MC_BEGIN(3, 0);
6238 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6239 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6240 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6241
6242 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6243 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6244 IEM_MC_REF_EFLAGS(pEFlags);
6245 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6246
6247 IEM_MC_ADVANCE_RIP();
6248 IEM_MC_END();
6249 }
6250 else
6251 {
6252 /*
6253 * We're accessing memory.
6254 */
6255 IEM_MC_BEGIN(3, 3);
6256 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6257 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6258 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6259 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6261
6262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6263 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6264 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6265 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6266 IEM_MC_FETCH_EFLAGS(EFlags);
6267 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6268 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6269 else
6270 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6271
6272 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6273 IEM_MC_COMMIT_EFLAGS(EFlags);
6274 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
6275 IEM_MC_ADVANCE_RIP();
6276 IEM_MC_END();
6277 return VINF_SUCCESS;
6278 }
6279 return VINF_SUCCESS;
6280}
6281
6282
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv for 16/32/64-bit operand sizes.  The iemAImpl_xadd_uNN
       workers update both operands; the memory form uses a local copy of
       the source register which is stored back after the (possibly locked)
       operation.  486 or later. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes clear the high dword of both
                   operands (standard 64-bit mode behaviour). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint16_t *,  pu16Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t,  u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint32_t *,  pu32Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t,  u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint64_t *,  pu64Reg,         1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t,  u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6435
/*
 * Opcodes 0x0f 0xc2..0xc6 (SSE/MMX compare, non-temporal move, insert/extract
 * word, shuffle) are not implemented yet; FNIEMOP_STUB emits placeholder
 * decoders (see the macro definition for the exact stub behaviour).
 */
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6450
6451
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: EDX:EAX and ECX:EBX are gathered into RTUINT64U pairs
       and passed by reference, together with the mapped 64-bit memory
       destination, to the (possibly locked) iemAImpl_cmpxchg8b worker.
       When ZF ends up clear the u64EaxEdx pair is written back to EAX/EDX -
       presumably the worker loads the current memory value into it on a
       compare miss (NOTE(review): confirm against the assembly helper). */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Comparand: EDX:EAX. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Replacement value: ECX:EBX. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6496
6497
/*
 * Remaining group 9 encodings (cmpxchg16b, rdrand, VMX pointer ops) are not
 * implemented; FNIEMOP_UD_STUB_1 generates stub decoders - presumably raising
 * \#UD (NOTE(review): confirm against the macro definition).
 */
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6515
6516
6517/** Opcode 0x0f 0xc7. */
6518FNIEMOP_DEF(iemOp_Grp9)
6519{
6520 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6522 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6523 {
6524 case 0: case 2: case 3: case 4: case 5:
6525 return IEMOP_RAISE_INVALID_OPCODE();
6526 case 1:
6527 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6528 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6529 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6530 return IEMOP_RAISE_INVALID_OPCODE();
6531 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6532 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6533 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6534 case 6:
6535 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6536 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6537 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6538 {
6539 case 0:
6540 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6541 case IEM_OP_PRF_SIZE_OP:
6542 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6543 case IEM_OP_PRF_REPZ:
6544 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6545 default:
6546 return IEMOP_RAISE_INVALID_OPCODE();
6547 }
6548 case 7:
6549 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6550 {
6551 case 0:
6552 case IEM_OP_PRF_REPZ:
6553 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6554 default:
6555 return IEMOP_RAISE_INVALID_OPCODE();
6556 }
6557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6558 }
6559}
6560
6561
6562/**
6563 * Common 'bswap register' helper.
6564 */
6565FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6566{
6567 IEMOP_HLP_NO_LOCK_PREFIX();
6568 switch (pIemCpu->enmEffOpSize)
6569 {
6570 case IEMMODE_16BIT:
6571 IEM_MC_BEGIN(1, 0);
6572 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6573 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6574 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6575 IEM_MC_ADVANCE_RIP();
6576 IEM_MC_END();
6577 return VINF_SUCCESS;
6578
6579 case IEMMODE_32BIT:
6580 IEM_MC_BEGIN(1, 0);
6581 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6582 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6583 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6584 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6585 IEM_MC_ADVANCE_RIP();
6586 IEM_MC_END();
6587 return VINF_SUCCESS;
6588
6589 case IEMMODE_64BIT:
6590 IEM_MC_BEGIN(1, 0);
6591 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6592 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6593 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6594 IEM_MC_ADVANCE_RIP();
6595 IEM_MC_END();
6596 return VINF_SUCCESS;
6597
6598 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6599 }
6600}
6601
6602
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8: thin wrapper over the common bswap helper. */
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6613
6614
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9; see iemOp_bswap_rAX_r8 regarding the REX.B note. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6622
6623
6624/** Opcode 0x0f 0xca. */
6625FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6626{
6627 IEMOP_MNEMONIC("bswap rDX/r9");
6628 IEMOP_HLP_MIN_486();
6629 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6630}
6631
6632
6633/** Opcode 0x0f 0xcb. */
6634FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6635{
6636 IEMOP_MNEMONIC("bswap rBX/r9");
6637 IEMOP_HLP_MIN_486();
6638 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6639}
6640
6641
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12; see iemOp_bswap_rAX_r8 regarding the REX.B note. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6649
6650
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13; see iemOp_bswap_rAX_r8 regarding the REX.B note. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6658
6659
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14; see iemOp_bswap_rAX_r8 regarding the REX.B note. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6667
6668
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15; see iemOp_bswap_rAX_r8 regarding the REX.B note. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6676
6677
6678
/*
 * Opcodes 0x0f 0xd0..0xd6 (SSE3 addsub, MMX/SSE shifts, paddq, pmullw, movq
 * variants) are not implemented yet; FNIEMOP_STUB emits placeholder decoders.
 */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6693
6694
6695/** Opcode 0x0f 0xd7. */
6696FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6697{
6698 /* Docs says register only. */
6699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6700 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6701 return IEMOP_RAISE_INVALID_OPCODE();
6702
6703 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6704 /** @todo testcase: Check that the instruction implicitly clears the high
6705 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6706 * and opcode modifications are made to work with the whole width (not
6707 * just 128). */
6708 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6709 {
6710 case IEM_OP_PRF_SIZE_OP: /* SSE */
6711 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6712 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6713 IEM_MC_BEGIN(2, 0);
6714 IEM_MC_ARG(uint64_t *, pDst, 0);
6715 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6716 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6717 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6718 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6719 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6720 IEM_MC_ADVANCE_RIP();
6721 IEM_MC_END();
6722 return VINF_SUCCESS;
6723
6724 case 0: /* MMX */
6725 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6726 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6727 IEM_MC_BEGIN(2, 0);
6728 IEM_MC_ARG(uint64_t *, pDst, 0);
6729 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6730 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6731 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6732 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6733 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6734 IEM_MC_ADVANCE_RIP();
6735 IEM_MC_END();
6736 return VINF_SUCCESS;
6737
6738 default:
6739 return IEMOP_RAISE_INVALID_OPCODE();
6740 }
6741}
6742
6743
/*
 * Opcodes 0x0f 0xd8..0xee (MMX/SSE saturating arithmetic, min/max, logic,
 * averages, multiplies, conversions, non-temporal stores) are not implemented
 * yet; FNIEMOP_STUB emits placeholder decoders.
 */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6790
6791
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    /* PXOR: defers to the common MMX/SSE2 full,full->full worker with the
       pxor implementation table. */
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6798
6799
/*
 * Opcodes 0x0f 0xf0..0xfe (lddqu, MMX/SSE left shifts, multiplies, sum of
 * absolute differences, masked moves, packed subtract/add) are not implemented
 * yet; FNIEMOP_STUB emits placeholder decoders.
 */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6830
6831
6832const PFNIEMOP g_apfnTwoByteMap[256] =
6833{
6834 /* 0x00 */ iemOp_Grp6,
6835 /* 0x01 */ iemOp_Grp7,
6836 /* 0x02 */ iemOp_lar_Gv_Ew,
6837 /* 0x03 */ iemOp_lsl_Gv_Ew,
6838 /* 0x04 */ iemOp_Invalid,
6839 /* 0x05 */ iemOp_syscall,
6840 /* 0x06 */ iemOp_clts,
6841 /* 0x07 */ iemOp_sysret,
6842 /* 0x08 */ iemOp_invd,
6843 /* 0x09 */ iemOp_wbinvd,
6844 /* 0x0a */ iemOp_Invalid,
6845 /* 0x0b */ iemOp_ud2,
6846 /* 0x0c */ iemOp_Invalid,
6847 /* 0x0d */ iemOp_nop_Ev_GrpP,
6848 /* 0x0e */ iemOp_femms,
6849 /* 0x0f */ iemOp_3Dnow,
6850 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6851 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6852 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6853 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6854 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6855 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6856 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6857 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6858 /* 0x18 */ iemOp_prefetch_Grp16,
6859 /* 0x19 */ iemOp_nop_Ev,
6860 /* 0x1a */ iemOp_nop_Ev,
6861 /* 0x1b */ iemOp_nop_Ev,
6862 /* 0x1c */ iemOp_nop_Ev,
6863 /* 0x1d */ iemOp_nop_Ev,
6864 /* 0x1e */ iemOp_nop_Ev,
6865 /* 0x1f */ iemOp_nop_Ev,
6866 /* 0x20 */ iemOp_mov_Rd_Cd,
6867 /* 0x21 */ iemOp_mov_Rd_Dd,
6868 /* 0x22 */ iemOp_mov_Cd_Rd,
6869 /* 0x23 */ iemOp_mov_Dd_Rd,
6870 /* 0x24 */ iemOp_mov_Rd_Td,
6871 /* 0x25 */ iemOp_Invalid,
6872 /* 0x26 */ iemOp_mov_Td_Rd,
6873 /* 0x27 */ iemOp_Invalid,
6874 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6875 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6876 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6877 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6878 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6879 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6880 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6881 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6882 /* 0x30 */ iemOp_wrmsr,
6883 /* 0x31 */ iemOp_rdtsc,
6884 /* 0x32 */ iemOp_rdmsr,
6885 /* 0x33 */ iemOp_rdpmc,
6886 /* 0x34 */ iemOp_sysenter,
6887 /* 0x35 */ iemOp_sysexit,
6888 /* 0x36 */ iemOp_Invalid,
6889 /* 0x37 */ iemOp_getsec,
6890 /* 0x38 */ iemOp_3byte_Esc_A4,
6891 /* 0x39 */ iemOp_Invalid,
6892 /* 0x3a */ iemOp_3byte_Esc_A5,
6893 /* 0x3b */ iemOp_Invalid,
6894 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6895 /* 0x3d */ iemOp_Invalid,
6896 /* 0x3e */ iemOp_Invalid,
6897 /* 0x3f */ iemOp_Invalid,
6898 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6899 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6900 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6901 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6902 /* 0x44 */ iemOp_cmove_Gv_Ev,
6903 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6904 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6905 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6906 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6907 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6908 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6909 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6910 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6911 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6912 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6913 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6914 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6915 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6916 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6917 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6918 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6919 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6920 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6921 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6922 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6923 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6924 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6925 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6926 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6927 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6928 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6929 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6930 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6931 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6932 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6933 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6934 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6935 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6936 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6937 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6938 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6939 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6940 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6941 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6942 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6943 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6944 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6945 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6946 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6947 /* 0x71 */ iemOp_Grp12,
6948 /* 0x72 */ iemOp_Grp13,
6949 /* 0x73 */ iemOp_Grp14,
6950 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6951 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6952 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6953 /* 0x77 */ iemOp_emms,
6954 /* 0x78 */ iemOp_vmread_AmdGrp17,
6955 /* 0x79 */ iemOp_vmwrite,
6956 /* 0x7a */ iemOp_Invalid,
6957 /* 0x7b */ iemOp_Invalid,
6958 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6959 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6960 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6961 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6962 /* 0x80 */ iemOp_jo_Jv,
6963 /* 0x81 */ iemOp_jno_Jv,
6964 /* 0x82 */ iemOp_jc_Jv,
6965 /* 0x83 */ iemOp_jnc_Jv,
6966 /* 0x84 */ iemOp_je_Jv,
6967 /* 0x85 */ iemOp_jne_Jv,
6968 /* 0x86 */ iemOp_jbe_Jv,
6969 /* 0x87 */ iemOp_jnbe_Jv,
6970 /* 0x88 */ iemOp_js_Jv,
6971 /* 0x89 */ iemOp_jns_Jv,
6972 /* 0x8a */ iemOp_jp_Jv,
6973 /* 0x8b */ iemOp_jnp_Jv,
6974 /* 0x8c */ iemOp_jl_Jv,
6975 /* 0x8d */ iemOp_jnl_Jv,
6976 /* 0x8e */ iemOp_jle_Jv,
6977 /* 0x8f */ iemOp_jnle_Jv,
6978 /* 0x90 */ iemOp_seto_Eb,
6979 /* 0x91 */ iemOp_setno_Eb,
6980 /* 0x92 */ iemOp_setc_Eb,
6981 /* 0x93 */ iemOp_setnc_Eb,
6982 /* 0x94 */ iemOp_sete_Eb,
6983 /* 0x95 */ iemOp_setne_Eb,
6984 /* 0x96 */ iemOp_setbe_Eb,
6985 /* 0x97 */ iemOp_setnbe_Eb,
6986 /* 0x98 */ iemOp_sets_Eb,
6987 /* 0x99 */ iemOp_setns_Eb,
6988 /* 0x9a */ iemOp_setp_Eb,
6989 /* 0x9b */ iemOp_setnp_Eb,
6990 /* 0x9c */ iemOp_setl_Eb,
6991 /* 0x9d */ iemOp_setnl_Eb,
6992 /* 0x9e */ iemOp_setle_Eb,
6993 /* 0x9f */ iemOp_setnle_Eb,
6994 /* 0xa0 */ iemOp_push_fs,
6995 /* 0xa1 */ iemOp_pop_fs,
6996 /* 0xa2 */ iemOp_cpuid,
6997 /* 0xa3 */ iemOp_bt_Ev_Gv,
6998 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6999 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7000 /* 0xa6 */ iemOp_Invalid,
7001 /* 0xa7 */ iemOp_Invalid,
7002 /* 0xa8 */ iemOp_push_gs,
7003 /* 0xa9 */ iemOp_pop_gs,
7004 /* 0xaa */ iemOp_rsm,
7005 /* 0xab */ iemOp_bts_Ev_Gv,
7006 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7007 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7008 /* 0xae */ iemOp_Grp15,
7009 /* 0xaf */ iemOp_imul_Gv_Ev,
7010 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7011 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7012 /* 0xb2 */ iemOp_lss_Gv_Mp,
7013 /* 0xb3 */ iemOp_btr_Ev_Gv,
7014 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7015 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7016 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7017 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7018 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7019 /* 0xb9 */ iemOp_Grp10,
7020 /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
7022 /* 0xbc */ iemOp_bsf_Gv_Ev,
7023 /* 0xbd */ iemOp_bsr_Gv_Ev,
7024 /* 0xbe */ iemOp_movsx_Gv_Eb,
7025 /* 0xbf */ iemOp_movsx_Gv_Ew,
7026 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7027 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7028 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7029 /* 0xc3 */ iemOp_movnti_My_Gy,
7030 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7031 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7032 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7033 /* 0xc7 */ iemOp_Grp9,
7034 /* 0xc8 */ iemOp_bswap_rAX_r8,
7035 /* 0xc9 */ iemOp_bswap_rCX_r9,
7036 /* 0xca */ iemOp_bswap_rDX_r10,
7037 /* 0xcb */ iemOp_bswap_rBX_r11,
7038 /* 0xcc */ iemOp_bswap_rSP_r12,
7039 /* 0xcd */ iemOp_bswap_rBP_r13,
7040 /* 0xce */ iemOp_bswap_rSI_r14,
7041 /* 0xcf */ iemOp_bswap_rDI_r15,
7042 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7043 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7044 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7045 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7046 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7047 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7048 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7049 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7050 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7051 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7052 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7053 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7054 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7055 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7056 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7057 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7058 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7059 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7060 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7061 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7062 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7063 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7064 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7065 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7066 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7067 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7068 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7069 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7070 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7071 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7072 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7073 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7074 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7075 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7076 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7077 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7078 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7079 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7080 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7081 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7082 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7083 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7084 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7085 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7086 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7087 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7088 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7089 /* 0xff */ iemOp_Invalid
7090};
7091
7092/** @} */
7093
7094
7095/** @name One byte opcodes.
7096 *
7097 * @{
7098 */
7099
/** Opcode 0x00 - add Eb,Gb: byte ADD, memory/register destination. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    /* Common "binary op, r/m8 destination" decoder with the ADD worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv: word/dword/qword ADD, memory/register destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb: byte ADD, register destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev: word/dword/qword ADD, register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add AL,Ib: immediate byte ADD into AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz: immediate ADD into rAX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7146
7147
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    /* Common segment-register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* POP ES is not valid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* NOTE(review): the NO_64BIT / NO_LOCK_PREFIX check order here is swapped
       relative to iemOp_pop_SS and iemOp_pop_DS - confirm that is intentional. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7164
7165
/** Opcode 0x08 - or Eb,Gb: byte OR, memory/register destination. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7173
7174
7175/** Opcode 0x09. */
7176FNIEMOP_DEF(iemOp_or_Ev_Gv)
7177{
7178 IEMOP_MNEMONIC("or Ev,Gv ");
7179 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7180 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7181}
7182
7183
/** Opcode 0x0a - or Gb,Eb: byte OR, register destination. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    /* AF is architecturally undefined after OR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev: word/dword/qword OR, register destination. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or AL,Ib: immediate byte OR into AL. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz: immediate OR into rAX. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7218
7219
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - two-byte opcode escape: fetch the next byte and dispatch
 *  through the two-byte opcode table. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();    /* 0x0f escape requires a 286 or later. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7236
/** Opcode 0x10 - adc Eb,Gb: byte add-with-carry, memory/register destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    /* Common "binary op, r/m8 destination" decoder with the ADC worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc Ev,Gv: word/dword/qword add-with-carry, memory/register destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc Gb,Eb: byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc Gv,Ev: word/dword/qword add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc AL,Ib: immediate byte add-with-carry into AL. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX,Iz: immediate add-with-carry into rAX. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7283
7284
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop ss; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7301
7302
/** Opcode 0x18 - sbb Eb,Gb: byte subtract-with-borrow, memory/register destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    /* Common "binary op, r/m8 destination" decoder with the SBB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb Ev,Gv: word/dword/qword subtract-with-borrow, memory/register destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb Gb,Eb: byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb Gv,Ev: word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb AL,Ib: immediate byte subtract-with-borrow from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX,Iz: immediate subtract-with-borrow from rAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7349
7350
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop ds; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7367
7368
/** Opcode 0x20 - and Eb,Gb: byte AND, memory/register destination. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and Ev,Gv: word/dword/qword AND, memory/register destination. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and Gb,Eb: byte AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and Gv,Ev: word/dword/qword AND, register destination. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and AL,Ib: immediate byte AND into AL. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX,Iz: immediate AND into rAX. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7421
7422
/** Opcode 0x26 - ES segment override prefix: record the override and decode
 *  the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - daa: decimal adjust AL after addition; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAA; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7444
7445
/** Opcode 0x28 - sub Eb,Gb: byte SUB, memory/register destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    /* Common "binary op, r/m8 destination" decoder with the SUB worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub Ev,Gv: word/dword/qword SUB, memory/register destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub Gb,Eb: byte SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub Gv,Ev: word/dword/qword SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub AL,Ib: immediate byte SUB from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX,Iz: immediate SUB from rAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7492
7493
/** Opcode 0x2e - CS segment override prefix: record the override and decode
 *  the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - das: decimal adjust AL after subtraction; not valid in 64-bit mode. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is architecturally undefined after DAS; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7515
7516
/** Opcode 0x30 - xor Eb,Gb: byte XOR, memory/register destination. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor Ev,Gv: word/dword/qword XOR, memory/register destination. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor Gb,Eb: byte XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor Gv,Ev: word/dword/qword XOR, register destination. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor AL,Ib: immediate byte XOR into AL. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX,Iz: immediate XOR into rAX. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7569
7570
/** Opcode 0x36 - SS segment override prefix: record the override and decode
 *  the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - aaa (ASCII adjust AL after addition); not implemented yet, stubbed. */
FNIEMOP_STUB(iemOp_aaa);
7585
7586
/** Opcode 0x38 - cmp Eb,Gb: byte compare (SUB without writeback), r/m operand first. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp Ev,Gv: word/dword/qword compare, r/m operand first. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp Gb,Eb: byte compare, register operand first.
 *  NOTE(review): no explicit lock-prefix check here, unlike 0x38/0x39 -
 *  presumably handled by the common worker; confirm. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp Gv,Ev: word/dword/qword compare, register operand first. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp AL,Ib: compare AL with immediate byte. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX,Iz: compare rAX with immediate. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7635
7636
/** Opcode 0x3e - DS segment override prefix: record the override and decode
 *  the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - aas (ASCII adjust AL after subtraction); not implemented yet, stubbed. */
FNIEMOP_STUB(iemOp_aas);
7651
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Selects the 16/32/64-bit worker from the given implementation table based
 * on the current effective operand size and applies it to the specified
 * general purpose register, updating EFLAGS via the worker.
 *
 * @param   pImpl   The unary operation implementation table (16/32/64-bit
 *                  workers).
 * @param   iReg    The general purpose register index to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* NOTE(review): presumably unreachable (all IEMMODE values handled
       above); kept to satisfy the compiler - confirm. */
    return VINF_SUCCESS;
}
7696
7697
/** Opcode 0x40 - inc eAX, or plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * Opcodes 0x40-0x4f are REX prefixes in 64-bit mode: record the prefix
     * bits and restart decoding with the next opcode byte.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - inc eCX, or REX.B prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;    /* REX.B extends the r/m/base register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - inc eDX, or REX.X prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;    /* REX.X extends the SIB index register field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - inc eBX, or REX.BX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - inc eSP, or REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;      /* REX.R extends the ModRM reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - inc eBP, or REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - inc eSI, or REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - inc eDI, or REX.RBX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7869
7870
7871/** Opcode 0x48. */
7872FNIEMOP_DEF(iemOp_dec_eAX)
7873{
7874 /*
7875 * This is a REX prefix in 64-bit mode.
7876 */
7877 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7878 {
7879 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
7880 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
7881 iemRecalEffOpSize(pIemCpu);
7882
7883 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7884 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7885 }
7886
7887 IEMOP_MNEMONIC("dec eAX");
7888 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
7889}
7890
7891
7892/** Opcode 0x49. */
7893FNIEMOP_DEF(iemOp_dec_eCX)
7894{
7895 /*
7896 * This is a REX prefix in 64-bit mode.
7897 */
7898 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7899 {
7900 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
7901 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
7902 pIemCpu->uRexB = 1 << 3;
7903 iemRecalEffOpSize(pIemCpu);
7904
7905 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7906 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7907 }
7908
7909 IEMOP_MNEMONIC("dec eCX");
7910 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
7911}
7912
7913
7914/** Opcode 0x4a. */
7915FNIEMOP_DEF(iemOp_dec_eDX)
7916{
7917 /*
7918 * This is a REX prefix in 64-bit mode.
7919 */
7920 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7921 {
7922 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
7923 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7924 pIemCpu->uRexIndex = 1 << 3;
7925 iemRecalEffOpSize(pIemCpu);
7926
7927 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7928 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7929 }
7930
7931 IEMOP_MNEMONIC("dec eDX");
7932 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
7933}
7934
7935
7936/** Opcode 0x4b. */
7937FNIEMOP_DEF(iemOp_dec_eBX)
7938{
7939 /*
7940 * This is a REX prefix in 64-bit mode.
7941 */
7942 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7943 {
7944 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
7945 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
7946 pIemCpu->uRexB = 1 << 3;
7947 pIemCpu->uRexIndex = 1 << 3;
7948 iemRecalEffOpSize(pIemCpu);
7949
7950 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7951 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7952 }
7953
7954 IEMOP_MNEMONIC("dec eBX");
7955 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
7956}
7957
7958
7959/** Opcode 0x4c. */
7960FNIEMOP_DEF(iemOp_dec_eSP)
7961{
7962 /*
7963 * This is a REX prefix in 64-bit mode.
7964 */
7965 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7966 {
7967 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
7968 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
7969 pIemCpu->uRexReg = 1 << 3;
7970 iemRecalEffOpSize(pIemCpu);
7971
7972 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7973 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7974 }
7975
7976 IEMOP_MNEMONIC("dec eSP");
7977 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
7978}
7979
7980
/**
 * Opcode 0x4d - 'dec eBP' in 16/32-bit mode; in 64-bit mode this byte is the
 * REX.RBW prefix instead.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        /* 0x4d sets REX.R (ModRM reg extension), REX.B (r/m / base extension)
           and REX.W (64-bit operand size). */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;      /* bit 3 of the ModRM reg register number */
        pIemCpu->uRexB   = 1 << 3;      /* bit 3 of the r/m / base register number */
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size */

        /* Fetch and dispatch the real opcode following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
8002
8003
/**
 * Opcode 0x4e - 'dec eSI' in 16/32-bit mode; in 64-bit mode this byte is the
 * REX.RXW prefix instead.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        /* 0x4e sets REX.R (ModRM reg extension), REX.X (SIB index extension)
           and REX.W (64-bit operand size). */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;    /* bit 3 of the ModRM reg register number */
        pIemCpu->uRexIndex = 1 << 3;    /* bit 3 of the SIB index register number */
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size */

        /* Fetch and dispatch the real opcode following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8025
8026
/**
 * Opcode 0x4f - 'dec eDI' in 16/32-bit mode; in 64-bit mode this byte is the
 * REX.RBXW prefix (all extension bits plus 64-bit operand size) instead.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        /* 0x4f sets REX.R, REX.B, REX.X and REX.W. */
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;    /* bit 3 of the ModRM reg register number */
        pIemCpu->uRexB     = 1 << 3;    /* bit 3 of the r/m / base register number */
        pIemCpu->uRexIndex = 1 << 3;    /* bit 3 of the SIB index register number */
        iemRecalEffOpSize(pIemCpu);     /* REX.W changes the effective operand size */

        /* Fetch and dispatch the real opcode following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8049
8050
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  the REX.B bit in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* In 64-bit mode the push defaults to 64-bit operand size and the
           operand-size prefix selects 16-bit; 32-bit is not encodable. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8096
8097
/** Opcode 0x50 - 'push rAX'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8104
8105
/** Opcode 0x51 - 'push rCX'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8112
8113
/** Opcode 0x52 - 'push rDX'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8120
8121
/** Opcode 0x53 - 'push rBX'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8128
8129
/**
 * Opcode 0x54 - 'push rSP'.
 *
 * On the 8086 target, PUSH SP stores the post-decrement value of SP (hence
 * the explicit SP-2 path below); later CPUs push the pre-decrement value and
 * go through the common worker.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);  /* 8086 quirk: SP is pushed as-if already decremented */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        /* NOTE(review): this path relies on IEM_MC_ADVANCE_RIP embedding the
           return, so the common-worker call below is not reached for 8086 —
           confirm against the IEM_MC_* macro definitions in IEMAll.cpp. */
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
8146
8147
/** Opcode 0x55 - 'push rBP'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8154
8155
/** Opcode 0x56 - 'push rSI'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8162
8163
/** Opcode 0x57 - 'push rDI'; dispatches to the common push-register worker. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8170
8171
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  the REX.B bit in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* In 64-bit mode the pop defaults to 64-bit operand size and the
           operand-size prefix selects 16-bit; 32-bit is not encodable. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8218
8219
/** Opcode 0x58 - 'pop rAX'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8226
8227
/** Opcode 0x59 - 'pop rCX'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8234
8235
/** Opcode 0x5a - 'pop rDX'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8242
8243
/** Opcode 0x5b - 'pop rBX'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8250
8251
/**
 * Opcode 0x5c - 'pop rSP'.
 *
 * Needs special handling because the destination is the stack pointer itself:
 * the popped value must replace SP/ESP/RSP rather than be combined with the
 * implicit stack-pointer increment.  With REX.B in 64-bit mode this encodes
 * 'pop r12' and takes the common worker path instead.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Same operand-size fixup as iemOpCommonPopGReg: 64-bit default,
           66h prefix selects 16-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8299
8300
/** Opcode 0x5d - 'pop rBP'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8307
8308
/** Opcode 0x5e - 'pop rSI'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8315
8316
/** Opcode 0x5f - 'pop rDI'; dispatches to the common pop-register worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8323
8324
/**
 * Opcode 0x60 - 'pusha' / 'pushad'.
 *
 * 186+ only, invalid in 64-bit mode; defers to a C implementation picked by
 * the effective operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8336
8337
/**
 * Opcode 0x61 - 'popa' / 'popad'.
 *
 * 186+ only, invalid in 64-bit mode; defers to a C implementation picked by
 * the effective operand size.
 */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8349
8350
/** Opcode 0x62 - BOUND Gv,Ma (also the EVEX escape byte); unimplemented stub. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186(); /* to be applied when the stub gets implemented (186+ instruction) */
8354
8355
/**
 * Opcode 0x63, non-64-bit modes - 'arpl Ew,Gw'.
 *
 * Adjust-RPL: 286+ protected-mode only (real/V86 mode rejected below).
 * The destination word may be a register or memory; the memory form maps the
 * operand read/write and commits EFLAGS afterwards.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Effective address first, then the decoding-done marker, then the
           read/write mapping of the destination word. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8405
8406
/** Opcode 0x63, 64-bit mode - 'movsxd Gv,Ev' (sign-extend 32-bit to 64-bit).
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the 32-bit source and sign-extend to 64 bits in one go. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8448
8449
/**
 * Opcode 0x64 - FS segment-override prefix (386+).
 *
 * Records the prefix, sets the effective segment and dispatches the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg    = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8462
8463
/**
 * Opcode 0x65 - GS segment-override prefix (386+).
 *
 * Records the prefix, sets the effective segment and dispatches the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg    = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8476
8477
/**
 * Opcode 0x66 - operand-size override prefix (386+).
 *
 * Records the prefix, re-derives the effective operand size and dispatches
 * the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8490
8491
/**
 * Opcode 0x67 - address-size override prefix (386+).
 *
 * Toggles the effective address mode relative to the default (16<->32 in
 * legacy modes, 64->32 in long mode) and dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8510
8511
/**
 * Opcode 0x68 - 'push Iz' (186+).
 *
 * Pushes a word/dword immediate; in 64-bit mode the immediate is a 32-bit
 * value sign-extended to 64 bits (see the GET_NEXT_S32_SX_U64 fetch).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8556
8557
/**
 * Opcode 0x69 - 'imul Gv,Ev,Iz' (186+): Gv = Ev * Iz (word/dword/sign-extended
 * dword immediate, matching the operand size).
 *
 * SF/ZF/AF/PF are undefined after IMUL, hence the verification mask below.
 * For the memory forms the effective-address calculation is told how many
 * immediate bytes still follow (2 or 4) so displacement decoding accounts
 * for them.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Work on a local copy so the result can be stored into Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8717
8718
/**
 * Opcode 0x6a - 'push Ib' (186+).
 *
 * Pushes a byte immediate sign-extended to the effective operand size; the
 * signed i8Imm value carries the sign extension into the wider push.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8745
8746
/**
 * Opcode 0x6b - 'imul Gv,Ev,Ib' (186+): Gv = Ev * sign-extended Ib.
 *
 * Same structure as the Iz form (opcode 0x69) but with a one-byte immediate
 * sign-extended to the operand size; SF/ZF/AF/PF are undefined after IMUL.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Work on a local copy so the result can be stored into Gv afterwards. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 immediate byte follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
8900
8901
/**
 * Opcode 0x6c - 'ins Yb,DX' (186+).
 *
 * Byte string input from port DX; both REP and REPNE prefixes select the
 * repeated variant here.  Defers to a C implementation chosen by the
 * effective address mode.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8930
8931
/**
 * Opcode 0x6d - 'ins Yv,DX' (186+).
 *
 * Word/dword string input from port DX; both REP and REPNE select the
 * repeated variant.  A 64-bit operand size is handled like 32-bit since
 * INS has no 64-bit item size (note the 64/32 case fall-together below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* treated as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8992
8993
/**
 * Opcode 0x6e - 'outs DX,Yb' (186+).
 *
 * Byte string output to port DX; both REP and REPNE select the repeated
 * variant.  The effective segment is passed along since OUTS reads from
 * a segment-overridable source.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9022
9023
/** Opcode 0x6f - OUTSW/OUTSD: write word/dword string at [e/r]SI out to the port in DX. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* String I/O first appeared on the 80186. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Both F3 (REPZ) and F2 (REPNZ) act as plain REP for string I/O. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        /* Select by operand size, then by effective address size.  A 64-bit
           operand size shares the 32-bit implementation (no 64-bit OUT). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9084
9085
/** Opcode 0x70 - JO Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    /* Fetch the signed 8-bit displacement that follows the opcode. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9103
9104
/** Opcode 0x71 - JNO Jb: jump short if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when OF is set, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9122
/** Opcode 0x72 - JC/JB/JNAE Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9140
9141
/** Opcode 0x73 - JNC/JNB/JAE Jb: jump short if no carry (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when CF is set, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9159
9160
/** Opcode 0x74 - JE/JZ Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9178
9179
/** Opcode 0x75 - JNE/JNZ Jb: jump short if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when ZF is set, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9197
9198
/** Opcode 0x76 - JBE/JNA Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9216
9217
/** Opcode 0x77 - JNBE/JA Jb: jump short if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when CF or ZF is set, jump otherwise. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9235
9236
/** Opcode 0x78 - JS Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9254
9255
/** Opcode 0x79 - JNS Jb: jump short if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when SF is set, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9273
9274
/** Opcode 0x7a - JP/JPE Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9292
9293
/** Opcode 0x7b - JNP/JPO Jb: jump short if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when PF is set, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9311
9312
/** Opcode 0x7c - JL/JNGE Jb: jump short if less, signed (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9330
9331
/** Opcode 0x7d - JNL/JGE Jb: jump short if greater or equal, signed (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when SF != OF, jump otherwise. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9349
9350
/** Opcode 0x7e - JLE/JNG Jb: jump short if less or equal, signed (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9368
9369
/** Opcode 0x7f - JNLE/JG Jb: jump short if greater, signed (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted condition: fall through when ZF=1 or SF != OF, jump otherwise. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9387
9388
/** Opcode 0x80 - Group 1 Eb,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP r/m8, imm8
 *  with the operation selected by the ModRM reg field. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked from a packed string of 4-byte entries indexed by /reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();    /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* An op without a locked worker (CMP) only reads the destination
           and must reject the LOCK prefix. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Trailing '1': one immediate byte still follows the ModRM bytes -
           presumably needed for RIP-relative addressing; TODO confirm. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9447
9448
/** Opcode 0x81 - Group 1 Ev,Iz: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP r/m16/32/64, imm16/32
 *  with the operation selected by the ModRM reg field.  The 64-bit form uses
 *  an imm32 sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked from a packed string of 4-byte entries indexed by /reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked worker: destination is read-only and
                   the LOCK prefix must be rejected. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Trailing '2': two immediate bytes follow the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Trailing '4': four immediate bytes follow the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Trailing '4': the imm32 (sign-extended below) follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9623
9624
/** Opcode 0x82 - undocumented alias of 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    /* Otherwise identical to opcode 0x80. */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9631
9632
/** Opcode 0x83 - Group 1 Ev,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP r/m16/32/64,
 *  imm8 sign-extended to the operand size; operation selected by ModRM /reg. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic is picked from a packed string of 4-byte entries indexed by /reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the imm8 to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP has no locked worker: destination is read-only and the LOCK
           prefix must be rejected. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Trailing '1': one immediate byte follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9796
9797
/** Opcode 0x84 - TEST Eb,Gb: AND without writing the result; only EFLAGS are updated. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    /* Shares the generic r/m8,r8 binary-op decoder with the TEST worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9806
9807
/** Opcode 0x85 - TEST Ev,Gv: AND without writing the result; only EFLAGS are updated. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    /* Shares the generic r/m,reg binary-op decoder with the TEST worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9816
9817
/** Opcode 0x86 - XCHG Eb,Gb: exchange a byte register with r/m8.
 *  The memory form is implicitly locked on real CPUs. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register/register: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9865
9866
/** Opcode 0x87 - XCHG Ev,Gv: exchange a word/dword/qword register with r/m.
 *  The memory form is implicitly locked on real CPUs. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register/register: swap via two temporaries, per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* Note: the U32 stores implicitly zero the high halves of both
                   destination registers. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit write: clear the high half of the register operand. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9988
9989
/** Opcode 0x88 - MOV Eb,Gb: store a byte register into r/m8. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10028
10029
/** Opcode 0x89 - MOV Ev,Gv: store a word/dword/qword register into r/m. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register, per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10116
10117
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Moves a byte register or memory operand (r/m field) into a byte
 * general register (reg field).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: source from r/m + REX.B, destination from reg + REX.R. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10154
10155
10156/** Opcode 0x8b. */
10157FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10158{
10159 IEMOP_MNEMONIC("mov Gv,Ev");
10160
10161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10162 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10163
10164 /*
10165 * If rm is denoting a register, no more instruction bytes.
10166 */
10167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10168 {
10169 switch (pIemCpu->enmEffOpSize)
10170 {
10171 case IEMMODE_16BIT:
10172 IEM_MC_BEGIN(0, 1);
10173 IEM_MC_LOCAL(uint16_t, u16Value);
10174 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10175 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
10176 IEM_MC_ADVANCE_RIP();
10177 IEM_MC_END();
10178 break;
10179
10180 case IEMMODE_32BIT:
10181 IEM_MC_BEGIN(0, 1);
10182 IEM_MC_LOCAL(uint32_t, u32Value);
10183 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10184 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
10185 IEM_MC_ADVANCE_RIP();
10186 IEM_MC_END();
10187 break;
10188
10189 case IEMMODE_64BIT:
10190 IEM_MC_BEGIN(0, 1);
10191 IEM_MC_LOCAL(uint64_t, u64Value);
10192 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
10193 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
10194 IEM_MC_ADVANCE_RIP();
10195 IEM_MC_END();
10196 break;
10197 }
10198 }
10199 else
10200 {
10201 /*
10202 * We're loading a register from memory.
10203 */
10204 switch (pIemCpu->enmEffOpSize)
10205 {
10206 case IEMMODE_16BIT:
10207 IEM_MC_BEGIN(0, 2);
10208 IEM_MC_LOCAL(uint16_t, u16Value);
10209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10211 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
10212 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
10213 IEM_MC_ADVANCE_RIP();
10214 IEM_MC_END();
10215 break;
10216
10217 case IEMMODE_32BIT:
10218 IEM_MC_BEGIN(0, 2);
10219 IEM_MC_LOCAL(uint32_t, u32Value);
10220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10222 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
10223 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
10224 IEM_MC_ADVANCE_RIP();
10225 IEM_MC_END();
10226 break;
10227
10228 case IEMMODE_64BIT:
10229 IEM_MC_BEGIN(0, 2);
10230 IEM_MC_LOCAL(uint64_t, u64Value);
10231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10233 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
10234 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
10235 IEM_MC_ADVANCE_RIP();
10236 IEM_MC_END();
10237 break;
10238 }
10239 }
10240 return VINF_SUCCESS;
10241}
10242
10243
/**
 * Opcode 0x63 - ARPL Ew,Gw outside 64-bit mode, MOVSXD Gv,Ev in 64-bit mode.
 *
 * Dispatches on the current CPU mode and effective operand size:
 *  - not 64-bit mode:              arpl Ew,Gw
 *  - 64-bit mode, non-64-bit op:   plain mov Gv,Ev (same decoder as 0x8b)
 *  - 64-bit mode, 64-bit op:       movsxd Gv,Ev
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
10253
10254
/**
 * Opcode 0x8c - mov Ev,Sw.
 *
 * Stores a segment register (reg field, REX.R ignored) into a general
 * register or a word-sized memory location.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* The 3-bit reg field indexes segment registers; values above GS (6, 7) are invalid. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extended to the full 32-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extended to the full 64-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10327
10328
10329
10330
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Computes the effective address of the memory operand and stores it in the
 * destination general register, truncated to the effective operand size.
 * The register form (mod == 3) is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* Truncate the effective address to 16 bits before storing. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            /* Truncate the effective address to 32 bits before storing. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* Full 64-bit address, no truncation needed. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10375
10376
/**
 * Opcode 0x8e - mov Sw,Ev.
 *
 * Loads a segment register (reg field, REX.R ignored) from a general
 * register or a word-sized memory operand.  Loading CS is invalid, as are
 * reg values above GS.  The actual segment load is done by the
 * iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10430
10431
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * Pops a value off the stack into a register or memory operand.  The memory
 * form is tricky because Intel specifies that RSP is incremented before the
 * effective address is calculated, which clashes with this decoder's
 * one-pass decode+calc approach; see the comments below for the double
 * decode workaround.
 *
 * @param   bRm     The already-fetched ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: consume the addressing bytes, then rewind the opcode
       pointer so the second pass re-reads the very same bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily bump RSP by the operand size (per Intel's
       "increment before EA calc" rule), recalculate, then restore RSP. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit RSP and advance RIP only when both pop and store succeeded. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10533
10534
/**
 * Opcode 0x8f - group 1A.
 *
 * /0 is pop Ev; /1 thru /7 are the AMD XOP prefix (not yet decoded, raises
 * \#UD for now).
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC("3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
10547
10548
10549/**
10550 * Common 'xchg reg,rAX' helper.
10551 */
10552FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10553{
10554 IEMOP_HLP_NO_LOCK_PREFIX();
10555
10556 iReg |= pIemCpu->uRexB;
10557 switch (pIemCpu->enmEffOpSize)
10558 {
10559 case IEMMODE_16BIT:
10560 IEM_MC_BEGIN(0, 2);
10561 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10562 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10563 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10564 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10565 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10566 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10567 IEM_MC_ADVANCE_RIP();
10568 IEM_MC_END();
10569 return VINF_SUCCESS;
10570
10571 case IEMMODE_32BIT:
10572 IEM_MC_BEGIN(0, 2);
10573 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10574 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10575 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10576 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10577 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10578 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10579 IEM_MC_ADVANCE_RIP();
10580 IEM_MC_END();
10581 return VINF_SUCCESS;
10582
10583 case IEMMODE_64BIT:
10584 IEM_MC_BEGIN(0, 2);
10585 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10586 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10587 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10588 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10589 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10590 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10591 IEM_MC_ADVANCE_RIP();
10592 IEM_MC_END();
10593 return VINF_SUCCESS;
10594
10595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10596 }
10597}
10598
10599
/**
 * Opcode 0x90 - nop / pause / xchg r8,rAX.
 *
 * With a REX.B prefix this is 'xchg r8,rAX'; with a lock (F3 used as pause
 * indicator here) prefix it is 'pause'; otherwise a plain 'nop'.  Pause and
 * nop are both emulated as no-ops that just advance RIP.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10619
10620
/** Opcode 0x91 - xchg rCX,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10627
10628
/** Opcode 0x92 - xchg rDX,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10635
10636
/** Opcode 0x93 - xchg rBX,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10643
10644
10645/** Opcode 0x94. */
10646FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10647{
10648 IEMOP_MNEMONIC("xchg rSX,rAX");
10649 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10650}
10651
10652
/** Opcode 0x95 - xchg rBP,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10659
10660
/** Opcode 0x96 - xchg rSI,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10667
10668
/** Opcode 0x97 - xchg rDI,rAX (delegates to the common xchg-with-rAX helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10675
10676
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign-extends the lower half of rAX into its upper half, depending on the
 * effective operand size: AL->AX (cbw), AX->EAX (cwde), EAX->RAX (cdqe).
 * Implemented by testing the source's sign bit and OR-ing in ones or
 * AND-masking in zeros accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Bit 7 of AL is the sign; propagate it into AH. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Bit 15 of AX is the sign; propagate it into the top half of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Bit 31 of EAX is the sign; propagate it into the top half of RAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10722
10723
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Sign-extends rAX into rDX depending on the effective operand size:
 * AX->DX:AX (cwd), EAX->EDX:EAX (cdq), RAX->RDX:RAX (cqo).  rDX is set to
 * all ones or all zeros according to the sign bit of rAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10769
10770
/**
 * Opcode 0x9a - call Ap (far call with immediate selector:offset).
 *
 * Invalid in 64-bit mode.  Decodes the offset (16 or 32 bits per effective
 * operand size, zero-extended) and the 16-bit selector, then defers to the
 * iemCImpl_callf C implementation.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10787
10788
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Only checks for pending device-not-available and FPU exceptions; otherwise
 * a no-op that advances RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10802
10803
/**
 * Opcode 0x9c - pushf Fv.
 *
 * Pushes the flags register; defaults to 64-bit operand size in long mode
 * and defers the actual work to the iemCImpl_pushf C implementation.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10811
10812
/**
 * Opcode 0x9d - popf Fv.
 *
 * Pops the flags register; defaults to 64-bit operand size in long mode
 * and defers the actual work to the iemCImpl_popf C implementation.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10820
10821
/**
 * Opcode 0x9e - sahf.
 *
 * Loads SF, ZF, AF, PF and CF in EFLAGS from AH.  Invalid in 64-bit mode
 * unless the guest CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is general register index xSP in the 8-bit (non-REX) register numbering. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may set, preserve the upper EFLAGS
       bits, and force the always-one reserved bit 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10844
10845
/**
 * Opcode 0x9f - lahf.
 *
 * Stores the low byte of EFLAGS into AH.  Invalid in 64-bit mode unless the
 * guest CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is general register index xSP in the 8-bit (non-REX) register numbering. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10862
10863
10864/**
10865 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10866 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10867 * prefixes. Will return on failures.
10868 * @param a_GCPtrMemOff The variable to store the offset in.
10869 */
10870#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10871 do \
10872 { \
10873 switch (pIemCpu->enmEffAddrMode) \
10874 { \
10875 case IEMMODE_16BIT: \
10876 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10877 break; \
10878 case IEMMODE_32BIT: \
10879 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10880 break; \
10881 case IEMMODE_64BIT: \
10882 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
10883 break; \
10884 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10885 } \
10886 IEMOP_HLP_NO_LOCK_PREFIX(); \
10887 } while (0)
10888
10889/** Opcode 0xa0. */
10890FNIEMOP_DEF(iemOp_mov_Al_Ob)
10891{
10892 /*
10893 * Get the offset and fend of lock prefixes.
10894 */
10895 RTGCPTR GCPtrMemOff;
10896 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10897
10898 /*
10899 * Fetch AL.
10900 */
10901 IEM_MC_BEGIN(0,1);
10902 IEM_MC_LOCAL(uint8_t, u8Tmp);
10903 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10904 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10905 IEM_MC_ADVANCE_RIP();
10906 IEM_MC_END();
10907 return VINF_SUCCESS;
10908}
10909
10910
/**
 * Opcode 0xa1 - mov rAX,Ov.
 *
 * Loads AX/EAX/RAX from a moffs absolute address in the effective segment,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10956
10957
10958/** Opcode 0xa2. */
10959FNIEMOP_DEF(iemOp_mov_Ob_AL)
10960{
10961 /*
10962 * Get the offset and fend of lock prefixes.
10963 */
10964 RTGCPTR GCPtrMemOff;
10965 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10966
10967 /*
10968 * Store AL.
10969 */
10970 IEM_MC_BEGIN(0,1);
10971 IEM_MC_LOCAL(uint8_t, u8Tmp);
10972 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10973 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10974 IEM_MC_ADVANCE_RIP();
10975 IEM_MC_END();
10976 return VINF_SUCCESS;
10977}
10978
10979
/**
 * Opcode 0xa3 - mov Ov,rAX.
 *
 * Stores AX/EAX/RAX to a moffs absolute address in the effective segment,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11024
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS step: loads ValBits bits from [seg:rSI], stores them to
 * [ES:rDI], then advances (or, with EFLAGS.DF set, retreats) rSI and rDI by
 * ValBits/8 using AddrBits-wide register arithmetic.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11043
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * Byte string move.  With a REP/REPNE prefix the whole loop is deferred to a
 * C implementation selected by the effective address mode; otherwise a
 * single step is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11077
11078
/**
 * Opcode 0xa5 - MOVSW/MOVSD/MOVSQ.
 *
 * Operand-size variant of MOVSB: copies an element of the effective operand
 * size from [iEffSeg:xSI] to [ES:xDI] and steps both index registers by the
 * element size according to EFLAGS.DF.  REP/REPNE forms are deferred to the
 * C implementation, selected on (operand size, address size).
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11161
11162#undef IEM_MOVS_CASE
11163
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS iteration for a ValBits-wide element using
 * AddrBits-wide index registers: fetches [iEffSeg:xSI] and [ES:xDI],
 * invokes the cmp assembly helper (which only updates EFLAGS, no memory
 * stores), and then steps xSI/xDI by ValBits/8 - down when EFLAGS.DF is
 * set, up otherwise. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11190
11191/** Opcode 0xa6. */
11192FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11193{
11194 IEMOP_HLP_NO_LOCK_PREFIX();
11195
11196 /*
11197 * Use the C implementation if a repeat prefix is encountered.
11198 */
11199 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11200 {
11201 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11202 switch (pIemCpu->enmEffAddrMode)
11203 {
11204 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11205 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11206 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11208 }
11209 }
11210 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11211 {
11212 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11213 switch (pIemCpu->enmEffAddrMode)
11214 {
11215 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11216 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11217 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11219 }
11220 }
11221 IEMOP_MNEMONIC("cmps Xb,Yb");
11222
11223 /*
11224 * Sharing case implementation with cmps[wdq] below.
11225 */
11226 switch (pIemCpu->enmEffAddrMode)
11227 {
11228 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11229 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11230 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11232 }
11233 return VINF_SUCCESS;
11234
11235}
11236
11237
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Operand-size variant of CMPSB: compares the element at [iEffSeg:xSI]
 * with the one at [ES:xDI] (EFLAGS update only), then steps xSI/xDI by the
 * element size according to EFLAGS.DF.  REPE/REPNE forms defer to the C
 * implementations, selected on (operand size, address size).
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11356
11357#undef IEM_CMPS_CASE
11358
/** Opcode 0xa8 - TEST AL,imm8; AND semantics without writing the result,
 * handled by the shared AL,Ib binary-operator helper.  AF is architecturally
 * undefined after TEST, hence the verification hint. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11366
11367
/** Opcode 0xa9 - TEST rAX,immz; operand-size variant of 0xa8, handled by the
 * shared rAX,Iz binary-operator helper.  AF is architecturally undefined. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11375
11376
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS iteration: stores the low ValBits of rAX at
 * [ES:xDI], then steps xDI by ValBits/8 - down when EFLAGS.DF is set, up
 * otherwise. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11392
/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL at [ES:xDI] and steps xDI by one according to EFLAGS.DF.
 * REP/REPNE (both treated as REP for STOS) defer to the C implementation.
 * STOS takes no segment override, so the rep helpers need no iEffSeg.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11426
11427
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Operand-size variant of STOSB: stores the low bits of rAX at [ES:xDI]
 * and steps xDI by the element size according to EFLAGS.DF.  REP forms
 * defer to the C implementation selected on (operand size, address size).
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11510
11511#undef IEM_STOS_CASE
11512
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS iteration: loads a ValBits-wide element from
 * [iEffSeg:xSI] into the low ValBits of rAX, then steps xSI by ValBits/8 -
 * down when EFLAGS.DF is set, up otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11528
/**
 * Opcode 0xac - LODSB.
 *
 * Loads the byte at [iEffSeg:xSI] into AL and steps xSI by one according
 * to EFLAGS.DF.  REP/REPNE (both treated as REP for LODS) defer to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11562
11563
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Operand-size variant of LODSB: loads an element from [iEffSeg:xSI] into
 * the low bits of rAX and steps xSI by the element size according to
 * EFLAGS.DF.  REP forms defer to the C implementation, selected on
 * (operand size, address size).
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11646
11647#undef IEM_LODS_CASE
11648
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeated SCAS iteration: compares the low ValBits of rAX
 * with the element at [ES:xDI] via the cmp assembly helper (EFLAGS update
 * only; rAX is referenced but not modified by cmp), then steps xDI by
 * ValBits/8 - down when EFLAGS.DF is set, up otherwise. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11670
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL with the byte at [ES:xDI] (EFLAGS update only) and steps xDI
 * by one according to EFLAGS.DF.  REPE/REPNE forms defer to the C
 * implementations.  SCAS takes no segment override, so no iEffSeg is passed.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11715
11716
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ.
 *
 * Operand-size variant of SCASB: compares the low bits of rAX with the
 * element at [ES:xDI] (EFLAGS update only) and steps xDI by the element
 * size according to EFLAGS.DF.  REPE/REPNE forms defer to the C
 * implementations, selected on (operand size, address size).
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  16-bit addressing is possible in 64-bit mode, but 32-bit can always be encoded - verify. */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every case above (incl. default) returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11832
11833#undef IEM_SCAS_CASE
11834
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into byte register iReg.
 *
 * @param   iReg    The destination byte-register index, with any REX.B
 *                  extension already OR'ed in by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11851
11852
/** Opcode 0xb0 - MOV AL,imm8 (R8B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11859
11860
/** Opcode 0xb1 - MOV CL,imm8 (R9B with REX.B). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11867
11868
/** Opcode 0xb2 - MOV DL,imm8 (R10B with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11875
11876
/** Opcode 0xb3 - MOV BL,imm8 (R11B with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11883
11884
/** Opcode 0xb4 - MOV AH,imm8.
 * Register index 4: AH without a REX prefix, SPL/R12B with one - the byte
 * GREG store presumably applies that mapping based on REX state; confirm
 * against IEM_MC_STORE_GREG_U8. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11891
11892
/** Opcode 0xb5 - MOV CH,imm8.
 * Register index 5: CH without a REX prefix, BPL/R13B with one. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11899
11900
/** Opcode 0xb6 - MOV DH,imm8.
 * Register index 6: DH without a REX prefix, SIL/R14B with one. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11907
11908
/** Opcode 0xb7 - MOV BH,imm8.
 * Register index 7: BH without a REX prefix, DIL/R15B with one. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11915
11916
11917/**
11918 * Common 'mov regX,immX' helper.
11919 */
11920FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11921{
11922 switch (pIemCpu->enmEffOpSize)
11923 {
11924 case IEMMODE_16BIT:
11925 {
11926 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11927 IEMOP_HLP_NO_LOCK_PREFIX();
11928
11929 IEM_MC_BEGIN(0, 1);
11930 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11931 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11932 IEM_MC_ADVANCE_RIP();
11933 IEM_MC_END();
11934 break;
11935 }
11936
11937 case IEMMODE_32BIT:
11938 {
11939 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11940 IEMOP_HLP_NO_LOCK_PREFIX();
11941
11942 IEM_MC_BEGIN(0, 1);
11943 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11944 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11945 IEM_MC_ADVANCE_RIP();
11946 IEM_MC_END();
11947 break;
11948 }
11949 case IEMMODE_64BIT:
11950 {
11951 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11952 IEMOP_HLP_NO_LOCK_PREFIX();
11953
11954 IEM_MC_BEGIN(0, 1);
11955 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11956 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11957 IEM_MC_ADVANCE_RIP();
11958 IEM_MC_END();
11959 break;
11960 }
11961 }
11962
11963 return VINF_SUCCESS;
11964}
11965
11966
/** Opcode 0xb8 - MOV rAX,immz (r8 with REX.B). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
11973
11974
/** Opcode 0xb9 - MOV rCX,immz (r9 with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
11981
11982
/** Opcode 0xba - MOV rDX,immz (r10 with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
11989
11990
/** Opcode 0xbb - MOV rBX,immz (r11 with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
11997
11998
/** Opcode 0xbc - MOV rSP,immz (r12 with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12005
12006
/** Opcode 0xbd - MOV rBP,immz (r13 with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12013
12014
/** Opcode 0xbe - MOV rSI,immz (r14 with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12021
12022
/** Opcode 0xbf - MOV rDI,immz (r15 with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12029
12030
/** Opcode 0xc0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    /*
     * Group 2: rotate/shift r/m8 by an immediate count (80186+).
     * The ModR/M reg field selects the operation; /6 is undefined.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        /* cbImm=1: one immediate byte (the count) follows the displacement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12090
12091
/** Opcode 0xc1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /*
     * Group 2: rotate/shift r/m16, r/m32 or r/m64 by an immediate count
     * (80186+).  The ModR/M reg field selects the operation; /6 is undefined.
     * The register/memory and 16/32/64-bit paths are expanded separately
     * because the microcode macros are operand-size specific.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=1: one immediate byte (the count) follows the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12229
12230
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return with a 16-bit immediate stack adjustment; the actual
       pop/adjust semantics live in iemCImpl_retn.  Operand size defaults
       to 64-bit in long mode. */
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12240
12241
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    /* Near return without stack adjustment - same worker as 0xc2 with a
       zero immediate. */
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12250
12251
/** Opcode 0xc4. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    /* LES Gv,Mp in legacy/compat mode; doubles as the 2-byte VEX prefix. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode.  In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12272
12273
/** Opcode 0xc5. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* LDS Gv,Mp in legacy/compat mode; doubles as the 3-byte VEX prefix.
       The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the remaining VEX bytes so decoding stays in sync, even though
       the instruction is rejected below for now. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12311
12312
/** Opcode 0xc6. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /* Group 11: only /0 (MOV r/m8, imm8) is defined; other reg values #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* cbImm=1: the immediate byte follows the displacement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12344
12345
/** Opcode 0xc7. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /* Group 11: only /0 (MOV r/m16/32/64, imm16/32) is defined; others #UD.
       In 64-bit mode the immediate is a sign-extended 32-bit value. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* The 64-bit form still only encodes a 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  cbImm passed to the effective-address calculation
           is the number of immediate bytes still to come (2 or 4). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12426
12427
12428
12429
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* ENTER - allocate a stack frame: 16-bit frame size plus an 8-bit
       nesting level; the heavy lifting happens in iemCImpl_enter (186+). */
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12441
12442
12443/** Opcode 0xc9. */
12444FNIEMOP_DEF(iemOp_leave)
12445{
12446 IEMOP_MNEMONIC("retn");
12447 IEMOP_HLP_MIN_186();
12448 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12449 IEMOP_HLP_NO_LOCK_PREFIX();
12450 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12451}
12452
12453
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* Far return with a 16-bit immediate stack adjustment; semantics in
       iemCImpl_retf. */
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12463
12464
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    /* Far return without stack adjustment - same worker as 0xca with a
       zero immediate. */
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12473
12474
/** Opcode 0xcc. */
FNIEMOP_DEF(iemOp_int_3)
{
    /* INT3 - breakpoint; raises vector 3 with the fIsBpInstr flag set so the
       common worker can apply the INT3-specific privilege handling. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12481
12482
/** Opcode 0xcd. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /* INT imm8 - software interrupt through the given vector. */
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12490
12491
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    /* INTO - raises the overflow exception vector (#OF); invalid in 64-bit
       mode.  The conditional check on EFLAGS.OF is done by iemCImpl_int. */
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12505
12506
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    /* IRET/IRETD/IRETQ - interrupt return; all mode handling in iemCImpl_iret. */
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12514
12515
/** Opcode 0xd0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /*
     * Group 2: rotate/shift r/m8 by a fixed count of 1.
     * The ModR/M reg field selects the operation; /6 is undefined.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,      GCPtrEffDst);

        /* cbImm=0: no immediate bytes follow. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12571
12572
12573
/** Opcode 0xd1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /*
     * Group 2: rotate/shift r/m16, r/m32 or r/m64 by a fixed count of 1.
     * The ModR/M reg field selects the operation; /6 is undefined.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,      GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,      GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,      GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12703
12704
/** Opcode 0xd2. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /*
     * Group 2: rotate/shift r/m8 by the count in CL.
     * The ModR/M reg field selects the operation; /6 is undefined.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12762
12763
/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /*
     * Group 2: rotate/shift r/m16, r/m32 or r/m64 by the count in CL.
     * The ModR/M reg field selects the operation; /6 is undefined.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the hardware for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* The count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12899
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    /* AAM imm8 - ASCII adjust AX after multiply; invalid in 64-bit mode.
       A zero divisor immediate raises #DE, matching real hardware. */
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12911
12912
/** Opcode 0xd5 - AAD Ib (ASCII adjust AX before division).
 * Unlike AAM this multiplies (AL += AH * bImm), so no divide-error check
 * is needed for a zero immediate. Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12922
12923
12924/** Opcode 0xd6. */
12925FNIEMOP_DEF(iemOp_salc)
12926{
12927 IEMOP_MNEMONIC("salc");
12928 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
12929 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12931 IEMOP_HLP_NO_64BIT();
12932
12933 IEM_MC_BEGIN(0, 0);
12934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12935 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12936 } IEM_MC_ELSE() {
12937 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12938 } IEM_MC_ENDIF();
12939 IEM_MC_ADVANCE_RIP();
12940 IEM_MC_END();
12941 return VINF_SUCCESS;
12942}
12943
12944
/** Opcode 0xd7 - XLAT: AL = [xBX + zero-extended AL], honouring the
 * effective address size and segment override (default DS). */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* MEM16 variant: 16-bit address arithmetic/wrap semantics. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12991
12992
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Only invoke the assembly worker when both ST0 and STn hold values;
       an empty register means FPU stack underflow reported on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13023
13024
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is modified.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to update on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13055
13056
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register; still pop on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13087
13088
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13095
13096
/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13103
13104
/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with STn, set FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13111
13112
/** Opcode 0xd8 11/3 - fcomp st0,stN: like fcom but pops ST0 afterwards
 * (same assembly worker, pop variant of the helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13119
13120
/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13127
13128
/** Opcode 0xd8 11/5 - fsubr st0,stN: ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13135
13136
/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13143
13144
/** Opcode 0xd8 11/7 - fdivr st0,stN: ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13151
13152
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Memory is fetched unconditionally, so access faults are raised even
       when ST0 turns out to be empty. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13188
13189
/** Opcode 0xd8 !11/0 - fadd st0,m32r: ST0 += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13196
13197
/** Opcode 0xd8 !11/1 - fmul st0,m32r: ST0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13204
13205
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compare ST0 with a 32-bit real from
 * memory; only FSW is updated (open-coded because of the memory-operand
 * FSW/underflow bookkeeping). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so access faults precede FPU checks. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13238
13239
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: like fcom st0,m32r but pops ST0
 * afterwards (uses the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13272
13273
/** Opcode 0xd8 !11/4 - fsub st0,m32r: ST0 -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13280
13281
/** Opcode 0xd8 !11/5 - fsubr st0,m32r: ST0 = m32r - ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13288
13289
/** Opcode 0xd8 !11/6 - fdiv st0,m32r: ST0 /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13296
13297
/** Opcode 0xd8 !11/7 - fdivr st0,m32r: ST0 = m32r / ST0 (reversed). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13304
13305
/** Opcode 0xd8 - first x87 escape byte.
 * Records the FPU opcode offset (for FOP tracking), then dispatches on the
 * ModR/M reg field: mod==3 selects the register (stN) forms, otherwise the
 * m32r memory forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* -1: offOpcode already points past the 0xd8 byte itself. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13343
13344
/** Opcode 0xd9 /0 mem32real - fld m32r: convert a 32-bit real to 80-bit and
 * push it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 relative to TOP becomes the new ST0 after the push, so it
       must be empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13377
13378
/** Opcode 0xd9 !11/2 mem32real - fst m32r: store ST0 as a 32-bit real
 * (ST0 is not popped). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults are raised
       before any FPU state is touched. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW: an unmasked exception must not
           write the destination. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, store the indefinite QNaN; either way
           report stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13413
13414
/** Opcode 0xd9 !11/3 - fstp m32r: store ST0 as a 32-bit real and pop
 * (same as fst m32r except the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13449
13450
/** Opcode 0xd9 !11/4 - fldenv m14/28byte: load the FPU environment
 * (size depends on the effective operand size; heavy lifting deferred
 * to the C implementation). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13467
13468
/** Opcode 0xd9 !11/5 - fldcw m2byte: load the FPU control word from memory.
 * Note: the argument local is (mis)named u16Fsw but carries the new FCW
 * value handed to iemCImpl_fldcw. */
FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldcw m2byte");
    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_ARG(uint16_t, u16Fsw, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13484
13485
/** Opcode 0xd9 !11/6 - fnstenv m14/m28byte: store the FPU environment
 * (no pending-exception check - this is the no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13502
13503
/** Opcode 0xd9 !11/7 - fnstcw m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13520
13521
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - FNOP: no operation other than
 * the usual FPU availability/exception checks and FOP/FPUIP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13539
13540
/** Opcode 0xd9 11/0 stN - fld stN: push a copy of STn onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Source register is read before the push (relative to the old TOP). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13566
13567
/** Opcode 0xd9 11/3 stN - fxch stN: exchange ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's old value goes to ST0 (via FpuRes, C1 set), ST0's old
           value is written into STn directly. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow path has masked/unmasked cases; handled in C. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13596
13597
/** Opcode 0xd9 11/4, 0xdd 11/2 - fstp stN: copy ST0 to STn and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequendly used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop, no copy needed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into STn, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13640
13641
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13671
13672
/** Opcode 0xd9 0xe0 - fchs: negate the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13679
13680
/** Opcode 0xd9 0xe1 - fabs: clear the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13687
13688
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW;
 * no register is modified.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to update on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13717
13718
/** Opcode 0xd9 0xe4 - ftst: compare ST0 against +0.0, FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13725
13726
/** Opcode 0xd9 0xe5 - fxam: classify ST0 into C0-C3, FSW only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13733
13734
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Register 7 relative to TOP becomes the new ST0; it must be empty or
       we have a push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13762
13763
/** Opcode 0xd9 0xe8 - fld1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13770
13771
/** Opcode 0xd9 0xe9 - fldl2t: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13778
13779
/** Opcode 0xd9 0xea - fldl2e: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13786
/** Opcode 0xd9 0xeb - fldpi: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13793
13794
/** Opcode 0xd9 0xec - fldlg2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13801
/** Opcode 0xd9 0xed - fldln2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13808
13809
/** Opcode 0xd9 0xee - fldz: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13816
13817
/** Opcode 0xd9 0xf0 - f2xm1: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13824
13825
13826/** Opcode 0xd9 0xf1. */
13827FNIEMOP_DEF(iemOp_fylx2)
13828{
13829 IEMOP_MNEMONIC("fylx2 st0");
13830 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13831}
13832
13833
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        /* Stores value 1 into ST0 and pushes value 2 as the new ST0. */
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13863
13864
/** Opcode 0xd9 0xf2 - fptan: ST0 = tan(ST0), then push 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13871
13872
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the R/M field selects STn.
 *                      (Callers may also pass a literal register number.)
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Note the operand order: STn is the destination (value1), ST0 the
       source (value2). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13904
13905
/** Opcode 0xd9 0xf3 - FPATAN: ST1 = op(ST1,ST0), pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13912
13913
/** Opcode 0xd9 0xf4 - FXTRACT: replaces ST0 and pushes a second result
 *  (see iemOpHlpFpuReplace_st0_push). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13920
13921
/** Opcode 0xd9 0xf5 - FPREM1: ST0 = op(ST0,ST1), no pop
 *  (worker iemOpHlpFpu_st0_stN is defined earlier in the file). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13928
13929
/** Opcode 0xd9 0xf6 - FDECSTP: decrements the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only TOP moves; register contents and tags are untouched. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13952
13953
/** Opcode 0xd9 0xf7 - FINCSTP: increments the FPU top-of-stack pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only TOP moves; register contents and tags are untouched. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13976
13977
/** Opcode 0xd9 0xf8 - FPREM: ST0 = op(ST0,ST1), no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13984
13985
/** Opcode 0xd9 0xf9 - FYL2XP1: ST1 = op(ST1,ST0), pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13992
13993
/** Opcode 0xd9 0xfa - FSQRT: unary op on ST0 (worker defined earlier). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14000
14001
/** Opcode 0xd9 0xfb - FSINCOS: replaces ST0 and pushes a second result
 *  (see iemOpHlpFpuReplace_st0_push). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14008
14009
/** Opcode 0xd9 0xfc - FRNDINT: unary op on ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14016
14017
/** Opcode 0xd9 0xfd - FSCALE: ST0 = op(ST0,ST1), no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14024
14025
/** Opcode 0xd9 0xfe - FSIN: unary op on ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14032
14033
/** Opcode 0xd9 0xff - FCOS: unary op on ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14040
14041
/** Dispatch table for the register forms of the 0xd9 escape, ModRM bytes
 *  0xe0..0xff.  Used by iemOp_EscF1, indexed by (bRm - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2, /* NOTE(review): the instruction at D9 F1 is FYL2X; handler name looks transposed -- confirm against its definition earlier in the file. */
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
14078
14079
/** Opcode 0xd9 - first FPU escape byte; dispatches on ModRM. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the opcode offset for FOP reporting before fetching ModRM. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, selected by the reg field (and full bRm for 0xe0+). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14121
14122
/** Opcode 0xda 11/0 - FCMOVB ST0,ST(i): copy ST(i) to ST0 when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14149
14150
/** Opcode 0xda 11/1 - FCMOVE ST0,ST(i): copy ST(i) to ST0 when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14177
14178
/** Opcode 0xda 11/2 - FCMOVBE ST0,ST(i): copy ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14205
14206
/** Opcode 0xda 11/3 - FCMOVU ST0,ST(i): copy ST(i) to ST0 when PF is set
 *  (unordered result of a preceding FUCOMI/FCOMI). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14233
14234
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags (FSW), and popping twice when done (FUCOMPP style).
 *
 * The register operands are fixed: ST0 and ST1.  If either is empty the
 * underflow-then-pop-pop path is taken instead of calling the worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      taking (pu16Fsw, pr80Value1, pr80Value2).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14265
14266
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14273
14274
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR).
 *
 * Calculates the effective address and finishes decoding before raising any
 * exception; fetches the 32-bit integer operand prior to testing ST0.
 *
 * @param   bRm         The ModRM byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      taking (pFpuRes, pr80Value1, pi32Val2).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14310
14311
/** Opcode 0xda !11/0 - FIADD m32int: ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14318
14319
/** Opcode 0xda !11/1 - FIMUL m32int: ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14326
14327
/** Opcode 0xda !11/2 - FICOM m32int: compare ST0 with m32i, set FSW, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Flags-only operation: result goes to FSW, FPU data pointer records the operand. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14360
14361
/** Opcode 0xda !11/3 - FICOMP m32int: compare ST0 with m32i, set FSW, pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same compare worker as FICOM; only the pop-on-completion differs. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14394
14395
/** Opcode 0xda !11/4 - FISUB m32int: ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14402
14403
/** Opcode 0xda !11/5 - FISUBR m32int: ST0 = m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14410
14411
/** Opcode 0xda !11/6 - FIDIV m32int: ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14418
14419
/** Opcode 0xda !11/7 - FIDIVR m32int: ST0 = m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14426
14427
/** Opcode 0xda - second FPU escape byte; dispatches on ModRM. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the opcode offset for FOP reporting before fetching ModRM. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVcc, plus the single-byte FUCOMPP at 0xe9. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer (m32i) arithmetic and compares. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14467
14468
/** Opcode 0xdb !11/0 - FILD m32int: load a 32-bit signed integer and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires ST7 (the register below TOP) to be empty, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14500
14501
/** Opcode 0xdb !11/1 - FISTTP m32int (SSE3): store ST0 to a 32-bit integer
 *  with truncation and pop.  Stores INT32_MIN (integer indefinite) on
 *  underflow when the invalid-operation exception is masked. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is conditional on FSW (masked exceptions). */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14536
14537
/** Opcode 0xdb !11/2 - FIST m32int: store ST0 to a 32-bit integer, no pop.
 *  Stores INT32_MIN (integer indefinite) on underflow when IM is masked. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is conditional on FSW (masked exceptions). */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14572
14573
14574/** Opcode 0xdb !11/3. */
14575FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14576{
14577 IEMOP_MNEMONIC("fisttp m32i");
14578 IEM_MC_BEGIN(3, 2);
14579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14580 IEM_MC_LOCAL(uint16_t, u16Fsw);
14581 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14582 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14583 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14584
14585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14587 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14588 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14589
14590 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14591 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14592 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14593 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14594 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14595 IEM_MC_ELSE()
14596 IEM_MC_IF_FCW_IM()
14597 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14598 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14599 IEM_MC_ENDIF();
14600 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14601 IEM_MC_ENDIF();
14602 IEM_MC_USED_FPU();
14603 IEM_MC_ADVANCE_RIP();
14604
14605 IEM_MC_END();
14606 return VINF_SUCCESS;
14607}
14608
14609
/** Opcode 0xdb !11/5 - FLD m80real: load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push requires ST7 (the register below TOP) to be empty, else stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14641
14642
/** Opcode 0xdb !11/7 - FSTP m80real: store ST0 as an 80-bit real and pop.
 *  Stores negative QNaN (real indefinite) on underflow when IM is masked. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; the commit is conditional on FSW (masked exceptions). */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14677
14678
/** Opcode 0xdb 11/0 - FCMOVNB ST0,ST(i): copy ST(i) to ST0 when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14705
14706
/** Opcode 0xdb 11/1 - FCMOVNE ST0,ST(i): copy ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14733
14734
/** Opcode 0xdb 11/2 - FCMOVNBE ST0,ST(i): copy ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be occupied, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14761
14762
14763/** Opcode 0xdb 11/3. */
14764FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14765{
14766 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14768
14769 IEM_MC_BEGIN(0, 1);
14770 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14771
14772 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14773 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14774
14775 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14776 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14777 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14778 IEM_MC_ENDIF();
14779 IEM_MC_UPDATE_FPU_OPCODE_IP();
14780 IEM_MC_ELSE()
14781 IEM_MC_FPU_STACK_UNDERFLOW(0);
14782 IEM_MC_ENDIF();
14783 IEM_MC_USED_FPU();
14784 IEM_MC_ADVANCE_RIP();
14785
14786 IEM_MC_END();
14787 return VINF_SUCCESS;
14788}
14789
14790
/** Opcode 0xdb 0xe0 - FNENI: 8087 interrupt-enable; no-op on later FPUs
 *  (only the \#NM check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14802
14803
/** Opcode 0xdb 0xe1 - FNDISI: 8087 interrupt-disable; no-op on later FPUs
 *  (only the \#NM check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14815
14816
/** Opcode 0xdb 0xe2 - FNCLEX: clear FPU exception flags in FSW without
 *  checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14830
14831
/** Opcode 0xdb 0xe3 - FNINIT: reinitialize the FPU, deferred to a C
 *  implementation without the pending-exception check. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14839
14840
/** Opcode 0xdb 0xe4 - FNSETPM: 80287 "set protected mode"; no-op on later
 *  FPUs (only the \#NM check is performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14852
14853
/** Opcode 0xdb 0xe5 - FRSTPM: 80287XL "reset protected mode".  Currently
 *  raises \#UD, matching newer CPUs; the no-op variant is disabled. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14869
14870
/** Opcode 0xdb 11/5 - FUCOMI ST0,ST(i): unordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14877
14878
/** Opcode 0xdb 11/6 - FCOMI ST0,ST(i): ordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14885
14886
/** Opcode 0xdb - third FPU escape byte; dispatches on ModRM. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the opcode offset for FOP reporting before fetching ModRM. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVNcc, control ops at 0xe0..0xe7, FUCOMI/FCOMI. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg == 4 covers bRm 0xe0..0xe7 individually. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* unreachable; keeps compilers quiet */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: m32i loads/stores and m80r load/store. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14936
14937
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModRM byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only run the op when both STn and ST0 are non-empty; otherwise raise
       stack underflow on STn. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14969
14970
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    /* FADD ST(i),ST(0): result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14977
14978
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    /* FMUL ST(i),ST(0): result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14985
14986
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    /* FSUBR ST(i),ST(0): reversed-operand subtract, result in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14993
14994
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    /* FSUB ST(i),ST(0): result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15001
15002
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    /* FDIVR ST(i),ST(0): reversed-operand divide, result in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15009
15010
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    /* FDIV ST(i),ST(0): result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15017
15018
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModRM byte; encodes the memory operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand first, then check ST0 for emptiness. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15053
15054
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    /* FADD ST(0),m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15061
15062
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    /* FMUL ST(0),m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15069
15070
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");
    /* Compare ST0 against a 64-bit real memory operand; only FSW is updated,
       no value is stored and nothing is popped. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15103
15104
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");
    /* Same as FCOM m64r (shares iemAImpl_fcom_r80_by_r64), but pops ST0
       afterwards via the *_THEN_POP FSW/underflow variants. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15137
15138
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    /* FSUB ST(0),m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15145
15146
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    /* FSUBR ST(0),m64real (reversed operand order). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15153
15154
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    /* FDIV ST(0),m64real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15161
15162
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    /* FDIVR ST(0),m64real (reversed operand order). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15169
15170
/** Opcode 0xdc.
 * Escape group F4: register forms operate on ST(i),ST(0); memory forms take a
 * 64-bit real operand with ST(0) as destination. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember where the escape opcode byte is for FPU opcode (FOP) updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15207
15208
/** Opcode 0xdd !11/0.
 * Loads a 64-bit real from memory, converts it to 80-bit and pushes it onto
 * the FPU stack (register 7 below top must be free or it's a push overflow).
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15240
15241
/** Opcode 0xdd !11/1.
 * Store ST0 to m64i with truncation and pop; on empty-stack underflow with
 * IM masked, the integer indefinite (INT64_MIN) is written instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before evaluating ST0. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15276
15277
/** Opcode 0xdd !11/2.
 * Store ST0 to m64real without popping; underflow with IM masked writes the
 * negative QNaN (real indefinite) instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15312
15313
15314
15315
/** Opcode 0xdd !11/3.
 * Same as FST m64r but pops ST0 afterwards (uses the *_THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15350
15351
/** Opcode 0xdd !11/4.
 * Restores the full FPU state from memory; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15368
15369
/** Opcode 0xdd !11/6.
 * Saves the full FPU state to memory; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15387
/** Opcode 0xdd !11/7.
 * Stores the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15411
15412
/** Opcode 0xdd 11/0.
 * Marks ST(i) as empty in the FPU tag word without touching the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15434
15435
/** Opcode 0xdd 11/2.
 * Copies ST0 into ST(i) (raises stack underflow on ST(i) if ST0 is empty). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15458
15459
15460/** Opcode 0xdd 11/3. */
15461FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15462{
15463 IEMOP_MNEMONIC("fcom st0,stN");
15464 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15465}
15466
15467
15468/** Opcode 0xdd 11/4. */
15469FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15470{
15471 IEMOP_MNEMONIC("fcomp st0,stN");
15472 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15473}
15474
15475
/** Opcode 0xdd.
 * Escape group F5: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * take 64-bit real/integer operands plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember where the escape opcode byte is for FPU opcode (FOP) updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15512
15513
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* FADDP ST(i),ST(0): add, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15520
15521
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* FMULP ST(i),ST(0): multiply, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15528
15529
15530/** Opcode 0xde 0xd9. */
15531FNIEMOP_DEF(iemOp_fcompp)
15532{
15533 IEMOP_MNEMONIC("fucompp st0,stN");
15534 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15535}
15536
15537
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* FSUBRP ST(i),ST(0): reversed subtract, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15544
15545
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* FSUBP ST(i),ST(0): subtract, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15552
15553
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* FDIVRP ST(i),ST(0): reversed divide, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15560
15561
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* FDIVP ST(i),ST(0): divide, store in ST(i), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15568
15569
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Run the op only if ST0 is non-empty; otherwise underflow on ST0. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15605
15606
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* FIADD ST(0),m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15613
15614
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* FIMUL ST(0),m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15621
15622
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");
    /* Compare ST0 against a 16-bit integer memory operand; FSW only, no pop. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15655
15656
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");
    /* Same as FICOM m16i, but pops ST0 afterwards (the *_THEN_POP variants). */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15689
15690
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* FISUB ST(0),m16int. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15697
15698
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* FISUBR ST(0),m16int (reversed operand order). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15705
15706
15707/** Opcode 0xde !11/6. */
15708FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15709{
15710 IEMOP_MNEMONIC("fiadd m16i");
15711 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15712}
15713
15714
15715/** Opcode 0xde !11/7. */
15716FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15717{
15718 IEMOP_MNEMONIC("fiadd m16i");
15719 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15720}
15721
15722
/** Opcode 0xde.
 * Escape group F6: register forms are the pop variants of the arithmetic ops
 * (plus FCOMPP at 0xd9); memory forms take 16-bit integer operands. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the escape opcode byte is for FPU opcode (FOP) updating. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15761
15762
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free ST(i), then increment the stack top (the FINCSTP half). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15784
15785
/** Opcode 0xdf 0xe0.
 * Stores the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15801
15802
15803/** Opcode 0xdf 11/5. */
15804FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15805{
15806 IEMOP_MNEMONIC("fcomip st0,stN");
15807 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15808}
15809
15810
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* FCOMIP ST0,ST(i): ordered compare setting EFLAGS, popping ST0. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15817
15818
/** Opcode 0xdf !11/0.
 * Loads a 16-bit integer from memory, converts it to 80-bit real and pushes
 * it onto the FPU stack (push overflow if register 7 below top isn't free). */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15850
15851
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* FISTTP m16int: store ST(0) to memory as a 16-bit integer using
       truncation (round toward zero regardless of RC), then pop. */
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty source register: only store the integer indefinite value if
           the invalid-operation exception is masked (FCW.IM set). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15886
15887
15888/** Opcode 0xdf !11/2. */
15889FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15890{
15891 IEMOP_MNEMONIC("fistp m16i");
15892 IEM_MC_BEGIN(3, 2);
15893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15894 IEM_MC_LOCAL(uint16_t, u16Fsw);
15895 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15896 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15897 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15898
15899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15903
15904 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15905 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15906 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15907 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15908 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15909 IEM_MC_ELSE()
15910 IEM_MC_IF_FCW_IM()
15911 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15912 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15913 IEM_MC_ENDIF();
15914 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15915 IEM_MC_ENDIF();
15916 IEM_MC_USED_FPU();
15917 IEM_MC_ADVANCE_RIP();
15918
15919 IEM_MC_END();
15920 return VINF_SUCCESS;
15921}
15922
15923
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* FISTP m16int: store ST(0) to memory as a 16-bit integer (rounded per
       FCW.RC), then pop the FPU stack. */
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite only when #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15958
15959
/** Opcode 0xdf !11/4. */
/* FBLD m80bcd (packed BCD load) — not implemented yet; stubbed out. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15962
15963
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* FILD m64int: load a signed 64-bit integer from memory, convert it to
       an 80-bit real and push it onto the FPU stack. */
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val,     1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Push target is TOP-relative register 7; occupied means stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15995
15996
/** Opcode 0xdf !11/6. */
/* FBSTP m80bcd (packed BCD store and pop) — not implemented yet; stubbed out. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15999
16000
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64int: store ST(0) to memory as a 64-bit integer (rounded per
       FCW.RC), then pop the FPU stack. */
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite only when #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16035
16036
/** Opcode 0xdf. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /* Escape opcode 0xdf dispatcher: routes on ModRM.reg, with separate
       tables for the register form (mod == 3) and the memory forms. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,  bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    /* Only DF E0 is defined in /4; the rest are #UD. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16074
16075
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    /* LOOPNE/LOOPNZ rel8: decrement CX/ECX/RCX (selected by the effective
       ADDRESS size) and branch if the counter is non-zero AND ZF is clear.
       The decrement itself does not modify EFLAGS. */
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16122
16123
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    /* LOOPE/LOOPZ rel8: decrement CX/ECX/RCX (selected by the effective
       ADDRESS size) and branch if the counter is non-zero AND ZF is set. */
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16170
16171
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    /* LOOP rel8: decrement CX/ECX/RCX (selected by effective address size)
       and branch while the counter is non-zero.  A branch back to the
       instruction itself (offset == -instruction length) is special-cased as
       "spin until counter is zero" and collapsed into a single counter clear,
       so tight LOOP $ delay loops don't execute one emulated iteration at a
       time. */
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the instruction length here (decoding finished);
               -offOpcode == i8Imm means the jump targets this instruction. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16245
16246
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    /* JCXZ/JECXZ/JRCXZ rel8: branch when the address-size-selected counter
       register is zero; fall through otherwise.  No flags are consulted. */
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16290
16291
16292/** Opcode 0xe4 */
16293FNIEMOP_DEF(iemOp_in_AL_Ib)
16294{
16295 IEMOP_MNEMONIC("in eAX,Ib");
16296 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16297 IEMOP_HLP_NO_LOCK_PREFIX();
16298 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16299}
16300
16301
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN eAX, imm8: read a word/dword (by effective operand size) from the
       immediate I/O port into AX/EAX. */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16310
16311
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT imm8, AL: write the byte in AL to the immediate I/O port. */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16320
16321
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT imm8, eAX: write AX/EAX (by effective operand size) to the
       immediate I/O port. */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16330
16331
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /* CALL rel16/rel32: near relative call.  In 64-bit mode the operand size
       defaults to 64 bits and the rel32 displacement is sign-extended. */
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16360
16361
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /* JMP rel16/rel32: near relative jump.  The 64-bit case shares the
       32-bit path since the displacement is rel32 either way. */
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16391
16392
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /* JMP ptr16:16 / ptr16:32: direct far jump.  Invalid (#UD) in 64-bit
       mode, enforced by IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16409
16410
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /* JMP rel8: short relative jump, same encoding for all operand sizes. */
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16424
16425
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL, DX: read one byte from the I/O port in DX into AL. */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16433
16434
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* IN eAX, DX: read a word/dword (by effective operand size) from the I/O
       port in DX into AX/EAX.
       NOTE(review): the function name is missing the "in_" prefix used by its
       siblings (iemOp_in_AL_DX); renaming would require updating the one-byte
       opcode table elsewhere in this file. */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16442
16443
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX, AL: write the byte in AL to the I/O port in DX. */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16451
16452
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX, eAX: write AX/EAX (by effective operand size) to the I/O port
       in DX. */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16460
16461
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    /* LOCK prefix: record it in fPrefixes, then decode and dispatch the next
       opcode byte through the one-byte map.  Whether LOCK is legal is
       decided by the instruction handler it ends up at. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16471
16472
/** Opcode 0xf1. */
FNIEMOP_DEF(iemOp_int_1)
{
    /* INT1/ICEBP: raise a #DB trap; unlike INT3 it is not flagged as a
       breakpoint instruction (fIsBpInstr = false). */
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16481
16482
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /* REPNE/REPNZ prefix: record it and dispatch the next opcode byte.
       Mutually exclusive with REPE, so that flag is cleared first. */
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16494
16495
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /* REP/REPE/REPZ prefix: record it and dispatch the next opcode byte.
       Mutually exclusive with REPNE, so that flag is cleared first. */
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16507
16508
16509/** Opcode 0xf4. */
16510FNIEMOP_DEF(iemOp_hlt)
16511{
16512 IEMOP_HLP_NO_LOCK_PREFIX();
16513 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16514}
16515
16516
/** Opcode 0xf5. */
FNIEMOP_DEF(iemOp_cmc)
{
    /* CMC: complement the carry flag; no other flags are touched. */
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16528
16529
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches the byte-sized unary group-3/group-4 style operation either to a
 * register worker or to a read-modify-write memory access, honouring the LOCK
 * prefix for the memory form.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* A LOCK prefix selects the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16573
16574
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared greg worker; memory operands
 * are handled here per effective operand size as a read-modify-write, with
 * the LOCK prefix selecting the atomic worker variant.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16653
16654
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb, Ib: AND the byte operand with an immediate, updating flags
       only — the destination is never written (memory is mapped read-only
       and committed without modification). */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,      1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t,         u8Src,                  1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm = 1: the immediate follows the ModRM bytes and must be
           accounted for in RIP-relative addressing. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16702
16703
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /* TEST Ev, Iv: AND the word/dword/qword operand with an immediate of the
       effective operand size (imm32 sign-extended for the 64-bit form),
       updating flags only — the destination is never written. */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,    1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* cbImm (2/4/4 below) = size of the trailing immediate, needed
                   for correct RIP-relative effective address calculation. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16840
16841
/** Opcode 0xf6 /4, /5, /6 and /7. */
/**
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms of group 3.
 *
 * The implied register operand is AX (AL as input, AX as output); @a pfnU8
 * is the assembly helper that performs the arithmetic.  A non-zero helper
 * return value requests a divide-error (\#DE) exception.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pfnU8   The 8-bit multiply/divide assembly worker to call.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        /* Source is the ModR/M r/m register (REX.B extended). */
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means raise #DE instead of advancing. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        /* Source byte comes from memory; no extra immediate bytes follow. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16896
16897
/** Opcode 0xf7 /4, /5, /6 and /7. */
/**
 * Common worker for the word/dword/qword MUL/IMUL/DIV/IDIV forms of group 3.
 *
 * The implied operands are the xAX:xDX register pair; the size-specific
 * worker is picked from @a pImpl according to the effective operand size.
 * A non-zero worker return value requests a divide-error (\#DE) exception.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pImpl   Table of 16/32/64-bit multiply/divide assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero rc from the worker means raise #DE instead of advancing. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes through the refs must clear the high halves
                       of RAX/RDX explicitly (long mode semantics). */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        /* Same three size variants as above, but the source operand is
           fetched from the effective address instead of a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17081
/** Opcode 0xf6. */
/**
 * Group 3 byte-operand decoder: dispatches on the ModR/M reg field to
 * TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6), IDIV (/7).
 * /1 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17118
17119
/** Opcode 0xf7. */
/**
 * Group 3 word/dword/qword decoder: dispatches on the ModR/M reg field to
 * TEST (/0), NOT (/2), NEG (/3), MUL (/4), IMUL (/5), DIV (/6), IDIV (/7).
 * /1 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17156
17157
/** Opcode 0xf8. */
/** CLC - clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17169
17170
/** Opcode 0xf9. */
/** STC - set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17182
17183
/** Opcode 0xfa. */
/** CLI - deferred to a C implementation since it involves privilege checks. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17191
17192
/** Opcode 0xfb. */
/** STI - deferred to a C implementation (privilege checks, interrupt shadow). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17199
17200
/** Opcode 0xfc. */
/** CLD - clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17212
17213
/** Opcode 0xfd. */
/** STD - set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17225
17226
/** Opcode 0xfe. */
/**
 * Group 4 decoder: INC Eb (/0) and DEC Eb (/1); all other reg values raise \#UD.
 *
 * NOTE(review): the mnemonic strings say "Ev" although 0xfe takes byte
 * operands (the byte workers iemOpCommonUnaryEb are used) - looks like a
 * copy/paste slip in the debug strings; confirm before changing.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC("grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
17244
17245
/**
 * Opcode 0xff /2.
 *
 * Near indirect CALL: the target RIP is read from a register or memory
 * operand and passed to the size-specific iemCImpl_call_NN worker.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17327
17328typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17329
17330FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17331{
17332 /* Registers? How?? */
17333 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17334 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17335
17336 /* Far pointer loaded from memory. */
17337 switch (pIemCpu->enmEffOpSize)
17338 {
17339 case IEMMODE_16BIT:
17340 IEM_MC_BEGIN(3, 1);
17341 IEM_MC_ARG(uint16_t, u16Sel, 0);
17342 IEM_MC_ARG(uint16_t, offSeg, 1);
17343 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17347 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17348 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17349 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17350 IEM_MC_END();
17351 return VINF_SUCCESS;
17352
17353 case IEMMODE_64BIT:
17354 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17355 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17356 * and call far qword [rsp] encodings. */
17357 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17358 {
17359 IEM_MC_BEGIN(3, 1);
17360 IEM_MC_ARG(uint16_t, u16Sel, 0);
17361 IEM_MC_ARG(uint64_t, offSeg, 1);
17362 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17366 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17367 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17368 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17369 IEM_MC_END();
17370 return VINF_SUCCESS;
17371 }
17372 /* AMD falls thru. */
17373
17374 case IEMMODE_32BIT:
17375 IEM_MC_BEGIN(3, 1);
17376 IEM_MC_ARG(uint16_t, u16Sel, 0);
17377 IEM_MC_ARG(uint32_t, offSeg, 1);
17378 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17382 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17383 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17384 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17385 IEM_MC_END();
17386 return VINF_SUCCESS;
17387
17388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17389 }
17390}
17391
17392
/**
 * Opcode 0xff /3.
 *
 * Far indirect CALL - delegates to the common far-branch worker with the
 * callf C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17402
17403
/**
 * Opcode 0xff /4.
 *
 * Near indirect JMP: the target RIP is read from a register or memory
 * operand and installed directly via IEM_MC_SET_RIP_UNN.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17485
17486
/**
 * Opcode 0xff /5.
 *
 * Far indirect JMP - delegates to the common far-branch worker with the
 * FarJmp C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17496
17497
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev: register forms go through the common push worker; memory forms
 * fetch the value here and push it according to the effective operand size.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17551
17552
/** Opcode 0xff. */
/**
 * Group 5 decoder: INC (/0), DEC (/1), near CALL (/2), far CALL (/3),
 * near JMP (/4), far JMP (/5), PUSH (/6); /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17581
17582
17583
/**
 * The one-byte opcode decoder function table, indexed by the opcode byte
 * (0x00..0xff).  One entry per opcode; group opcodes (0x80-0x83, 0xc0/0xc1,
 * 0xd0-0xd3, 0xf6/0xf7, 0xfe, 0xff) dispatch further on the ModR/M reg field.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17651
17652
17653/** @} */
17654
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette