VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61555

Last change on this file since 61555 was 61555, checked in by vboxsync, 9 years ago

IEM: Try enable iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 606.1 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61555 2016-06-07 23:35:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself: destination is r/m8 (register or memory),
 * source is the reg8 field.  Honors the LOCK prefix for the memory form when
 * the implementation provides a locked variant.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* CMP and TEST only read the destination, so map it read-only; a NULL
           pfnLockedU8 identifies those (they have no locked form). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked variant when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself: destination is r/m16/32/64 per the effective
 * operand size, source is the reg field.  Honors the LOCK prefix for the
 * memory form.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode; TEST does
                   not write its destination, so skip the clearing for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 doubles as a "has locked variants at all?" probe for every
           size: it is NULL exactly for CMP/TEST, which only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModR/M byte itself: destination is reg8, source is r/m8.  Since
 * the destination is always a register, LOCK is invalid and the memory operand
 * is only read.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Decodes the ModR/M byte itself: destination is reg16/32/64 per the effective
 * operand size, source is r/m.  Register destination means LOCK is invalid and
 * the memory operand is only read.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the imm8 operand itself; the destination is always AL, so LOCK is
 * invalid.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Fetches the immediate itself; its width follows the effective operand size.
 * In 64-bit mode the immediate is a sign-extended dword (Iz), not a qword.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit GPR writes zero the upper half in 64-bit mode; TEST does
               not write its destination, so skip the clearing for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: dword immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 * Shared handler for opcodes IEM treats as invalid; raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    /* SLDT: store the LDTR selector into a register (zero-extended to the
       effective operand size) or into a 16-bit memory location. */
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always 16 bits regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    /* STR: store the task register selector into a register (zero-extended to
       the effective operand size) or into a 16-bit memory location. */
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always 16 bits regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    /* LLDT: load a selector into the LDTR; the heavy lifting (privilege and
       descriptor checks) is done by iemCImpl_lldt. */
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    /* LTR: load a selector into the task register; the heavy lifting
       (privilege and descriptor checks) is done by iemCImpl_ltr. */
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    /* VERR: verify a segment for reading; defers to the common VerX worker. */
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
769/** Opcode 0x0f 0x00. */
770FNIEMOP_DEF(iemOp_Grp6)
771{
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
774 {
775 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
776 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
777 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
778 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
779 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
780 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
781 case 6: return IEMOP_RAISE_INVALID_OPCODE();
782 case 7: return IEMOP_RAISE_INVALID_OPCODE();
783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
784 }
785
786}
787
788
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    /* SGDT: store the GDT base+limit to memory; iemCImpl_sgdt does the work. */
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/** Opcode 0x0f 0x01 /0. */
/* NOTE(review): presumably the mod=3 encoding 0xc1 (VMCALL) — confirm. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    /* Not implemented; raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0. */
/* NOTE(review): presumably the mod=3 encoding 0xc2 (VMLAUNCH) — confirm. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    /* Not implemented; raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0. */
/* NOTE(review): presumably the mod=3 encoding 0xc3 (VMRESUME) — confirm. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    /* Not implemented; raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /0. */
/* NOTE(review): presumably the mod=3 encoding 0xc4 (VMXOFF) — confirm. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    /* Not implemented; raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    /* SIDT: store the IDT base+limit to memory; iemCImpl_sidt does the work. */
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    /* MONITOR: fully deferred to iemCImpl_monitor (takes the effective segment
       for the RAX-based linear address). */
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    /* MWAIT: fully deferred to iemCImpl_mwait; no operands to decode. */
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    /* LGDT: load GDT base+limit from memory; iemCImpl_lgdt does the checks.
       NOTE(review): unlike sgdt/sidt this lacks IEMOP_HLP_MIN_286() — confirm
       whether that is intentional. */
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE(); /* forces 64-bit operand size in long mode */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    /* XGETBV: only valid when the guest CPU exposes XSAVE/XRSTOR; otherwise #UD. */
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    /* XSETBV: only valid when the guest CPU exposes XSAVE/XRSTOR; otherwise #UD. */
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    /* LIDT: load IDT base+limit from memory; iemCImpl_lidt does the checks.
       In long mode the operand size is always 64-bit regardless of prefixes.
       NOTE(review): no IEMOP_MNEMONIC here unlike the sibling handlers — confirm. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/*
 * AMD SVM instructions (0x0f 0x01, mod=3 encodings 0xd8..0xdf): all currently
 * stubbed to raise #UD via FNIEMOP_UD_STUB.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
1030/** Opcode 0x0f 0x01 /6. */
1031FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1032{
1033 /* The operand size is effectively ignored, all is 16-bit and only the
1034 lower 3-bits are used. */
1035 IEMOP_MNEMONIC("lmsw");
1036 IEMOP_HLP_MIN_286();
1037 IEMOP_HLP_NO_LOCK_PREFIX();
1038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1039 {
1040 IEM_MC_BEGIN(1, 0);
1041 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1042 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
1043 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1044 IEM_MC_END();
1045 }
1046 else
1047 {
1048 IEM_MC_BEGIN(1, 1);
1049 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1052 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
1053 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1054 IEM_MC_END();
1055 }
1056 return VINF_SUCCESS;
1057}
1058
1059
1060/** Opcode 0x0f 0x01 /7. */
1061FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1062{
1063 IEMOP_MNEMONIC("invlpg");
1064 IEMOP_HLP_MIN_486();
1065 IEMOP_HLP_NO_LOCK_PREFIX();
1066 IEM_MC_BEGIN(1, 1);
1067 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1069 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1070 IEM_MC_END();
1071 return VINF_SUCCESS;
1072}
1073
1074
/** Opcode 0x0f 0x01 /7, register form with rm=0 (0x0f 0x01 0xf8). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* SWAPGS - 64-bit mode only; everything is deferred to the CIMPL worker. */
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7, register form with rm=1 (0x0f 0x01 0xf9). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* RDTSCP is not implemented yet: complain about the stub and fail. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 decoder.  The /reg field selects the instruction; for /0../3
     * and /7 the register form (mod=3) re-dispatches on the r/m bits to
     * VMX/SVM/monitor/xsave/swapgs/rdtscp encodings.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) / VMX instructions (reg) */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE(); /* rm=0,5..7 undefined */

        case 1: /* SIDT (mem) / MONITOR+MWAIT (reg) */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) / XGETBV+XSETBV (reg) */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) / AMD SVM instructions (reg) */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            /* not reached: all eight rm values return above */

        case 4: /* SMSW - both reg and mem forms */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* undefined */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both reg and mem forms */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) / SWAPGS+RDTSCP (reg) */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * Loads the access rights (LAR) or segment limit (LSL) for the selector in
 * Ew into Gv; ZF reports success.  The heavy lifting is done by the
 * iemCImpl_LarLsl_u16/u64 workers, selected by operand size (32-bit and
 * 64-bit share the 64-bit worker).
 *
 * @param fIsLar    true for LAR, false for LSL (passed through to the CIMPL).
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: selector comes from the r/m register. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: selector is read from memory (a 16-bit fetch). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02. LAR - load access rights byte. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1281
1282
/** Opcode 0x0f 0x03. LSL - load segment limit. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1289
1290
/** Opcode 0x0f 0x05. SYSCALL; everything deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL (0f 05 on the 286) */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06. CLTS - clear CR0.TS; deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07. SYSRET; everything deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL (0f 07 on the 386) */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08. INVD - not implemented yet (486+; see wbinvd below). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* WBINVD: only the CPL-0 privilege check is emulated; the actual cache
       write-back/invalidate is ignored (nothing to flush in emulation). */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b. UD2 - guaranteed invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Requires the 3DNow!-prefetch feature; only memory forms are valid. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The register forms are invalid. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break; /* same mnemonic as /1 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the address (consumes displacement/SIB bytes), then do nothing. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e. FEMMS (AMD 3DNow!) - not implemented yet. */
FNIEMOP_STUB(iemOp_femms);
1387
1388
/*
 * 3DNow! instruction stubs.  These are the imm8-suffixed forms of the
 * 0x0f 0x0f escape, dispatched by iemOp_3Dnow below; none are implemented.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! escape: the real opcode is the trailing imm8 byte (fetched
       below, after the guest feature check). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/*
 * SSE/SSE2 move instruction stubs (prefix-dependent forms, none, 0x66,
 * 0xf3, 0xf2).  The //NEXT tags appear to mark next-up implementation
 * candidates -- TODO confirm against commit history.
 */
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1521
1522
1523/** Opcode 0x0f 0x18. */
1524FNIEMOP_DEF(iemOp_prefetch_Grp16)
1525{
1526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1527 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1528 {
1529 IEMOP_HLP_NO_LOCK_PREFIX();
1530 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1531 {
1532 case 4: /* Aliased to /0 for the time being according to AMD. */
1533 case 5: /* Aliased to /0 for the time being according to AMD. */
1534 case 6: /* Aliased to /0 for the time being according to AMD. */
1535 case 7: /* Aliased to /0 for the time being according to AMD. */
1536 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1537 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1538 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1539 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1541 }
1542
1543 IEM_MC_BEGIN(0, 1);
1544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1546 /* Currently a NOP. */
1547 IEM_MC_ADVANCE_RIP();
1548 IEM_MC_END();
1549 return VINF_SUCCESS;
1550 }
1551
1552 return IEMOP_RAISE_INVALID_OPCODE();
1553}
1554
1555
1556/** Opcode 0x0f 0x19..0x1f. */
1557FNIEMOP_DEF(iemOp_nop_Ev)
1558{
1559 IEMOP_HLP_NO_LOCK_PREFIX();
1560 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1561 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1562 {
1563 IEM_MC_BEGIN(0, 0);
1564 IEM_MC_ADVANCE_RIP();
1565 IEM_MC_END();
1566 }
1567 else
1568 {
1569 IEM_MC_BEGIN(0, 1);
1570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1572 /* Currently a NOP. */
1573 IEM_MC_ADVANCE_RIP();
1574 IEM_MC_END();
1575 }
1576 return VINF_SUCCESS;
1577}
1578
1579
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV Rd,Cd - read a control register into a GPR.  The operand size is
       forced to the native width (64-bit in long mode, else 32-bit). */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid sources. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1611
1612
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV Rd,Dd - read a debug register (DR0-DR7) into a GPR.  REX.R does
       not extend the DR index and is rejected with #UD.
       NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX while the 0x0f 0x23 twin
       uses IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX -- confirm intentional. */
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1626
1627
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    /* MOV Cd,Rd - write a GPR into a control register.  Mirrors the 0x0f
       0x20 decoder above: native operand width, LOCK-prefix CR8 encoding. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid destinations. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1659
1660
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV Dd,Rd - write a GPR into a debug register (DR0-DR7).  REX.R does
       not extend the DR index and is rejected with #UD. */
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1674
1675
/** Opcode 0x0f 0x24. MOV Rd,Td (test registers) - treated as invalid. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1684
1685
/** Opcode 0x0f 0x26. MOV Td,Rd (test registers) - treated as invalid. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1694
1695
1696/** Opcode 0x0f 0x28. */
1697FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1698{
1699 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
1700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1701 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1702 {
1703 /*
1704 * Register, register.
1705 */
1706 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1707 IEM_MC_BEGIN(0, 0);
1708 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1709 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1710 else
1711 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1713 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
1714 (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
1715 IEM_MC_ADVANCE_RIP();
1716 IEM_MC_END();
1717 }
1718 else
1719 {
1720 /*
1721 * Register, memory.
1722 */
1723 IEM_MC_BEGIN(0, 2);
1724 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1726
1727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1728 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1729 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1730 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1731 else
1732 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1733 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1734
1735 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1736 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1737
1738 IEM_MC_ADVANCE_RIP();
1739 IEM_MC_END();
1740 }
1741 return VINF_SUCCESS;
1742}
1743
1744
/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* MOVAPS/MOVAPD Wps,Vps - aligned 128-bit store of an XMM register to
       register or memory.  0x66 prefix selects the PD (SSE2) form. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  Only reads the XMM state, hence FOR_READ.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1792
1793
/** Opcode 0x0f 0x2a. CVTPI2PS/CVTPI2PD/CVTSI2SS/CVTSI2SD - not implemented yet. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1796
1797
1798/** Opcode 0x0f 0x2b. */
1799#ifndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
1800FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1801{
1802 IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
1803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1804 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1805 {
1806 /*
1807 * Register, memory.
1808 */
1809 IEM_MC_BEGIN(0, 2);
1810 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1812
1813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1814 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1815 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
1816 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1817 else
1818 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1819 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1820
1821 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
1822 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);
1823
1824 IEM_MC_ADVANCE_RIP();
1825 IEM_MC_END();
1826 }
1827 /* The register, register encoding is invalid. */
1828 else
1829 return IEMOP_RAISE_INVALID_OPCODE();
1830 return VINF_SUCCESS;
1831}
1832#else
1833FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
1834#endif
1835
1836
/* SSE/SSE2 convert and compare stubs. */
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1845
1846
/** Opcode 0x0f 0x30. WRMSR; deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1854
1855
/** Opcode 0x0f 0x31. RDTSC; deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1863
1864
/** Opcode 0x0f 0x32 (was mislabelled 0x33). RDMSR; deferred to the CIMPL worker. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1872
1873
/** Opcode 0x0f 0x33 (was mislabelled 0x34). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. Three-byte escape table A4 - UD stub for now. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. Three-byte escape table A5 - UD stub for now. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1886
1887
1888/**
1889 * Implements a conditional move.
1890 *
1891 * Wish there was an obvious way to do this where we could share and reduce
1892 * code bloat.
1893 *
1894 * @param a_Cnd The conditional "microcode" operation.
1895 */
1896#define CMOV_X(a_Cnd) \
1897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1898 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1899 { \
1900 switch (pIemCpu->enmEffOpSize) \
1901 { \
1902 case IEMMODE_16BIT: \
1903 IEM_MC_BEGIN(0, 1); \
1904 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1905 a_Cnd { \
1906 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1907 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1908 } IEM_MC_ENDIF(); \
1909 IEM_MC_ADVANCE_RIP(); \
1910 IEM_MC_END(); \
1911 return VINF_SUCCESS; \
1912 \
1913 case IEMMODE_32BIT: \
1914 IEM_MC_BEGIN(0, 1); \
1915 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1916 a_Cnd { \
1917 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1918 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1919 } IEM_MC_ELSE() { \
1920 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1921 } IEM_MC_ENDIF(); \
1922 IEM_MC_ADVANCE_RIP(); \
1923 IEM_MC_END(); \
1924 return VINF_SUCCESS; \
1925 \
1926 case IEMMODE_64BIT: \
1927 IEM_MC_BEGIN(0, 1); \
1928 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1929 a_Cnd { \
1930 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1931 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1932 } IEM_MC_ENDIF(); \
1933 IEM_MC_ADVANCE_RIP(); \
1934 IEM_MC_END(); \
1935 return VINF_SUCCESS; \
1936 \
1937 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1938 } \
1939 } \
1940 else \
1941 { \
1942 switch (pIemCpu->enmEffOpSize) \
1943 { \
1944 case IEMMODE_16BIT: \
1945 IEM_MC_BEGIN(0, 2); \
1946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1947 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1949 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1950 a_Cnd { \
1951 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1952 } IEM_MC_ENDIF(); \
1953 IEM_MC_ADVANCE_RIP(); \
1954 IEM_MC_END(); \
1955 return VINF_SUCCESS; \
1956 \
1957 case IEMMODE_32BIT: \
1958 IEM_MC_BEGIN(0, 2); \
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1960 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1962 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1963 a_Cnd { \
1964 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1965 } IEM_MC_ELSE() { \
1966 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1967 } IEM_MC_ENDIF(); \
1968 IEM_MC_ADVANCE_RIP(); \
1969 IEM_MC_END(); \
1970 return VINF_SUCCESS; \
1971 \
1972 case IEMMODE_64BIT: \
1973 IEM_MC_BEGIN(0, 2); \
1974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1975 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1977 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1978 a_Cnd { \
1979 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1980 } IEM_MC_ENDIF(); \
1981 IEM_MC_ADVANCE_RIP(); \
1982 IEM_MC_END(); \
1983 return VINF_SUCCESS; \
1984 \
1985 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1986 } \
1987 } do {} while (0)
1988
1989
1990
1991/** Opcode 0x0f 0x40. */
1992FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
1993{
1994 IEMOP_MNEMONIC("cmovo Gv,Ev");
1995 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
1996}
1997
1998
1999/** Opcode 0x0f 0x41. */
2000FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2001{
2002 IEMOP_MNEMONIC("cmovno Gv,Ev");
2003 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2004}
2005
2006
2007/** Opcode 0x0f 0x42. */
2008FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2009{
2010 IEMOP_MNEMONIC("cmovc Gv,Ev");
2011 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2012}
2013
2014
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    /* Move if CF is clear (aka cmovae/cmovnb). */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
2021
2022
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    /* Move if ZF is set (aka cmovz). */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
2029
2030
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    /* Move if ZF is clear (aka cmovnz). */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
2037
2038
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    /* Move if CF or ZF is set (aka cmovna). */
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2045
2046
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    /* Move if both CF and ZF are clear (aka cmova). */
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
2053
2054
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    /* Move if SF is set. */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
2061
2062
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    /* Move if SF is clear. */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
2069
2070
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    /* Move if PF is set (aka cmovpe). */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
2077
2078
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    /* Move if PF is clear (aka cmovpo). */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
2085
2086
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    /* Move if SF != OF (aka cmovnge). */
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
2093
2094
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    /* Move if SF == OF (aka cmovge). */
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
2101
2102
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    /* Move if ZF is set or SF != OF (aka cmovng). */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2109
2110
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    /* Move if ZF is clear and SF == OF (aka cmovg). */
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
2117
#undef CMOV_X

/*
 * Opcodes 0x0f 0x50 thru 0x5f (SSE/SSE2 floating-point and conversion ops)
 * are not implemented yet; FNIEMOP_STUB declares placeholder handlers.
 */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2152
2153
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit fetch
 * for SSE (only the low 64 bits of the 128-bit operand are used).
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the 128-bit (SSE) and 64-bit (MMX) worker
 *                  functions; pfnU64 may be NULL when there is no MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 0x66 prefix selects the SSE form, no prefix the MMX form; F2/F3 are invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only 64 bits are fetched, but 128-bit alignment is enforced. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint32_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2264
2265
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Interleave low bytes; dispatch to the shared low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2272
2273
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Interleave low words; dispatch to the shared low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2280
2281
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Interleave low dwords; dispatch to the shared low-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2288
2289
/* Not implemented yet: pack and compare ops 0x0f 0x63..0x67. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2300
2301
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pImpl   Table with the 128-bit (SSE) and 64-bit (MMX) worker
 *                  functions; pfnU64 may be NULL when there is no MMX form.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The 0x66 prefix selects the SSE form, no prefix the MMX form; F2/F3 are invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2412
2413
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* Interleave high bytes; dispatch to the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2420
2421
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* Interleave high words; dispatch to the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2428
2429
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* Interleave high dwords; dispatch to the shared high-half worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2436
/** Opcode 0x0f 0x6b. Not implemented yet. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2439
2440
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2-only (no MMX form; the worker rejects the no-prefix case when pfnU64 is NULL). */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2447
2448
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2-only (no MMX form; the worker rejects the no-prefix case when pfnU64 is NULL). */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2455
2456
/** Opcode 0x0f 0x6e.
 * movd/movq from a general register or memory into an XMM (66 prefix) or
 * MMX (no prefix) register; REX.W selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: full 64-bit GPR, zero-extended into the 128-bit register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    /* movd: 32-bit GPR, zero-extended into the 128-bit register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                /* NOTE(review): cbImm=1 looks suspicious — movd/movq has no immediate
                   byte and the sibling workers pass 0 here; confirm. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): cbImm=1 — same concern as the SSE path above; confirm. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2561
2562
/** Opcode 0x0f 0x6f.
 * movq (MMX), movdqa (66 prefix, aligned) or movdqu (F3 prefix, unaligned)
 * register/memory to register load. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                /* movdqa enforces 16-byte alignment (#GP otherwise); movdqu does not. */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2659
2660
/** Opcode 0x0f 0x70. The immediate here is evil!
 * Evil because it follows the ModR/M byte (and any displacement), so in the
 * memory forms it must be fetched AFTER the effective address calculation. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the decode; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate is fetched after the effective address decode. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                /* pshufw requires SSE or the AMD MMX extensions, not plain MMX. */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate is fetched after the effective address decode. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2789
2790
/* Group 12 leaf handlers (word shifts by immediate) - not implemented yet. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2808
2809
/** Opcode 0x0f 0x71.
 * Group 12: word shifts by immediate (psrlw/psraw/psllw). Only register
 * forms (mod=3) are valid; /reg selects the operation and the operand-size
 * prefix selects the SSE (xmm) vs MMX (mm) variant. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Memory forms are undefined for this group. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2844
2845
/* Group 13 leaf handlers (dword shifts by immediate) - not implemented yet. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2863
2864
/** Opcode 0x0f 0x72.
 * Group 13: dword shifts by immediate (psrld/psrad/pslld). Only register
 * forms (mod=3) are valid; /reg selects the operation and the operand-size
 * prefix selects the SSE (xmm) vs MMX (mm) variant. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Memory forms are undefined for this group. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2899
2900
/* Group 14 leaf handlers (qword/octword shifts by immediate) - not implemented yet. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2918
2919
/** Opcode 0x0f 0x73.
 * Group 14: qword shifts by immediate (psrlq/psllq) plus the SSE2-only
 * byte shifts psrldq (/3) and pslldq (/7). Only register forms (mod=3)
 * are valid; /3 and /7 additionally require the 0x66 prefix. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Memory forms are undefined for this group. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE2 only, no MMX form. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE2 only, no MMX form. */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2959
2960
2961/**
2962 * Common worker for SSE2 and MMX instructions on the forms:
2963 * pxxx mm1, mm2/mem64
2964 * pxxx xmm1, xmm2/mem128
2965 *
2966 * Proper alignment of the 128-bit operand is enforced.
2967 * Exceptions type 4. SSE2 and MMX cpuid checks.
2968 */
2969FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2970{
2971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2972 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2973 {
2974 case IEM_OP_PRF_SIZE_OP: /* SSE */
2975 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2976 {
2977 /*
2978 * Register, register.
2979 */
2980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2981 IEM_MC_BEGIN(2, 0);
2982 IEM_MC_ARG(uint128_t *, pDst, 0);
2983 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2984 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2985 IEM_MC_PREPARE_SSE_USAGE();
2986 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2987 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2988 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2989 IEM_MC_ADVANCE_RIP();
2990 IEM_MC_END();
2991 }
2992 else
2993 {
2994 /*
2995 * Register, memory.
2996 */
2997 IEM_MC_BEGIN(2, 2);
2998 IEM_MC_ARG(uint128_t *, pDst, 0);
2999 IEM_MC_LOCAL(uint128_t, uSrc);
3000 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3002
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3006 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3007
3008 IEM_MC_PREPARE_SSE_USAGE();
3009 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3010 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3011
3012 IEM_MC_ADVANCE_RIP();
3013 IEM_MC_END();
3014 }
3015 return VINF_SUCCESS;
3016
3017 case 0: /* MMX */
3018 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3019 {
3020 /*
3021 * Register, register.
3022 */
3023 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3024 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_BEGIN(2, 0);
3027 IEM_MC_ARG(uint64_t *, pDst, 0);
3028 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3029 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3030 IEM_MC_PREPARE_FPU_USAGE();
3031 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3032 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3033 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3034 IEM_MC_ADVANCE_RIP();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /*
3040 * Register, memory.
3041 */
3042 IEM_MC_BEGIN(2, 2);
3043 IEM_MC_ARG(uint64_t *, pDst, 0);
3044 IEM_MC_LOCAL(uint64_t, uSrc);
3045 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3047
3048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3050 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3051 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3052
3053 IEM_MC_PREPARE_FPU_USAGE();
3054 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3055 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3056
3057 IEM_MC_ADVANCE_RIP();
3058 IEM_MC_END();
3059 }
3060 return VINF_SUCCESS;
3061
3062 default:
3063 return IEMOP_RAISE_INVALID_OPCODE();
3064 }
3065}
3066
3067
/** Opcode 0x0f 0x74 - pcmpeqb Pq,Qq (MMX) / pcmpeqb Vdq,Wdq (SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Defer to the common MMX/SSE2 worker with the pcmpeqb implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3074
3075
/** Opcode 0x0f 0x75 - pcmpeqw Pq,Qq (MMX) / pcmpeqw Vdq,Wdq (SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Defer to the common MMX/SSE2 worker with the pcmpeqw implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3082
3083
/** Opcode 0x0f 0x76 - pcmpeqd Pq,Qq (MMX) / pcmpeqd Vdq,Wdq (SSE2).
 * @note The identifier is missing a 'q' ("pcmped"); kept as-is since the
 *       opcode dispatch table references this exact name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Defer to the common MMX/SSE2 worker with the pcmpeqd implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3090
3091
/** Opcode 0x0f 0x77 - emms (stub, not implemented yet). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread / AMD group 17 (stub, decodes as \#UD). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite (stub, decodes as \#UD). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps (stub, not implemented yet). */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps (stub, not implemented yet). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3102
3103
/** Opcode 0x0f 0x7e - movd/movq Ey,Pd/q (MMX) / movd/movq Ey,Vy (66h, SSE2).
 * Stores the low 32 or 64 bits (REX.W selects 64) of an MMX or XMM register
 * to a general purpose register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* 0x66 selects the SSE2 (XMM) form, no prefix the MMX form; any
       REPZ/REPNZ combination decodes as invalid opcode. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): the cbImm argument is 1 here although this
                   opcode takes no immediate; sibling decoders pass 0 -
                   verify against the IEM_MC_CALC_RM_EFF_ADDR contract. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                /* NOTE(review): cbImm=1 here too - see note in the SSE path. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3214
3215
/** Opcode 0x0f 0x7f - movq Qq,Pq (MMX) / movdqa Wdq,Vdq (66h) /
 * movdqu Wdq,Vdq (F3h). Stores an MMX or XMM register to register/memory;
 * the movdqa form enforces 16-byte alignment, movdqu does not. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3315
3316
3317
/** Opcode 0x0f 0x80 - jo Jv (jump near if OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc forms require a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);   /* rel16 displacement */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);   /* rel32 displacement */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3352
3353
/** Opcode 0x0f 0x81 - jno Jv (jump near if OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3388
3389
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv (jump near if CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3424
3425
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv (jump near if CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3460
3461
/** Opcode 0x0f 0x84 - je/jz Jv (jump near if ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3496
3497
/** Opcode 0x0f 0x85 - jne/jnz Jv (jump near if ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3532
3533
/** Opcode 0x0f 0x86 - jbe/jna Jv (jump near if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3568
3569
/** Opcode 0x0f 0x87 - jnbe/ja Jv (jump near if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3604
3605
/** Opcode 0x0f 0x88 - js Jv (jump near if SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3640
3641
/** Opcode 0x0f 0x89 - jns Jv (jump near if SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3676
3677
/** Opcode 0x0f 0x8a - jp/jpe Jv (jump near if PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3712
3713
3714/** Opcode 0x0f 0x8b. */
3715FNIEMOP_DEF(iemOp_jnp_Jv)
3716{
3717 IEMOP_MNEMONIC("jo Jv");
3718 IEMOP_HLP_MIN_386();
3719 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3720 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3721 {
3722 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3723 IEMOP_HLP_NO_LOCK_PREFIX();
3724
3725 IEM_MC_BEGIN(0, 0);
3726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3727 IEM_MC_ADVANCE_RIP();
3728 } IEM_MC_ELSE() {
3729 IEM_MC_REL_JMP_S16(i16Imm);
3730 } IEM_MC_ENDIF();
3731 IEM_MC_END();
3732 }
3733 else
3734 {
3735 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3736 IEMOP_HLP_NO_LOCK_PREFIX();
3737
3738 IEM_MC_BEGIN(0, 0);
3739 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3740 IEM_MC_ADVANCE_RIP();
3741 } IEM_MC_ELSE() {
3742 IEM_MC_REL_JMP_S32(i32Imm);
3743 } IEM_MC_ENDIF();
3744 IEM_MC_END();
3745 }
3746 return VINF_SUCCESS;
3747}
3748
3749
/** Opcode 0x0f 0x8c - jl/jnge Jv (jump near if SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3784
3785
/** Opcode 0x0f 0x8d - jnl/jge Jv (jump near if SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3820
3821
/** Opcode 0x0f 0x8e - jle/jng Jv (jump near if ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3856
3857
/** Opcode 0x0f 0x8f - jnle/jg Jv (jump near if ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3892
3893
/** Opcode 0x0f 0x90 - seto Eb (set r/m8 to 1 if OF=1, else 0). */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();                /* Two-byte SETcc forms require a 386+. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3933
3934
/** Opcode 0x0f 0x91 - setno Eb (set r/m8 to 1 if OF=0, else 0). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3974
3975
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb (set r/m8 to 1 if CF=1, else 0). */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4015
4016
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb (set r/m8 to 1 if CF=0, else 0). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4056
4057
/** Opcode 0x0f 0x94 - sete/setz Eb (set r/m8 to 1 if ZF=1, else 0). */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4097
4098
/** Opcode 0x0f 0x95 - setne/setnz Eb (set r/m8 to 1 if ZF=0, else 0). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4138
4139
/**
 * Opcode 0x0f 0x96 - SETBE/SETNA Eb.
 *
 * Stores 1 in the byte destination (register or memory) when CF or ZF is
 * set (unsigned below-or-equal), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4179
4180
/**
 * Opcode 0x0f 0x97 - SETNBE/SETA Eb.
 *
 * Stores 1 in the byte destination (register or memory) when both CF and
 * ZF are clear (unsigned above), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4220
4221
/**
 * Opcode 0x0f 0x98 - SETS Eb.
 *
 * Stores 1 in the byte destination (register or memory) when SF is set,
 * otherwise 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4261
4262
/**
 * Opcode 0x0f 0x99 - SETNS Eb.
 *
 * Stores 1 in the byte destination (register or memory) when SF is clear,
 * otherwise 0.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4302
4303
4304/** Opcode 0x0f 0x9a. */
4305FNIEMOP_DEF(iemOp_setp_Eb)
4306{
4307 IEMOP_MNEMONIC("setnp Eb");
4308 IEMOP_HLP_MIN_386();
4309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4310 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4311
4312 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4313 * any way. AMD says it's "unused", whatever that means. We're
4314 * ignoring for now. */
4315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4316 {
4317 /* register target */
4318 IEM_MC_BEGIN(0, 0);
4319 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4320 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4321 } IEM_MC_ELSE() {
4322 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4323 } IEM_MC_ENDIF();
4324 IEM_MC_ADVANCE_RIP();
4325 IEM_MC_END();
4326 }
4327 else
4328 {
4329 /* memory target */
4330 IEM_MC_BEGIN(0, 1);
4331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4333 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4334 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4335 } IEM_MC_ELSE() {
4336 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4337 } IEM_MC_ENDIF();
4338 IEM_MC_ADVANCE_RIP();
4339 IEM_MC_END();
4340 }
4341 return VINF_SUCCESS;
4342}
4343
4344
/**
 * Opcode 0x0f 0x9b - SETNP/SETPO Eb.
 *
 * Stores 1 in the byte destination (register or memory) when PF is clear,
 * otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4384
4385
/**
 * Opcode 0x0f 0x9c - SETL/SETNGE Eb.
 *
 * Stores 1 in the byte destination (register or memory) when SF != OF
 * (signed less-than), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4425
4426
/**
 * Opcode 0x0f 0x9d - SETNL/SETGE Eb.
 *
 * Stores 1 in the byte destination (register or memory) when SF == OF
 * (signed greater-or-equal), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4466
4467
/**
 * Opcode 0x0f 0x9e - SETLE/SETNG Eb.
 *
 * Stores 1 in the byte destination (register or memory) when ZF is set or
 * SF != OF (signed less-or-equal), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4507
4508
/**
 * Opcode 0x0f 0x9f - SETNLE/SETG Eb.
 *
 * Stores 1 in the byte destination (register or memory) when ZF is clear
 * and SF == OF (signed greater-than), otherwise 0.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4548
4549
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the given segment register for the current effective operand size.
 * ES/CS/SS/DS (iReg < X86_SREG_FS) are invalid in 64-bit mode; FS/GS remain
 * pushable there.  The 64-bit default operand size rule is applied.
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();    /* ES/CS/SS/DS pushes don't exist in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): dedicated _SREG push variant — presumably handles the
               CPU quirk of 32-bit segment pushes only writing the low word; confirm
               against the IEM_MC_PUSH_U32_SREG implementation. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4592
4593
/**
 * Opcode 0x0f 0xa0 - PUSH FS.
 *
 * Defers to the common segment-register push helper.
 */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4602
4603
/**
 * Opcode 0x0f 0xa1 - POP FS.
 *
 * Segment register loads have side effects (descriptor fetch, checks), so
 * this defers to the C implementation rather than using IEM_MC microcode.
 */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4612
4613
/**
 * Opcode 0x0f 0xa2 - CPUID.
 *
 * Entirely handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4622
4623
4624/**
4625 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4626 * iemOp_bts_Ev_Gv.
4627 */
4628FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4629{
4630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4631 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4632
4633 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4634 {
4635 /* register destination. */
4636 IEMOP_HLP_NO_LOCK_PREFIX();
4637 switch (pIemCpu->enmEffOpSize)
4638 {
4639 case IEMMODE_16BIT:
4640 IEM_MC_BEGIN(3, 0);
4641 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4642 IEM_MC_ARG(uint16_t, u16Src, 1);
4643 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4644
4645 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4646 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4647 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4648 IEM_MC_REF_EFLAGS(pEFlags);
4649 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4650
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 return VINF_SUCCESS;
4654
4655 case IEMMODE_32BIT:
4656 IEM_MC_BEGIN(3, 0);
4657 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4658 IEM_MC_ARG(uint32_t, u32Src, 1);
4659 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4660
4661 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4662 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4663 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4664 IEM_MC_REF_EFLAGS(pEFlags);
4665 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4666
4667 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4668 IEM_MC_ADVANCE_RIP();
4669 IEM_MC_END();
4670 return VINF_SUCCESS;
4671
4672 case IEMMODE_64BIT:
4673 IEM_MC_BEGIN(3, 0);
4674 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4675 IEM_MC_ARG(uint64_t, u64Src, 1);
4676 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4677
4678 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4679 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4680 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4681 IEM_MC_REF_EFLAGS(pEFlags);
4682 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4683
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 return VINF_SUCCESS;
4687
4688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4689 }
4690 }
4691 else
4692 {
4693 /* memory destination. */
4694
4695 uint32_t fAccess;
4696 if (pImpl->pfnLockedU16)
4697 fAccess = IEM_ACCESS_DATA_RW;
4698 else /* BT */
4699 {
4700 IEMOP_HLP_NO_LOCK_PREFIX();
4701 fAccess = IEM_ACCESS_DATA_R;
4702 }
4703
4704 NOREF(fAccess);
4705
4706 /** @todo test negative bit offsets! */
4707 switch (pIemCpu->enmEffOpSize)
4708 {
4709 case IEMMODE_16BIT:
4710 IEM_MC_BEGIN(3, 2);
4711 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4712 IEM_MC_ARG(uint16_t, u16Src, 1);
4713 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4715 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4716
4717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4718 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4719 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4720 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4721 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4722 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4723 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4724 IEM_MC_FETCH_EFLAGS(EFlags);
4725
4726 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4727 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4728 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4729 else
4730 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4731 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4732
4733 IEM_MC_COMMIT_EFLAGS(EFlags);
4734 IEM_MC_ADVANCE_RIP();
4735 IEM_MC_END();
4736 return VINF_SUCCESS;
4737
4738 case IEMMODE_32BIT:
4739 IEM_MC_BEGIN(3, 2);
4740 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4741 IEM_MC_ARG(uint32_t, u32Src, 1);
4742 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4744 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4745
4746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4747 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4748 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4749 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4750 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4751 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4752 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4753 IEM_MC_FETCH_EFLAGS(EFlags);
4754
4755 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4756 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4757 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4758 else
4759 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4760 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4761
4762 IEM_MC_COMMIT_EFLAGS(EFlags);
4763 IEM_MC_ADVANCE_RIP();
4764 IEM_MC_END();
4765 return VINF_SUCCESS;
4766
4767 case IEMMODE_64BIT:
4768 IEM_MC_BEGIN(3, 2);
4769 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4770 IEM_MC_ARG(uint64_t, u64Src, 1);
4771 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4773 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4774
4775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4776 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4777 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4778 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4779 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4780 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4781 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4782 IEM_MC_FETCH_EFLAGS(EFlags);
4783
4784 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4785 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4787 else
4788 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4789 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4790
4791 IEM_MC_COMMIT_EFLAGS(EFlags);
4792 IEM_MC_ADVANCE_RIP();
4793 IEM_MC_END();
4794 return VINF_SUCCESS;
4795
4796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4797 }
4798 }
4799}
4800
4801
4802/** Opcode 0x0f 0xa3. */
4803FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4804{
4805 IEMOP_MNEMONIC("bt Gv,Gv");
4806 IEMOP_HLP_MIN_386();
4807 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4808}
4809
4810
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate byte count.  For memory operands
 * the effective address is calculated with a 1-byte immediate allowance so
 * the Ib fetched afterwards is accounted for (relevant for RIP-relative
 * addressing).  AF and OF are undefined after these instructions.
 *
 * @param   pImpl   Pointer to the shift-double implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: the immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: account for the immediate byte that follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4955
4956
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from CL.  AF and OF are
 * undefined after these instructions.
 *
 * @param   pImpl   Pointer to the shift-double implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5100
5101
5102
/** Opcode 0x0f 0xa4. SHLD Ev,Gv,Ib - double precision shift left, immediate
 *  shift count.  Defers to the common SHLD/SHRD immediate-count worker with
 *  the SHLD assembly implementation table. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5110
5111
/** Opcode 0x0f 0xa5. SHLD Ev,Gv,CL - double precision shift left, shift count
 *  taken from CL.  Defers to the common SHLD/SHRD CL-count worker with the
 *  SHLD assembly implementation table. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5119
5120
/** Opcode 0x0f 0xa8. PUSH GS - pushes the GS segment selector, via the common
 *  segment-register push worker. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386(); /* GS first appeared on the 80386. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5129
5130
/** Opcode 0x0f 0xa9. POP GS - pops into the GS segment register.  Deferred to
 *  a C implementation since segment loading can fault and reload descriptors. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386(); /* GS first appeared on the 80386. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5139
5140
/** Opcode 0x0f 0xaa. RSM - resume from system management mode; not yet
 *  implemented (stub). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5144
5145
/** Opcode 0x0f 0xab. BTS Ev,Gv - bit test and set, via the common bit-op
 *  worker with the BTS implementation table. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5153
5154
/** Opcode 0x0f 0xac. SHRD Ev,Gv,Ib - double precision shift right, immediate
 *  shift count.  Defers to the common SHLD/SHRD immediate-count worker with
 *  the SHRD assembly implementation table. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5162
5163
/** Opcode 0x0f 0xad. SHRD Ev,Gv,CL - double precision shift right, shift
 *  count taken from CL.  Defers to the common SHLD/SHRD CL-count worker with
 *  the SHRD assembly implementation table. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5171
5172
/** Opcode 0x0f 0xae mem/0. FXSAVE m512 - save FPU/SSE state to memory.
 *  Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 *  otherwise defers to the iemCImpl_fxsave C worker with the effective
 *  segment, address and operand size. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5191
5192
/** Opcode 0x0f 0xae mem/1. FXRSTOR m512 - restore FPU/SSE state from memory.
 *  Raises \#UD when the guest CPU profile lacks FXSAVE/FXRSTOR support;
 *  otherwise defers to the iemCImpl_fxrstor C worker with the effective
 *  segment, address and operand size. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5211
5212
/** Opcode 0x0f 0xae mem/2. LDMXCSR - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. STMXCSR - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. XSAVE - declared as a \#UD stub (the UD stub
 *  macro; presumably decodes to invalid opcode until implemented). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. XRSTOR - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. XSAVEOPT - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. CLFLUSH - not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5230
5231
/** Opcode 0x0f 0xae 11b/5. LFENCE - load fence.
 *  Requires SSE2 in the guest CPU profile (\#UD otherwise).  When the host
 *  CPU also has SSE2 the real lfence helper is used; otherwise an
 *  alternative memory-fence helper stands in. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5249
5250
/** Opcode 0x0f 0xae 11b/6. MFENCE - full memory fence.
 *  Requires SSE2 in the guest CPU profile (\#UD otherwise).  When the host
 *  CPU also has SSE2 the real mfence helper is used; otherwise an
 *  alternative memory-fence helper stands in. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5268
5269
/** Opcode 0x0f 0xae 11b/7. SFENCE - store fence.
 *  Requires SSE2 in the guest CPU profile (\#UD otherwise; note real sfence
 *  is an SSE instruction - the SSE2 gate mirrors the sibling fence decoders
 *  here).  When the host CPU has SSE2 the real sfence helper is used;
 *  otherwise an alternative memory-fence helper stands in. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5287
5288
/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE - declared as a \#UD stub. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5300
5301
/** Opcode 0x0f 0xae. Group 15 dispatcher.
 *
 *  Memory forms (mod != 3) dispatch on the reg field to fxsave/fxrstor/
 *  ldmxcsr/stmxcsr/xsave/xrstor/xsaveopt/clflush.  Register forms (mod == 3)
 *  additionally dispatch on the repz/repnz/opsize/lock prefixes: no prefix
 *  gives the fence instructions in /5../7, F3 (repz) gives the
 *  rd/wr fs/gs base group, anything else is \#UD. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms: the prefix combination selects the table. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access group. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5361
5362
/** Opcode 0x0f 0xaf. IMUL Gv,Ev - two operand signed multiply, via the
 *  common reg,reg/mem binary-operator worker.  SF/ZF/AF/PF are marked
 *  undefined for the verifier. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5371
5372
/** Opcode 0x0f 0xb0. CMPXCHG Eb,Gb - compare AL with Eb and exchange.
 *  The register form references AL and the destination register directly;
 *  the memory form maps the destination read/write, works on a local AL
 *  copy and stores it back afterwards.  A locked assembly variant is
 *  selected when the LOCK prefix is present. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* Instruction introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map RW, compare against a local AL copy. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* Write back the (possibly updated) AL copy. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5431
/** Opcode 0x0f 0xb1. CMPXCHG Ev,Gv - compare {AX,EAX,RAX} with Ev and
 *  exchange, for 16/32/64-bit operand sizes.  Register forms reference the
 *  accumulator and destination directly; memory forms map the destination
 *  read/write, work on a local accumulator copy and store it back.  A locked
 *  assembly variant is selected when the LOCK prefix is present.  On 32-bit
 *  hosts (RT_ARCH_X86) the 64-bit source is passed by reference instead of
 *  by value. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* Instruction introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the upper halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map RW, compare against a local accumulator copy. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* Write back the (possibly updated) AX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* Write back the (possibly updated) EAX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* Write back the (possibly updated) RAX copy. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5625
5626
/**
 * Common worker for LSS/LFS/LGS (far pointer loads): fetches the offset and
 * selector from memory and defers to iemCImpl_load_SReg_Greg to load the
 * segment register and general register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Far pointer layout: 16-bit offset followed by the 16-bit selector. */
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Far pointer layout: 32-bit offset followed by the 16-bit selector. */
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5688
5689
/** Opcode 0x0f 0xb2. LSS Gv,Mp - load far pointer into SS:Gv.  Register
 *  operand forms are invalid (\#UD). */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
5700
5701
5702/** Opcode 0x0f 0xb3. */
5703FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5704{
5705 IEMOP_MNEMONIC("btr Ev,Gv");
5706 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5707}
5708
5709
/** Opcode 0x0f 0xb4. LFS Gv,Mp - load far pointer into FS:Gv.  Register
 *  operand forms are invalid (\#UD). */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
5720
5721
/** Opcode 0x0f 0xb5. LGS Gv,Mp - load far pointer into GS:Gv.  Register
 *  operand forms are invalid (\#UD). */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
5732
5733
/** Opcode 0x0f 0xb6. MOVZX Gv,Eb - move byte to word/dword/qword with zero
 *  extension, for both register and memory source forms. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5824
5825
/** Opcode 0x0f 0xb7. MOVZX Gv,Ew - move word to dword/qword with zero
 *  extension.  16-bit and 32-bit effective operand sizes are both handled by
 *  the 32-bit path (see the todo about operand-size prefix handling). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5892
5893
/** Opcode 0x0f 0xb8. POPCNT Gv,Ev (F3 prefix) / JMPE - not yet implemented
 *  (stub). */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5896
5897
/** Opcode 0x0f 0xb9. Group 10 (UD1) - always decodes to \#UD; logged for
 *  diagnostics. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5904
5905
5906/** Opcode 0x0f 0xba. */
5907FNIEMOP_DEF(iemOp_Grp8)
5908{
5909 IEMOP_HLP_MIN_386();
5910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5911 PCIEMOPBINSIZES pImpl;
5912 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5913 {
5914 case 0: case 1: case 2: case 3:
5915 return IEMOP_RAISE_INVALID_OPCODE();
5916 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
5917 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5918 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5919 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5920 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5921 }
5922 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5923
5924 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5925 {
5926 /* register destination. */
5927 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5928 IEMOP_HLP_NO_LOCK_PREFIX();
5929
5930 switch (pIemCpu->enmEffOpSize)
5931 {
5932 case IEMMODE_16BIT:
5933 IEM_MC_BEGIN(3, 0);
5934 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5935 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5936 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5937
5938 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5939 IEM_MC_REF_EFLAGS(pEFlags);
5940 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5941
5942 IEM_MC_ADVANCE_RIP();
5943 IEM_MC_END();
5944 return VINF_SUCCESS;
5945
5946 case IEMMODE_32BIT:
5947 IEM_MC_BEGIN(3, 0);
5948 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5949 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5950 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5951
5952 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5953 IEM_MC_REF_EFLAGS(pEFlags);
5954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5955
5956 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5957 IEM_MC_ADVANCE_RIP();
5958 IEM_MC_END();
5959 return VINF_SUCCESS;
5960
5961 case IEMMODE_64BIT:
5962 IEM_MC_BEGIN(3, 0);
5963 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5964 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5966
5967 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5968 IEM_MC_REF_EFLAGS(pEFlags);
5969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5970
5971 IEM_MC_ADVANCE_RIP();
5972 IEM_MC_END();
5973 return VINF_SUCCESS;
5974
5975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5976 }
5977 }
5978 else
5979 {
5980 /* memory destination. */
5981
5982 uint32_t fAccess;
5983 if (pImpl->pfnLockedU16)
5984 fAccess = IEM_ACCESS_DATA_RW;
5985 else /* BT */
5986 {
5987 IEMOP_HLP_NO_LOCK_PREFIX();
5988 fAccess = IEM_ACCESS_DATA_R;
5989 }
5990
5991 /** @todo test negative bit offsets! */
5992 switch (pIemCpu->enmEffOpSize)
5993 {
5994 case IEMMODE_16BIT:
5995 IEM_MC_BEGIN(3, 1);
5996 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5997 IEM_MC_ARG(uint16_t, u16Src, 1);
5998 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6000
6001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6002 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6003 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6004 IEM_MC_FETCH_EFLAGS(EFlags);
6005 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6006 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6008 else
6009 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6011
6012 IEM_MC_COMMIT_EFLAGS(EFlags);
6013 IEM_MC_ADVANCE_RIP();
6014 IEM_MC_END();
6015 return VINF_SUCCESS;
6016
6017 case IEMMODE_32BIT:
6018 IEM_MC_BEGIN(3, 1);
6019 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6020 IEM_MC_ARG(uint32_t, u32Src, 1);
6021 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6023
6024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6025 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6026 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6027 IEM_MC_FETCH_EFLAGS(EFlags);
6028 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6029 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6030 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6031 else
6032 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6033 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6034
6035 IEM_MC_COMMIT_EFLAGS(EFlags);
6036 IEM_MC_ADVANCE_RIP();
6037 IEM_MC_END();
6038 return VINF_SUCCESS;
6039
6040 case IEMMODE_64BIT:
6041 IEM_MC_BEGIN(3, 1);
6042 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6043 IEM_MC_ARG(uint64_t, u64Src, 1);
6044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6046
6047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6048 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6049 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6050 IEM_MC_FETCH_EFLAGS(EFlags);
6051 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
6052 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6053 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6054 else
6055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6056 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6057
6058 IEM_MC_COMMIT_EFLAGS(EFlags);
6059 IEM_MC_ADVANCE_RIP();
6060 IEM_MC_END();
6061 return VINF_SUCCESS;
6062
6063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6064 }
6065 }
6066
6067}
6068
6069
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv - bit test and complement; defers to the common bit-op worker. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6077
6078
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev - bit scan forward; all flags except ZF are undefined. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6087
6088
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev - bit scan reverse; all flags except ZF are undefined. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6097
6098
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb - sign-extend a byte (register or memory) into a 16/32/64-bit GPR. */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch with sign-extension, store into the reg field GPR. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Note: the effective-address local is named GCPtrEffDst but serves as the source here. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6189
6190
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew - sign-extend a word (register or memory) into a 32/64-bit GPR.
       Only two effective destination sizes matter: !64-bit -> 32-bit, 64-bit -> 64-bit. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6257
6258
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb - exchange and add bytes; 486+ instruction, LOCKable in the
       memory form only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The Gb operand is snapshot into a local so the memory operand can be
           mapped R/W and the swapped-out value written back to Gb afterwards. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6317
6318
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv - exchange and add for 16/32/64-bit operands; 486+ instruction,
       LOCKable in the memory form only. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero the high dword of both 64-bit GPRs involved. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern for all sizes: snapshot Gv into a local, map Ev R/W,
           run the (possibly locked) worker, then write the old value to Gv. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6471
/** Opcode 0x0f 0xc2. (cmpps/cmppd/cmpss/cmpsd - not implemented yet, stubbed.) */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6474
6475
/** Opcode 0x0f 0xc3. */
#ifndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy - non-temporal store of a 32/64-bit GPR to memory (emulated
       here as a plain store). Requires SSE2; register destination is \#UD. */
    IEMOP_MNEMONIC("movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 check deliberately done after decoding is complete. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 check deliberately done after decoding is complete. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movnti_My_Gy); // solaris 10 uses this in hat_pte_zero().
#endif
6533
6534
/* SSE/MMX insert/extract/shuffle instructions - not implemented yet (stubs). */
/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6543
6544
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B Mq - compare EDX:EAX with the 64-bit memory operand; on match
       store ECX:EBX, otherwise load the memory value into EDX:EAX. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the comparand (EDX:EAX) ... */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* ... and the replacement value (ECX:EBX). */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Comparison failed (ZF clear): load the memory value into EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6589
6590
/* Group 9 sub-leaves that are not implemented yet; the UD stubs raise invalid
   opcode when reached. */
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6608
6609
6610/** Opcode 0x0f 0xc7. */
6611FNIEMOP_DEF(iemOp_Grp9)
6612{
6613 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6615 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6616 {
6617 case 0: case 2: case 3: case 4: case 5:
6618 return IEMOP_RAISE_INVALID_OPCODE();
6619 case 1:
6620 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6621 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6622 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6623 return IEMOP_RAISE_INVALID_OPCODE();
6624 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6625 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6626 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6627 case 6:
6628 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6629 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6630 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6631 {
6632 case 0:
6633 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6634 case IEM_OP_PRF_SIZE_OP:
6635 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6636 case IEM_OP_PRF_REPZ:
6637 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6638 default:
6639 return IEMOP_RAISE_INVALID_OPCODE();
6640 }
6641 case 7:
6642 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6643 {
6644 case 0:
6645 case IEM_OP_PRF_REPZ:
6646 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6647 default:
6648 return IEMOP_RAISE_INVALID_OPCODE();
6649 }
6650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6651 }
6652}
6653
6654
6655/**
6656 * Common 'bswap register' helper.
6657 */
6658FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6659{
6660 IEMOP_HLP_NO_LOCK_PREFIX();
6661 switch (pIemCpu->enmEffOpSize)
6662 {
6663 case IEMMODE_16BIT:
6664 IEM_MC_BEGIN(1, 0);
6665 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6666 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6667 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6668 IEM_MC_ADVANCE_RIP();
6669 IEM_MC_END();
6670 return VINF_SUCCESS;
6671
6672 case IEMMODE_32BIT:
6673 IEM_MC_BEGIN(1, 0);
6674 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6675 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6676 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6677 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6678 IEM_MC_ADVANCE_RIP();
6679 IEM_MC_END();
6680 return VINF_SUCCESS;
6681
6682 case IEMMODE_64BIT:
6683 IEM_MC_BEGIN(1, 0);
6684 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6685 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6686 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6687 IEM_MC_ADVANCE_RIP();
6688 IEM_MC_END();
6689 return VINF_SUCCESS;
6690
6691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6692 }
6693}
6694
6695
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP was introduced with the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6706
6707
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6715
6716
6717/** Opcode 0x0f 0xca. */
6718FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6719{
6720 IEMOP_MNEMONIC("bswap rDX/r9");
6721 IEMOP_HLP_MIN_486();
6722 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6723}
6724
6725
6726/** Opcode 0x0f 0xcb. */
6727FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6728{
6729 IEMOP_MNEMONIC("bswap rBX/r9");
6730 IEMOP_HLP_MIN_486();
6731 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6732}
6733
6734
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6742
6743
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6751
6752
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6760
6761
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* See iemOp_bswap_rAX_r8 regarding the REX.B register extension. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6769
6770
6771
/* MMX/SSE instructions 0x0f 0xd0..0xd6 - not implemented yet (stubs). */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6786
6787
6788/** Opcode 0x0f 0xd7. */
6789FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6790{
6791 /* Docs says register only. */
6792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6793 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6794 return IEMOP_RAISE_INVALID_OPCODE();
6795
6796 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6797 /** @todo testcase: Check that the instruction implicitly clears the high
6798 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6799 * and opcode modifications are made to work with the whole width (not
6800 * just 128). */
6801 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6802 {
6803 case IEM_OP_PRF_SIZE_OP: /* SSE */
6804 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6805 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6806 IEM_MC_BEGIN(2, 0);
6807 IEM_MC_ARG(uint64_t *, pDst, 0);
6808 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6809 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6810 IEM_MC_PREPARE_SSE_USAGE();
6811 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6812 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6813 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6814 IEM_MC_ADVANCE_RIP();
6815 IEM_MC_END();
6816 return VINF_SUCCESS;
6817
6818 case 0: /* MMX */
6819 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6820 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6821 IEM_MC_BEGIN(2, 0);
6822 IEM_MC_ARG(uint64_t *, pDst, 0);
6823 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6824 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6825 IEM_MC_PREPARE_FPU_USAGE();
6826 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6827 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6828 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6829 IEM_MC_ADVANCE_RIP();
6830 IEM_MC_END();
6831 return VINF_SUCCESS;
6832
6833 default:
6834 return IEMOP_RAISE_INVALID_OPCODE();
6835 }
6836}
6837
6838
/* MMX/SSE instructions 0x0f 0xd8..0xe6 - not implemented yet (stubs). */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6869
6870
/** Opcode 0x0f 0xe7. */
#if 1 //ndef VBOX_WITH_REM /** @todo figure out why some/all of these instructions is upsetting things */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    /* MOVNTQ (MMX, no prefix) / MOVNTDQ (SSE, 0x66) - non-temporal store of an
       MMX/XMM register to memory; emulated as a plain (aligned for SSE) store.
       Only the register -> memory form is valid. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX */
                /* MMX registers are not REX-extended, hence no uRexReg here. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
#endif
6932
6933
/* Opcodes 0x0f 0xe8..0xee: more unimplemented MMX/SSE stubs. */
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6948
6949
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Defer to the common MMX/SSE2 full,full->full decode worker with the
       PXOR implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6956
6957
/* Opcodes 0x0f 0xf0..0xfe: unimplemented MMX/SSE stubs.  The "//NEXT" tag
   marks the one scheduled to be implemented next. */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6988
6989
/**
 * Dispatch table for the two byte opcodes (escape byte 0x0f), indexed by
 * the second opcode byte.
 *
 * Note: the slot comments are labels only; the array position is what
 * determines the opcode (fixed the mislabelled 0xbb slot).
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7249
7250/** @} */
7251
7252
7253/** @name One byte opcodes.
7254 *
7255 * @{
7256 */
7257
/*
 * Opcodes 0x00..0x05: ADD.  All six encodings defer to the generic
 * binary-operator decode helpers with the ADD implementation table.
 */
/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
7304
7305
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* pop es is invalid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7322
7323
/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    /* AF is undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
7331
7332
7333/** Opcode 0x09. */
7334FNIEMOP_DEF(iemOp_or_Ev_Gv)
7335{
7336 IEMOP_MNEMONIC("or Ev,Gv ");
7337 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7338 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7339}
7340
7341
/* Opcodes 0x0a..0x0d: remaining OR encodings; AF is undefined after OR. */
/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
7376
7377
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
7384
7385
/** Opcode 0x0f - two byte opcode escape; dispatches via g_apfnTwoByteMap. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7394
/*
 * Opcodes 0x10..0x15: ADC (add with carry) - generic binary-operator
 * helpers with the ADC implementation table.
 */
/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7441
7442
/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* pop ss is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7459
7460
/*
 * Opcodes 0x18..0x1d: SBB (subtract with borrow) - generic binary-operator
 * helpers with the SBB implementation table.
 */
/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7507
7508
/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* pop ds is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7525
7526
/* Opcodes 0x20..0x25: AND; AF is undefined after AND. */
/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7579
7580
/** Opcode 0x26 - ES segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7591
7592
/** Opcode 0x27. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();                               /* DAA is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is undefined after DAA. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7602
7603
/*
 * Opcodes 0x28..0x2d: SUB - generic binary-operator helpers with the SUB
 * implementation table.
 */
/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7650
7651
/** Opcode 0x2e - CS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Record the override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7662
7663
/** Opcode 0x2f. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();                               /* DAS is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);    /* OF is undefined after DAS. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7673
7674
/* Opcodes 0x30..0x35: XOR; AF is undefined after XOR. */
/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7727
7728
/** Opcode 0x36 - SS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Record the override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7739
7740
/** Opcode 0x37 - not implemented yet. */
FNIEMOP_STUB(iemOp_aaa);
7743
7744
/*
 * Opcodes 0x38..0x3d: CMP - generic binary-operator helpers with the CMP
 * implementation table.
 *
 * NOTE(review): only the first two encodings reject the lock prefix
 * explicitly here; presumably the Gb/Gv/AL/rAX forms rely on later checks -
 * worth confirming for consistency.
 */
/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7793
7794
/** Opcode 0x3e - DS segment-override prefix. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Record the override and continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7805
7806
/** Opcode 0x3f - not implemented yet. */
FNIEMOP_STUB(iemOp_aas);
7809
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Switches on the effective operand size and applies the matching worker
 * from the supplied function table to the given general register.
 *
 * @param   pImpl   Unary operation function table (16/32/64-bit workers).
 * @param   iReg    The general purpose register to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not expected to be reached for valid enmEffOpSize values. */
    return VINF_SUCCESS;
}
7854
7855
7856/** Opcode 0x40. */
7857FNIEMOP_DEF(iemOp_inc_eAX)
7858{
7859 /*
7860 * This is a REX prefix in 64-bit mode.
7861 */
7862 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7863 {
7864 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
7865 pIemCpu->fPrefixes |= IEM_OP_PRF_REX;
7866
7867 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7868 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7869 }
7870
7871 IEMOP_MNEMONIC("inc eAX");
7872 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
7873}
7874
7875
7876/** Opcode 0x41. */
7877FNIEMOP_DEF(iemOp_inc_eCX)
7878{
7879 /*
7880 * This is a REX prefix in 64-bit mode.
7881 */
7882 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7883 {
7884 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
7885 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
7886 pIemCpu->uRexB = 1 << 3;
7887
7888 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7889 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7890 }
7891
7892 IEMOP_MNEMONIC("inc eCX");
7893 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
7894}
7895
7896
7897/** Opcode 0x42. */
7898FNIEMOP_DEF(iemOp_inc_eDX)
7899{
7900 /*
7901 * This is a REX prefix in 64-bit mode.
7902 */
7903 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7904 {
7905 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
7906 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
7907 pIemCpu->uRexIndex = 1 << 3;
7908
7909 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7910 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7911 }
7912
7913 IEMOP_MNEMONIC("inc eDX");
7914 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
7915}
7916
7917
7918
7919/** Opcode 0x43. */
7920FNIEMOP_DEF(iemOp_inc_eBX)
7921{
7922 /*
7923 * This is a REX prefix in 64-bit mode.
7924 */
7925 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
7926 {
7927 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
7928 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
7929 pIemCpu->uRexB = 1 << 3;
7930 pIemCpu->uRexIndex = 1 << 3;
7931
7932 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7933 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7934 }
7935
7936 IEMOP_MNEMONIC("inc eBX");
7937 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
7938}
7939
7940
/** Opcode 0x44 - INC eSP, or the REX.R prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* REX.R: bit 3 of the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
7960
7961
/** Opcode 0x45 - INC eBP, or the REX.RB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3; /* reg field extension */
        pIemCpu->uRexB = 1 << 3;   /* r/m / base extension */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
7982
7983
/** Opcode 0x46 - INC eSI, or the REX.RX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;   /* reg field extension */
        pIemCpu->uRexIndex = 1 << 3; /* SIB index extension */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
8004
8005
/** Opcode 0x47 - INC eDI, or the REX.RXB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;   /* reg field extension */
        pIemCpu->uRexB = 1 << 3;     /* r/m / base extension */
        pIemCpu->uRexIndex = 1 << 3; /* SIB index extension */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
8027
8028
/** Opcode 0x48 - DEC eAX, or the REX.W prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
8048
8049
/** Opcode 0x49 - DEC eCX, or the REX.WB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
8070
8071
/** Opcode 0x4a - DEC eDX, or the REX.WX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
8092
8093
/** Opcode 0x4b - DEC eBX, or the REX.WXB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
8115
8116
/** Opcode 0x4c - DEC eSP, or the REX.WR prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
8137
8138
/** Opcode 0x4d - DEC eBP, or the REX.WRB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
8160
8161
/** Opcode 0x4e - DEC eSI, or the REX.WRX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8183
8184
/** Opcode 0x4f - DEC eDI, or the full REX.WRXB prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu); /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8207
8208
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register to push (X86_GREG_XXX); extended with
 *                  REX.B in 64-bit mode.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* Push defaults to 64-bit operand size in long mode; 0x66 selects 16-bit,
           32-bit is not encodable. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8254
8255
/** Opcode 0x50 - PUSH rAX (REX.B extended in the common worker). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8262
8263
/** Opcode 0x51 - PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8270
8271
/** Opcode 0x52 - PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8278
8279
/** Opcode 0x53 - PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8286
8287
8288/** Opcode 0x54. */
8289FNIEMOP_DEF(iemOp_push_eSP)
8290{
8291 IEMOP_MNEMONIC("push rSP");
8292 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8293 {
8294 IEM_MC_BEGIN(0, 1);
8295 IEM_MC_LOCAL(uint16_t, u16Value);
8296 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8297 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8298 IEM_MC_PUSH_U16(u16Value);
8299 IEM_MC_ADVANCE_RIP();
8300 IEM_MC_END();
8301 }
8302 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8303}
8304
8305
/** Opcode 0x55 - PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8312
8313
/** Opcode 0x56 - PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8320
8321
/** Opcode 0x57 - PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8328
8329
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register to pop into (X86_GREG_XXX); extended
 *                  with REX.B in 64-bit mode.  Note that xSP is handled
 *                  separately by iemOp_pop_eSP.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        /* Pop defaults to 64-bit operand size in long mode; 0x66 selects 16-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8376
8377
/** Opcode 0x58 - POP rAX (REX.B extended in the common worker). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8384
8385
/** Opcode 0x59 - POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8392
8393
/** Opcode 0x5a - POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8400
8401
/** Opcode 0x5b - POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8408
8409
/** Opcode 0x5c - POP rSP.
 *
 * Handled separately from the common pop worker: popping into SP must store
 * the popped value into SP *after* the stack pointer increment, so the value
 * is popped into a local first and then stored, instead of popping through a
 * register reference like iemOpCommonPopGReg does.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* REX.B: really 'pop r12'. */
        /* Pop defaults to 64-bit operand size in long mode; 0x66 selects 16-bit. */
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8457
8458
/** Opcode 0x5d - POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8465
8466
/** Opcode 0x5e - POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8473
8474
/** Opcode 0x5f - POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8481
8482
/** Opcode 0x60 - PUSHA/PUSHAD.  Invalid in 64-bit mode; 186 and later. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to a C implementation; only 16/32-bit operand sizes exist here. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8494
8495
/** Opcode 0x61 - POPA/POPAD.  Invalid in 64-bit mode; 186 and later. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to a C implementation; only 16/32-bit operand sizes exist here. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8507
8508
/** Opcode 0x62 - BOUND (and the EVEX prefix on newer CPUs); not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//  IEMOP_HLP_MIN_186();
8512
8513
/** Opcode 0x63 - non-64-bit modes: ARPL Ew,Gw.
 *
 * Adjusts the RPL of the destination selector to be at least that of the
 * source; protected mode only (286+).  The actual adjustment is done by the
 * iemAImpl_arpl assembly worker.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX here (non-64-bit only), so the raw reg/rm fields suffice. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8563
8564
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source register to 64 bits.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: sign-extend a dword load.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8606
8607
/** Opcode 0x64 - FS segment override prefix (386+). */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8620
8621
/** Opcode 0x65 - GS segment override prefix (386+). */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8634
8635
/** Opcode 0x66 - operand-size override prefix (386+). */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu); /* combine with REX.W / default operand size */

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8648
8649
/** Opcode 0x67 - address-size override prefix (386+). */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* The prefix toggles between the two address sizes valid for the mode
       (16<->32 in legacy modes, 64->32 in long mode). */
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Continue decoding with the next opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8668
8669
/** Opcode 0x68 - PUSH Iz (word/dword immediate; sign-extended imm32 for the
 *  64-bit operand size).  186 and later. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* No 64-bit immediate form; a sign-extended imm32 is pushed. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8714
8715
/** Opcode 0x69 - IMUL Gv,Ev,Iz: three-operand signed multiply with a
 *  word/dword (sign-extended for 64-bit) immediate.  186 and later.
 *
 * The multiply is done into a local temporary which is then written to the
 * destination register; SF/ZF/AF/PF are left undefined by hardware.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the '2' tells the effective address
                   calculation that 2 immediate bytes follow the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes follow the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 4 immediate bytes (imm32) follow the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
8875
8876
/** Opcode 0x6a - PUSH Ib: push a sign-extended byte immediate.  186 and later. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        /* The signed i8Imm is sign-extended to the push width by the
           implicit integer conversions. */
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8903
8904
/** Opcode 0x6b - IMUL Gv,Ev,Ib: three-operand signed multiply with a
 *  sign-extended byte immediate.  186 and later.
 *
 * Same structure as the 0x69 (Iz) form, but only one immediate byte follows
 * the ModR/M.  SF/ZF/AF/PF are left undefined by hardware.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; 1 immediate byte follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
9058
9059
/** Opcode 0x6c - INS Yb,DX: input byte(s) from port DX to ES:[r/e]DI.
 *
 * Dispatches to a C implementation (CIMPL) selected by the effective address
 * mode; the REP/REPNE prefixes both select the string-repeat variant (the
 * REPNZ prefix has the same effect as REPZ here).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();        /* instruction introduced with the 80186 */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            /* The trailing 'false' argument presumably means "I/O permission
               not pre-checked" (fIoChecked) - TODO confirm against the CIMPL
               prototypes. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9088
9089
/** Opcode 0x6d - INS Yv,DX: input word/dword(s) from port DX to ES:[r/e]DI.
 *
 * Selects the CIMPL worker on both effective operand size and effective
 * address mode.  A 64-bit operand size is handled by the 32-bit workers
 * (op32), matching the architectural behaviour where INSD has no 64-bit
 * data form.  REPNZ is treated the same as REPZ.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* instruction introduced with the 80186 */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9150
9151
/** Opcode 0x6e - OUTS DX,Xb: output byte(s) from DS:[r/e]SI (segment
 * overridable, hence the iEffSeg argument) to port DX.
 *
 * Dispatches to a CIMPL worker selected by the effective address mode;
 * REPNZ behaves the same as REPZ.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();        /* instruction introduced with the 80186 */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            /* 2nd arg: effective source segment; 3rd arg presumably fIoChecked=false - TODO confirm. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9180
9181
/** Opcode 0x6f - OUTS DX,Xv: output word/dword(s) from the effective source
 * segment at [r/e]SI to port DX.
 *
 * Worker selection is on both effective operand size and effective address
 * mode; 64-bit operand size maps onto the 32-bit (op32) workers.  REPNZ is
 * treated the same as REPZ.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();        /* instruction introduced with the 80186 */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:     /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9242
9243
/** Opcode 0x70 - JO Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9261
9262
/** Opcode 0x71 - JNO Jb: jump short if not overflow (OF=0).
 *  Inverse of 0x70: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9280
/** Opcode 0x72 - JC/JB/JNAE Jb: jump short if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9298
9299
/** Opcode 0x73 - JNC/JNB/JAE Jb: jump short if not carry (CF=0).
 *  Inverse of 0x72: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9317
9318
/** Opcode 0x74 - JE/JZ Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9336
9337
/** Opcode 0x75 - JNE/JNZ Jb: jump short if not equal/not zero (ZF=0).
 *  Inverse of 0x74: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9355
9356
/** Opcode 0x76 - JBE/JNA Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9374
9375
/** Opcode 0x77 - JNBE/JA Jb: jump short if above (CF=0 and ZF=0).
 *  Inverse of 0x76: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9393
9394
/** Opcode 0x78 - JS Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9412
9413
/** Opcode 0x79 - JNS Jb: jump short if not sign (SF=0).
 *  Inverse of 0x78: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9431
9432
/** Opcode 0x7a - JP/JPE Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9450
9451
/** Opcode 0x7b - JNP/JPO Jb: jump short if parity odd (PF=0).
 *  Inverse of 0x7a: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9469
9470
/** Opcode 0x7c - JL/JNGE Jb: jump short if less, signed (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9488
9489
/** Opcode 0x7d - JNL/JGE Jb: jump short if greater or equal, signed (SF == OF).
 *  Inverse of 0x7c: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9507
9508
/** Opcode 0x7e - JLE/JNG Jb: jump short if less or equal, signed
 *  (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9526
9527
/** Opcode 0x7f - JNLE/JG Jb: jump short if greater, signed
 *  (ZF=0 and SF == OF).  Inverse of 0x7e: the jump sits in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9545
9546
/** Opcode 0x80 - Group 1 Eb,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with a byte
 * immediate, the actual operation being selected by ModRM.reg via the
 * g_apIemImplGrp1 table.
 *
 * CMP has no locked variant (pfnLockedU8 == NULL), which is what the memory
 * path uses to decide between read-write mapping (with optional LOCK) and a
 * read-only mapping with the LOCK prefix rejected.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mnemonic table packs eight 3-char names at 4-byte strides ("add\0",
       "or\0\0", ...); ModRM.reg * 4 indexes straight into it. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();             /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte still follows the ModRM bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9605
9606
/** Opcode 0x81 - Group 1 Ev,Iz: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with a
 * word/dword immediate (sign-extended dword for the 64-bit operand size),
 * operation selected by ModRM.reg via g_apIemImplGrp1.
 *
 * Same locked/unlocked structure as opcode 0x80, repeated per effective
 * operand size.  CMP has no locked worker, so its memory path maps the
 * destination read-only and rejects LOCK.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table, 4 bytes per entry, indexed by ModRM.reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a 16-bit immediate still follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a 32-bit immediate still follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* The Iz immediate is a dword sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the sign-extended dword immediate follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9781
9782
/** Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib); it is only valid
 *  outside 64-bit mode, so reject it there and forward to the 0x80 decoder. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9789
9790
/** Opcode 0x83 - Group 1 Ev,Ib: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP with a byte
 * immediate sign-extended to the effective operand size, operation selected
 * by ModRM.reg via g_apIemImplGrp1.
 *
 * Register targets share one immediate fetch and switch on operand size;
 * memory targets fetch the immediate after effective-address calculation
 * (cbImm=1) and support the LOCK prefix where a locked worker exists
 * (i.e. everything except CMP).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table, 4 bytes per entry, indexed by ModRM.reg. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();             /* LOCK is only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 16 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 32 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit register writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib to 64 bits */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte still follows the ModRM bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);   /* sign-extend Ib to 16 bits */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);   /* sign-extend Ib to 32 bits */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);   /* sign-extend Ib to 64 bits */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9954
9955
/** Opcode 0x84 - TEST Eb,Gb: AND destination with source, update flags,
 *  discard the result.  Reuses the generic byte rm,r8 binary-operator
 *  worker with the TEST implementation table. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9964
9965
/** Opcode 0x85 - TEST Ev,Gv: word/dword/qword variant of 0x84.
 *  Reuses the generic rm,rv binary-operator worker with the TEST
 *  implementation table. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9974
9975
/** Opcode 0x86 - XCHG Eb,Gb: exchange a byte register with another byte
 * register or memory.
 *
 * The register-register form is done with two fetches and two cross stores;
 * the memory form maps the memory byte read-write and calls the assembly
 * xchg worker (the memory access is always performed atomically-mapped,
 * matching XCHG's implicit-LOCK semantics with a memory operand).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Read both, then cross-store: reg <- rm, rm <- reg. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no immediate follows */
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10023
10024
/** Opcode 0x87.
 * XCHG Ev,Gv - exchange a general register with a register or memory
 * operand (16/32/64-bit per the effective operand size). */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form rejects the LOCK prefix; only the memory form of
           XCHG is (implicitly) locked. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both values first, then store them crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* NOTE(review): the memory form does not call IEMOP_HLP_NO_LOCK_PREFIX -
           presumably because XCHG with a memory operand is implicitly locked;
           confirm against the lock-prefix handling elsewhere. */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Map the memory operand R/W, swap via the assembly helper,
                   then commit the memory side. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10146
10147
/** Opcode 0x88.
 * MOV Eb,Gb - store a byte general register into a register or memory
 * destination. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10186
10187
/** Opcode 0x89.
 * MOV Ev,Gv - store a 16/32/64-bit general register into a register or
 * memory destination, sized by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy, one case per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10274
10275
/** Opcode 0x8a.
 * MOV Gb,Eb - load a byte general register from a register or memory
 * source. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy (note: source is r/m, destination is reg). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10312
10313
/** Opcode 0x8b.
 * MOV Gv,Ev - load a 16/32/64-bit general register from a register or
 * memory source, sized by the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register copy (source is r/m, destination is reg). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10400
10401
10402/** Opcode 0x63. */
10403FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10404{
10405 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
10406 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10407 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
10408 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10409 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10410}
10411
10412
/** Opcode 0x8c.
 * MOV Ev,Sw - store a segment register into a general register or a
 * 16-bit memory location. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector to the full register width. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10485
10486
10487
10488
/** Opcode 0x8d.
 * LEA Gv,M - load the effective address of the memory operand into a
 * general register; the register form is undefined (\#UD). */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            /* Truncate the effective address to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10533
10534
/** Opcode 0x8e.
 * MOV Sw,Ev - load a segment register from a general register or a
 * 16-bit memory location; CS as destination is invalid (\#UD). */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The actual segment load (checks, hidden parts) is done by the
           iemCImpl_load_SReg C implementation. */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10588
10589
/** Opcode 0x8f /0.
 * POP Ev - pop a value off the stack into a register or memory operand.
 * @param bRm  The ModR/M byte already fetched by the group dispatcher. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: rewind the opcode pointer afterwards so the second decode
       sees the same ModR/M + SIB + displacement bytes. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily bump RSP by the operand size (per Intel),
       recalculate the address, then restore RSP. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary, store it to memory, and only commit RSP and RIP
       if everything succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10691
10692
10693/** Opcode 0x8f. */
10694FNIEMOP_DEF(iemOp_Grp1A)
10695{
10696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10697 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10698 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10699
10700 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10701 /** @todo XOP decoding. */
10702 IEMOP_MNEMONIC("3-byte-xop");
10703 return IEMOP_RAISE_INVALID_OPCODE();
10704}
10705
10706
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges rAX with the given general register (REX.B is folded in here),
 * for opcodes 0x90 thru 0x97.
 *
 * @param   iReg    The base register index (0..7) encoded in the opcode.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* Fold in REX.B to get the final register index. */
    iReg |= pIemCpu->uRexB;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            /* Fetch both, then store crosswise. */
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10756
10757
/** Opcode 0x90.
 * NOP / PAUSE (with F3 prefix) / XCHG R8,rAX (with REX.B).  With REX.B the
 * encoding is no longer 'xchg rAX,rAX' and must be emulated as a real
 * exchange. */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC("xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    /* NOTE(review): the LOCK-prefix check here looks like it's standing in for
       the F3/REP prefix used by PAUSE - confirm IEM_OP_PRF_LOCK covers that. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
        IEMOP_MNEMONIC("pause");
    else
        IEMOP_MNEMONIC("nop");
    IEM_MC_BEGIN(0, 0);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10777
10778
/** Opcode 0x91.
 * XCHG rCX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10785
10786
/** Opcode 0x92.
 * XCHG rDX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10793
10794
/** Opcode 0x93.
 * XCHG rBX,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10801
10802
10803/** Opcode 0x94. */
10804FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10805{
10806 IEMOP_MNEMONIC("xchg rSX,rAX");
10807 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10808}
10809
10810
/** Opcode 0x95.
 * XCHG rBP,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10817
10818
/** Opcode 0x96.
 * XCHG rSI,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10825
10826
/** Opcode 0x97.
 * XCHG rDI,rAX - delegates to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10833
10834
/** Opcode 0x98.
 * CBW/CWDE/CDQE - sign-extend the lower half of rAX into the full
 * operand-size register, implemented as an OR/AND of the upper bits
 * depending on the sign bit of the lower half. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* AL bit 7 set -> fill AH with 1s, else clear it. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10880
10881
/** Opcode 0x99.
 * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX by setting rDX to all-ones
 * or zero depending on the sign bit of rAX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            /* DX = (AX < 0) ? 0xffff : 0 */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10927
10928
/** Opcode 0x9a.
 * CALL Ap - direct far call with an immediate selector:offset pointer;
 * invalid in 64-bit mode.  The heavy lifting is deferred to the
 * iemCImpl_callf C implementation. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10945
10946
/** Opcode 0x9b. (aka fwait)
 * WAIT/FWAIT - raises \#NM or a pending FPU exception if applicable,
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10960
10961
10962/** Opcode 0x9c. */
10963FNIEMOP_DEF(iemOp_pushf_Fv)
10964{
10965 IEMOP_HLP_NO_LOCK_PREFIX();
10966 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10967 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
10968}
10969
10970
10971/** Opcode 0x9d. */
10972FNIEMOP_DEF(iemOp_popf_Fv)
10973{
10974 IEMOP_HLP_NO_LOCK_PREFIX();
10975 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10976 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
10977}
10978
10979
/** Opcode 0x9e.
 * SAHF - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF only).
 * In 64-bit mode this requires the LAHF/SAHF CPUID feature. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is encoded as xSP with no REX prefix in the GREG_U8 scheme. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five flags SAHF may set; bit 1 is always 1 in EFLAGS. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11002
11003
/** Opcode 0x9f.
 * LAHF - load the low byte of EFLAGS into AH.  In 64-bit mode this
 * requires the LAHF/SAHF CPUID feature. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is encoded as xSP with no REX prefix in the GREG_U8 scheme. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11020
11021
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  The moffs width follows the effective ADDRESS size (not the
 * operand size) and is zero-extended to 64 bits.  Will return on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
11046
11047/** Opcode 0xa0. */
11048FNIEMOP_DEF(iemOp_mov_Al_Ob)
11049{
11050 /*
11051 * Get the offset and fend of lock prefixes.
11052 */
11053 RTGCPTR GCPtrMemOff;
11054 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11055
11056 /*
11057 * Fetch AL.
11058 */
11059 IEM_MC_BEGIN(0,1);
11060 IEM_MC_LOCAL(uint8_t, u8Tmp);
11061 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
11062 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11063 IEM_MC_ADVANCE_RIP();
11064 IEM_MC_END();
11065 return VINF_SUCCESS;
11066}
11067
11068
/**
 * Opcode 0xa1 - MOV rAX, moffs16/32/64.
 *
 * Loads AX/EAX/RAX (per effective operand size) from the absolute offset
 * encoded in the instruction stream, addressed through iEffSeg.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11114
11115
11116/** Opcode 0xa2. */
11117FNIEMOP_DEF(iemOp_mov_Ob_AL)
11118{
11119 /*
11120 * Get the offset and fend of lock prefixes.
11121 */
11122 RTGCPTR GCPtrMemOff;
11123 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11124
11125 /*
11126 * Store AL.
11127 */
11128 IEM_MC_BEGIN(0,1);
11129 IEM_MC_LOCAL(uint8_t, u8Tmp);
11130 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11131 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
11132 IEM_MC_ADVANCE_RIP();
11133 IEM_MC_END();
11134 return VINF_SUCCESS;
11135}
11136
11137
11138/** Opcode 0xa3. */
11139FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11140{
11141 /*
11142 * Get the offset and fend of lock prefixes.
11143 */
11144 RTGCPTR GCPtrMemOff;
11145 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11146
11147 /*
11148 * Store rAX.
11149 */
11150 switch (pIemCpu->enmEffOpSize)
11151 {
11152 case IEMMODE_16BIT:
11153 IEM_MC_BEGIN(0,1);
11154 IEM_MC_LOCAL(uint16_t, u16Tmp);
11155 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11156 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11157 IEM_MC_ADVANCE_RIP();
11158 IEM_MC_END();
11159 return VINF_SUCCESS;
11160
11161 case IEMMODE_32BIT:
11162 IEM_MC_BEGIN(0,1);
11163 IEM_MC_LOCAL(uint32_t, u32Tmp);
11164 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11165 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11166 IEM_MC_ADVANCE_RIP();
11167 IEM_MC_END();
11168 return VINF_SUCCESS;
11169
11170 case IEMMODE_64BIT:
11171 IEM_MC_BEGIN(0,1);
11172 IEM_MC_LOCAL(uint64_t, u64Tmp);
11173 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11174 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11175 IEM_MC_ADVANCE_RIP();
11176 IEM_MC_END();
11177 return VINF_SUCCESS;
11178
11179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11180 }
11181}
11182
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one MOVS step: load from [iEffSeg:xSI], store to [ES:xDI], then
 * step both index registers by ValBits/8 — down when EFLAGS.DF is set,
 * up otherwise.  Index registers are zero-extended per AddrBits.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11201
/**
 * Opcode 0xa4 - MOVSB.
 *
 * Copies one byte from [iEffSeg:xSI] to [ES:xDI] and steps both index
 * registers per EFLAGS.DF.  REP/REPNE prefixed forms are deferred to the
 * C implementations.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11235
11236
/**
 * Opcode 0xa5 - MOVSW/MOVSD/MOVSQ.
 *
 * Copies one 16/32/64-bit element (per effective operand size) from
 * [iEffSeg:xSI] to [ES:xDI] and steps both index registers per EFLAGS.DF.
 * REP/REPNE prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11319
11320#undef IEM_MOVS_CASE
11321
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one CMPS step: fetch [iEffSeg:xSI] and [ES:xDI], run the cmp
 * assembly worker on them to update EFLAGS (the first operand is passed by
 * reference as the worker requires, but memory is not written back), then
 * step both index registers by ValBits/8 per EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11348
11349/** Opcode 0xa6. */
11350FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11351{
11352 IEMOP_HLP_NO_LOCK_PREFIX();
11353
11354 /*
11355 * Use the C implementation if a repeat prefix is encountered.
11356 */
11357 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11358 {
11359 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11360 switch (pIemCpu->enmEffAddrMode)
11361 {
11362 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11363 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11364 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11366 }
11367 }
11368 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11369 {
11370 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11371 switch (pIemCpu->enmEffAddrMode)
11372 {
11373 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11374 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11375 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11377 }
11378 }
11379 IEMOP_MNEMONIC("cmps Xb,Yb");
11380
11381 /*
11382 * Sharing case implementation with cmps[wdq] below.
11383 */
11384 switch (pIemCpu->enmEffAddrMode)
11385 {
11386 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11387 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11388 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11390 }
11391 return VINF_SUCCESS;
11392
11393}
11394
11395
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ.
 *
 * Compares the 16/32/64-bit element (per effective operand size) at
 * [iEffSeg:xSI] with the one at [ES:xDI], updates EFLAGS, and steps both
 * index registers per EFLAGS.DF.  REPE/REPNE prefixed forms are deferred
 * to the C implementations.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11514
11515#undef IEM_CMPS_CASE
11516
/**
 * Opcode 0xa8 - TEST AL, imm8.
 *
 * ANDs AL with the immediate byte for flag effects only (no destination
 * write); AF is declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11524
11525
/**
 * Opcode 0xa9 - TEST rAX, imm16/32.
 *
 * ANDs AX/EAX/RAX with the immediate for flag effects only (no destination
 * write); AF is declared undefined for the verifier.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11533
11534
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one STOS step: store AL/AX/EAX/RAX to [ES:xDI], then step xDI by
 * ValBits/8 — down when EFLAGS.DF is set, up otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11550
/**
 * Opcode 0xaa - STOSB.
 *
 * Stores AL to [ES:xDI] and steps xDI per EFLAGS.DF.  REP/REPNE prefixed
 * forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11584
11585
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ.
 *
 * Stores AX/EAX/RAX (per effective operand size) to [ES:xDI] and steps
 * xDI per EFLAGS.DF.  REP/REPNE prefixed forms are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11668
11669#undef IEM_STOS_CASE
11670
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one LODS step: load from [iEffSeg:xSI] into AL/AX/EAX/RAX, then
 * step xSI by ValBits/8 — down when EFLAGS.DF is set, up otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11686
/**
 * Opcode 0xac - LODSB.
 *
 * Loads AL from [iEffSeg:xSI] and steps xSI per EFLAGS.DF.  REP/REPNE
 * prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11720
11721
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ.
 *
 * Loads AX/EAX/RAX (per effective operand size) from [iEffSeg:xSI] and
 * steps xSI per EFLAGS.DF.  REP/REPNE prefixed forms are deferred to the
 * C implementations.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11804
11805#undef IEM_LODS_CASE
11806
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one SCAS step: fetch [ES:xDI], compare it against AL/AX/EAX/RAX
 * via the cmp assembly worker (updating EFLAGS only; the accumulator is
 * passed by reference as the worker requires but not modified), then
 * step xDI by ValBits/8 per EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11828
/**
 * Opcode 0xae - SCASB.
 *
 * Compares AL against the byte at [ES:xDI], updates EFLAGS, and steps xDI
 * per EFLAGS.DF.  REPE/REPNE prefixed forms are deferred to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11873
11874
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ.
 *
 * Compares AX/EAX/RAX (per effective operand size) against the element at
 * [ES:xDI], updates EFLAGS, and steps xDI per EFLAGS.DF.  REPE/REPNE
 * prefixed forms are deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Verify: 16-bit addressing should not be encodable in 64-bit mode (only 32/64-bit address sizes are). */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every path in the switch above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11990
11991#undef IEM_SCAS_CASE
11992
/**
 * Common 'mov r8, imm8' helper (opcodes 0xb0..0xb7).
 *
 * Fetches the imm8 operand and emits microcode that stores it into the given
 * 8-bit general purpose register.
 *
 * @param   iReg    The destination register index, including any REX.B
 *                  adjustment already applied by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /* a LOCK prefix is invalid for mov r8,imm8 */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
12009
12010
/** Opcode 0xb0 - mov AL,Ib (R8L with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
12017
12018
/** Opcode 0xb1 - mov CL,Ib (R9L with REX.B). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
12025
12026
/** Opcode 0xb2 - mov DL,Ib (R10L with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
12033
12034
/** Opcode 0xb3 - mov BL,Ib (R11L with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12041
12042
/** Opcode 0xb4 - mov AH,Ib. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    /* Register index 4 (xSP) selects AH under the legacy 8-bit register
       encoding; with a REX prefix it selects SPL/R12L instead.  NOTE(review):
       the high-byte vs. low-byte mapping is presumably resolved inside the
       GREG_U8 accessors - confirm there. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12049
12050
/** Opcode 0xb5 - mov CH,Ib (index 5/xBP encodes CH without REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12057
12058
/** Opcode 0xb6 - mov DH,Ib (index 6/xSI encodes DH without REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12065
12066
/** Opcode 0xb7 - mov BH,Ib (index 7/xDI encodes BH without REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12073
12074
/**
 * Common 'mov regX,immX' helper (opcodes 0xb8..0xbf).
 *
 * Fetches an immediate sized by the effective operand size (16, 32 or a full
 * 64 bits) and stores it in the given general purpose register.
 *
 * @param   iReg    The destination register index, including any REX.B
 *                  adjustment already applied by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            /* U32 store - the 64-bit variant of this store zero-extends into
               the high dword per the usual x86 32-bit write semantics. */
            IEM_MC_STORE_GREG_U32(iReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! (the only instruction with one) */
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        /* no default: all IEMMODE values are handled above */
    }

    return VINF_SUCCESS;
}
12123
12124
/** Opcode 0xb8 - mov rAX,Iv (R8 with REX.B). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12131
12132
/** Opcode 0xb9 - mov rCX,Iv (R9 with REX.B). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12139
12140
/** Opcode 0xba - mov rDX,Iv (R10 with REX.B). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12147
12148
/** Opcode 0xbb - mov rBX,Iv (R11 with REX.B). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12155
12156
/** Opcode 0xbc - mov rSP,Iv (R12 with REX.B). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12163
12164
/** Opcode 0xbd - mov rBP,Iv (R13 with REX.B). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12171
12172
/** Opcode 0xbe - mov rSI,Iv (R14 with REX.B). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12179
12180
/** Opcode 0xbf - mov rDI,Iv (R15 with REX.B). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12187
12188
/**
 * Opcode 0xc0 - Group 2 shift/rotate, byte operand, imm8 count.
 *
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is undefined and raises \#UD.  Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for (some) shift counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing '1' accounts for the imm8 still to be fetched after
           the displacement (affects RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12248
12249
/**
 * Opcode 0xc1 - Group 2 shift/rotate, word/dword/qword operand, imm8 count.
 *
 * Same operation selection as opcode 0xc0 (via the ModR/M reg field, /6
 * undefined), but with an Ev destination sized by the effective operand size.
 * Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for (some) shift counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' is the size of the imm8 still to be
                   fetched after the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12387
12388
/** Opcode 0xc2 - near return, popping Iw bytes of arguments.
 * Defers to the retn C implementation; default op size is 64-bit in long mode. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12398
12399
/** Opcode 0xc3 - near return (no argument pop; same CIMPL as 0xc2 with 0). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12408
12409
/** Opcode 0xc4 - LES Gv,Mp, doubling as the 2-byte VEX prefix escape. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatability mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        /* VEX decoding not implemented here yet; raise \#UD for now. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12430
12431
/** Opcode 0xc5 - LDS Gv,Mp, doubling as the 3-byte VEX prefix escape. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    /* Consume the remaining VEX bytes so the instruction length is right,
       even though decoding is not implemented yet. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12469
12470
/** Opcode 0xc6 - Group 11: mov Eb,Ib (/0 only; other /r encodings \#UD). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* '1' = size of the imm8 following the displacement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12502
12503
/** Opcode 0xc7 - Group 11: mov Ev,Iz (/0 only; other /r encodings \#UD).
 * In 64-bit mode the imm32 is sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32, sign-extended to 64 bits (no imm64 form for C7). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* '2' = size of the imm16 following the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* '4' = size of the imm32 following the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still '4': the immediate is imm32 even with a 64-bit operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12584
12585
12586
12587
/** Opcode 0xc8 - ENTER Iw,Ib (frame size, nesting level). 186+. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12599
12600
12601/** Opcode 0xc9. */
12602FNIEMOP_DEF(iemOp_leave)
12603{
12604 IEMOP_MNEMONIC("retn");
12605 IEMOP_HLP_MIN_186();
12606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12607 IEMOP_HLP_NO_LOCK_PREFIX();
12608 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12609}
12610
12611
/** Opcode 0xca - far return, popping Iw bytes of arguments. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12621
12622
/** Opcode 0xcb - far return (no argument pop; same CIMPL as 0xca with 0). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12631
12632
/** Opcode 0xcc - INT3 breakpoint; raises \#BP flagged as the INT3 instruction. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12639
12640
/** Opcode 0xcd - INT Ib, software interrupt with explicit vector. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12648
12649
/** Opcode 0xce - INTO, raise \#OF if the overflow flag is set.
 * Invalid in 64-bit mode.  NOTE(review): the EFLAGS.OF conditional is
 * presumably handled inside iemCImpl_int for the \#OF vector - confirm. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12663
12664
/** Opcode 0xcf - IRET, interrupt return sized by the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12672
12673
/**
 * Opcode 0xd0 - Group 2 shift/rotate, byte operand, implicit count of 1.
 *
 * Operation selected by the ModR/M reg field; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for (some) shift counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows the displacement here, hence the 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12729
12730
12731
/**
 * Opcode 0xd1 - Group 2 shift/rotate, word/dword/qword operand, implicit
 * count of 1.
 *
 * Operation selected by the ModR/M reg field; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are architecturally undefined for (some) shift counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate follows the displacement here, hence the 0. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12861
12862
/**
 * Opcode 0xd2 - Group 2 shift/rotate, byte operand, count from CL.
 *
 * Operation selected by the ModR/M reg field; /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are architecturally undefined for (some) shift counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* The shift count comes from CL at execution time. */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows the displacement here, hence the 0. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12920
12921
/** Opcode 0xd3.
 * Group 2: rotate/shift Ev by CL - rol/ror/rcl/rcr/shl/shr/sar with the
 * shift count taken from the CL register.  /6 is undefined and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects which group-2 operation to do. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc can't tell the cases are exhaustive. */
    }
    /* OF and AF are left undefined by these instructions; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - one IEM_MC block per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination - map read/write, shift in place, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13057
/** Opcode 0xd4.
 * AAM Ib - ASCII adjust AX after multiply.  An immediate divisor of zero
 * raises \#DE; the actual adjustment is deferred to iemCImpl_aam.
 * Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13069
13070
/** Opcode 0xd5.
 * AAD Ib - ASCII adjust AX before division; deferred to iemCImpl_aad.
 * Note: unlike AAM, a zero immediate does not fault here.
 * Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13080
13081
/** Opcode 0xd6.
 * SALC - set AL from carry: AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC("salc");
    IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13101
13102
/** Opcode 0xd7.
 * XLAT - table lookup translation: AL = [DS:(r/e)BX + zero-extended AL].
 * One IEM_MC block per effective address size; the table base register
 * is xBX and the byte fetch honours the effective segment override. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13149
13150
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.  Raises stack underflow if either register is empty.
 *
 * @param   bRm         The ModR/M byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13181
13182
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no result value is stored.
 *
 * @param   bRm         The ModR/M byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to mark in the underflow path. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13213
13214
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the register stack when done.
 *
 * @param   bRm         The ModR/M byte; the rm field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        /* Pop even on underflow; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13245
13246
/** Opcode 0xd8 11/0.  FADD ST(0),ST(i): ST0 += STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
13253
13254
/** Opcode 0xd8 11/1.  FMUL ST(0),ST(i): ST0 *= STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
13261
13262
/** Opcode 0xd8 11/2.  FCOM ST(0),ST(i): compare, update FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
13269
13270
/** Opcode 0xd8 11/3.  FCOMP ST(0),ST(i): compare, update FSW, then pop.
 * Shares the comparison worker with FCOM; only the pop differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
13277
13278
/** Opcode 0xd8 11/4.  FSUB ST(0),ST(i): ST0 -= STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
13285
13286
/** Opcode 0xd8 11/5.  FSUBR ST(0),ST(i): ST0 = STn - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
13293
13294
/** Opcode 0xd8 11/6.  FDIV ST(0),ST(i): ST0 /= STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
13301
13302
/** Opcode 0xd8 11/7.  FDIVR ST(0),ST(i): ST0 = STn / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13309
13310
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * memory operand), storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; used to calculate the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    /* The effective address must be decoded before prefix checks complete. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13346
13347
/** Opcode 0xd8 !11/0.  FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
13354
13355
/** Opcode 0xd8 !11/1.  FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13362
13363
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real - compare ST0 with a 32-bit real memory operand;
 * only the FSW is updated, no result value is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Record the memory operand in FDP/FDS along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13396
13397
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real - like FCOM m32real but pops the FPU stack when done. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Pop even on underflow, matching the real instruction's stack effect. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13430
13431
/** Opcode 0xd8 !11/4.  FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13438
13439
/** Opcode 0xd8 !11/5.  FSUBR ST(0),m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13446
13447
/** Opcode 0xd8 !11/6.  FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13454
13455
/** Opcode 0xd8 !11/7.  FDIVR ST(0),m32real (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13462
13463
/** Opcode 0xd8.
 * First x87 escape opcode.  Dispatches on the ModR/M byte: mod==3 selects
 * the register forms (ST0,STn), anything else the m32real memory forms.
 * Both use the reg field to pick the operation. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember the offset of this escape byte for FPU FOP/FPUIP reporting. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13501
13502
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - push a 32-bit real memory operand onto the FPU stack,
 * converting it to 80-bit extended precision.  Raises stack overflow if
 * ST(7) is in use.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be free for the push to succeed. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13535
13536
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as a 32-bit real.  On an empty stack
 * a negative QNaN is written instead if the invalid-operation exception is
 * masked (FCW.IM set). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW - unmasked exceptions suppress the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13571
13572
/** Opcode 0xd9 !11/3
 * FSTP m32real - like FST m32real, but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: write a negative QNaN if #IA is masked, then
           report underflow (which also pops). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13607
13608
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment (control/status/tag words,
 * instruction/data pointers) from memory; deferred to iemCImpl_fldenv.
 * The image size depends on the effective operand size (14 vs 28 bytes). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13626
13627
13628/** Opcode 0xd9 !11/5 */
13629FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13630{
13631 IEMOP_MNEMONIC("fldcw m2byte");
13632 IEM_MC_BEGIN(1, 1);
13633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13634 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13637 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13638 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13639 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13640 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13641 IEM_MC_END();
13642 return VINF_SUCCESS;
13643}
13644
13645
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - store the FPU environment to memory without checking
 * for pending exceptions; deferred to iemCImpl_fnstenv.
 * NOTE(review): the mnemonic string says "fstenv" while the function handles
 * the no-wait form (fnstenv) - presumably intentional since FSTENV is FNSTENV
 * preceded by FWAIT; confirm against the disassembly verification. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                    1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13662}
13663
13664
13665/** Opcode 0xd9 !11/7 */
13666FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
13667{
13668 IEMOP_MNEMONIC("fnstcw m2byte");
13669 IEM_MC_BEGIN(2, 0);
13670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13671 IEM_MC_LOCAL(uint16_t, u16Fcw);
13672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13674 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13675 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13676 IEM_MC_FETCH_FCW(u16Fcw);
13677 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
13678 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
13679 IEM_MC_END();
13680 return VINF_SUCCESS;
13681}
13682
13683
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still checks CR0.EM/TS and pending FPU
 * exceptions, and updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13701
13702
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - push a copy of STn onto the FPU stack.
 * Raises stack underflow (via push-underflow) if STn is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13730
13731
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchange ST0 and STn.  Implemented by storing ST0's value
 * into STn and STn's value (with C1 set) into ST0; the underflow case is
 * handled by a C implementation helper. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13762
13763
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST0 into STn and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: nothing to copy, just pop (or flag
           underflow if the stack is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13810
13811
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13841
13842
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13849
13850
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13857
13858
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW;
 * no result value is stored.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination register to mark in the underflow path. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13887
13888
/** Opcode 0xd9 0xe4 - FTST: compare ST0 with 0.0, setting only FSW flags. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13895
13896
/** Opcode 0xd9 0xe5 - FXAM: classify ST0 into the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13903
13904
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * The push requires the register that will become the new top (ST7 relative
 * to the current top) to be empty, otherwise the push-overflow path is taken.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)                /* reg that becomes the new top */
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13932
13933
/** Opcode 0xd9 0xe8 - FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13940
13941
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13948
13949
/** Opcode 0xd9 0xea - FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13956
/** Opcode 0xd9 0xeb - FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13963
13964
/** Opcode 0xd9 0xec - FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13971
/** Opcode 0xd9 0xed - FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13978
13979
/** Opcode 0xd9 0xee - FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13986
13987
/** Opcode 0xd9 0xf0 - F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13994
13995
13996/** Opcode 0xd9 0xf1. */
13997FNIEMOP_DEF(iemOp_fylx2)
13998{
13999 IEMOP_MNEMONIC("fylx2 st0");
14000 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14001}
14002
14003
/**
 * Common worker for FPU instructions working on ST0 and having two outputs,
 * one replacing ST0 and one pushed onto the stack.
 *
 * On an empty ST0 the push-underflow-two helper is invoked instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);  /* replace ST0 + push second result */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14033
14034
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent of ST0, pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
14041
14042
/**
 * Common worker for FPU instructions working on STn and ST0, storing the
 * result in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; STn index is taken from its R/M field.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both STn (from ModR/M) and ST0 must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14074
14075
/** Opcode 0xd9 0xf3 - FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14082
14083
/** Opcode 0xd9 0xf4 - FXTRACT: split ST0 into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14090
14091
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14098
14099
/** Opcode 0xd9 0xf6 - FDECSTP: rotate the FPU stack by decrementing TOP. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears C0/C2/C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14122
14123
/** Opcode 0xd9 0xf7 - FINCSTP: rotate the FPU stack by incrementing TOP. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);     /* clears C0/C2/C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14146
14147
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14154
14155
/** Opcode 0xd9 0xf9 - FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14162
14163
/** Opcode 0xd9 0xfa - FSQRT: square root of ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14170
14171
/** Opcode 0xd9 0xfb - FSINCOS: sine and cosine of ST0 (two results). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14178
14179
/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to integer. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14186
14187
/** Opcode 0xd9 0xfd - FSCALE: scale ST0 by powers of two taken from ST1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14194
14195
/** Opcode 0xd9 0xfe - FSIN: sine of ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14202
14203
/** Opcode 0xd9 0xff - FCOS: cosine of ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14210
14211
/** Dispatch table for the 0xd9 register-form opcodes 0xe0..0xff,
 *  indexed by (opcode byte - 0xe0).  Used by iemOp_EscF1. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
14248
14249
/** Opcode 0xd9 - FPU escape, dispatching on the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the FPU opcode offset (opcode byte precedes ModR/M). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field (and bRm for 0xd0/0xe0+). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14291
14292
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST0 if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; condition is evaluated after. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14319
14320
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST0 if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; condition is evaluated after. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14347
14348
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST0 if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; condition is evaluated after. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14375
14376
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; condition is evaluated after. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14403
14404
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping the stack twice when done.
 *
 * Note: the register indices are fixed to ST0 and ST1 here (used by FUCOMPP).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14436
14437
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14444
14445
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer operand is fetched from memory before the FPU
 * state is touched; on an empty ST0 the underflow path is taken.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14481
14482
/** Opcode 0xda !11/0 - FIADD: ST0 += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14489
14490
/** Opcode 0xda !11/1 - FIMUL: ST0 *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14497
14498
/** Opcode 0xda !11/2 - FICOM: compare ST0 with m32i, updating FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14531
14532
/** Opcode 0xda !11/3 - FICOMP: compare ST0 with m32i, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14565
14566
/** Opcode 0xda !11/4 - FISUB: ST0 -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14573
14574
/** Opcode 0xda !11/5 - FISUBR: ST0 = m32i - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14581
14582
/** Opcode 0xda !11/6 - FIDIV: ST0 /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14589
14590
/** Opcode 0xda !11/7 - FIDIVR: ST0 = m32i / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14597
14598
/** Opcode 0xda - FPU escape, dispatching on the ModR/M byte. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record the FPU opcode offset (opcode byte precedes ModR/M). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVcc plus the single FUCOMPP encoding (0xe9). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer (m32i) arithmetic and comparison. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14638
14639
/** Opcode 0xdb !11/0 - FILD: push m32i converted to an 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)                /* reg that becomes the new top */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14671
14672
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14707
14708
/** Opcode 0xdb !11/2 - FIST: store ST0 to m32i (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store the integer-indefinite value if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14743
14744
14745/** Opcode 0xdb !11/3. */
14746FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14747{
14748 IEMOP_MNEMONIC("fisttp m32i");
14749 IEM_MC_BEGIN(3, 2);
14750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14751 IEM_MC_LOCAL(uint16_t, u16Fsw);
14752 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14753 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14754 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14755
14756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14758 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14759 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14760
14761 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14762 IEM_MC_PREPARE_FPU_USAGE();
14763 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14764 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14765 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14766 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14767 IEM_MC_ELSE()
14768 IEM_MC_IF_FCW_IM()
14769 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14770 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14771 IEM_MC_ENDIF();
14772 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14773 IEM_MC_ENDIF();
14774 IEM_MC_ADVANCE_RIP();
14775
14776 IEM_MC_END();
14777 return VINF_SUCCESS;
14778}
14779
14780
/** Opcode 0xdb !11/5 - FLD: push an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)                /* reg that becomes the new top */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14812
14813
/** Opcode 0xdb !11/7 - FSTP: store ST0 to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: store negative QNaN (real indefinite) if IM is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14848
14849
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST0 if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; condition is evaluated after. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14876
14877
/** Opcode 0xdb 11/1. FCMOVNE - copy ST(i) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied, otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14904
14905
/** Opcode 0xdb 11/2. FCMOVNBE - copy ST(i) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied, otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14932
14933
/** Opcode 0xdb 11/3. FCMOVNU - copy ST(i) to ST(0) if PF is clear.
 * NOTE(review): the identifier/mnemonic spells it "fcmovnnu" (double 'n');
 * the architectural mnemonic is FCMOVNU - verify before renaming, the
 * dispatcher references this symbol. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied, otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14960
14961
/** Opcode 0xdb 0xe0. FNENI - 8087 interrupt enable; no-op on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Only #NM checking; the instruction itself does nothing here. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14973
14974
/** Opcode 0xdb 0xe1. FNDISI - 8087 interrupt disable; no-op on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Only #NM checking; the instruction itself does nothing here. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14986
14987
/** Opcode 0xdb 0xe2. FNCLEX - clear FPU exception flags without #MF check. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Mark the FPU state dirty before clearing the FSW exception bits. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15002
15003
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Shared C implementation with FINIT; fCheckXcpts=false is the FN* variant. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
15011
15012
/** Opcode 0xdb 0xe4. FNSETPM - 80287 "set protected mode"; ignored on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Only #NM checking; the instruction itself does nothing here. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15024
15025
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL "reset protected mode"; #UD on newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    /* Modern behavior: invalid opcode. */
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15041
15042
/** Opcode 0xdb 11/5. FUCOMI - unordered compare ST(0) with ST(i), set EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
15049
15050
/** Opcode 0xdb 11/6. FCOMI - ordered compare ST(0) with ST(i), set EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15057
15058
/** Opcode 0xdb. x87 escape - dispatch on the ModR/M reg field (and full byte
 *  for the mod=3, reg=4 special encodings 0xe0..0xe7). */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Record the opcode byte offset so FOP can be reported on FPU exceptions. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* mod=3 + reg=4 means bRm is 0xe0..0xe7; dispatch on the byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15108
15109
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte (register form; rm selects ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied, otherwise it's stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15141
15142
/** Opcode 0xdc 11/0. FADD ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    /* ST(i) = ST(i) + ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15149
15150
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    /* ST(i) = ST(i) * ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15157
15158
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    /* ST(i) = ST(0) - ST(i); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
15165
15166
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    /* ST(i) = ST(i) - ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15173
15174
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    /* ST(i) = ST(0) / ST(i); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15181
15182
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    /* ST(i) = ST(i) / ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15189
15190
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm     The ModR/M byte (memory form).
 * @param   pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand first so memory faults precede FPU work. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15225
15226
/** Opcode 0xdc !11/0. FADD m64real - ST(0) += m64. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15233
15234
/** Opcode 0xdc !11/1. FMUL m64real - ST(0) *= m64. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15241
15242
/** Opcode 0xdc !11/2. FCOM m64real - compare ST(0) with m64, set C0-C3, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* The comparison only produces FSW; no result register is written. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15275
15276
/** Opcode 0xdc !11/3. FCOMP m64real - compare ST(0) with m64, set C0-C3, pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same assembly worker as FCOM; only the pop at FSW update differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15309
15310
/** Opcode 0xdc !11/4. FSUB m64real - ST(0) -= m64. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15317
15318
/** Opcode 0xdc !11/5. FSUBR m64real - ST(0) = m64 - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15325
15326
/** Opcode 0xdc !11/6. FDIV m64real - ST(0) /= m64. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15333
15334
/** Opcode 0xdc !11/7. FDIVR m64real - ST(0) = m64 / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15341
15342
/** Opcode 0xdc. x87 escape - dispatch on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record the opcode byte offset so FOP can be reported on FPU exceptions. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST(0) op m64real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15379
15380
/** Opcode 0xdd !11/0. FLD m64real - push m64 onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will occupy; it must
       be free or the push overflows the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15412
15413
/** Opcode 0xdd !11/1. FISTTP m64int - store ST(0) truncated to 64-bit int and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU stack so memory faults
       are taken first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15448
15449
/** Opcode 0xdd !11/2. FST m64real - store ST(0) as 64-bit real, no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15484
15485
15486
15487
/** Opcode 0xdd !11/3. FSTP m64real - store ST(0) as 64-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15522
15523
/** Opcode 0xdd !11/4. FRSTOR m94/108byte - restore the full FPU state. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    /* Operand size selects the 94-byte (16-bit) vs 108-byte (32-bit) image. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15541
15542
/** Opcode 0xdd !11/6. FNSAVE m94/108byte - save the full FPU state, no #MF check. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    /* Operand size selects the 94-byte (16-bit) vs 108-byte (32-bit) image. */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15561
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word, no #MF check. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15586
15587
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Only the tag word entry for ST(i) is changed; TOP stays put. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15609
15610
/** Opcode 0xdd 11/2. FST ST(i) - copy ST(0) into ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15635
15636
15637/** Opcode 0xdd 11/3. */
15638FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15639{
15640 IEMOP_MNEMONIC("fcom st0,stN");
15641 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15642}
15643
15644
15645/** Opcode 0xdd 11/4. */
15646FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15647{
15648 IEMOP_MNEMONIC("fcomp st0,stN");
15649 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15650}
15651
15652
/** Opcode 0xdd. x87 escape - dispatch on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the opcode byte offset so FOP can be reported on FPU exceptions. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15689
15690
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add and pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15697
15698
/** Opcode 0xde 11/1. FMULP ST(i),ST(0) - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15705
15706
15707/** Opcode 0xde 0xd9. */
15708FNIEMOP_DEF(iemOp_fcompp)
15709{
15710 IEMOP_MNEMONIC("fucompp st0,stN");
15711 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15712}
15713
15714
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15721
15722
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15729
15730
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15737
15738
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15745
15746
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand first so memory faults precede FPU work. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15782
15783
/** Opcode 0xde !11/0. FIADD m16int - ST(0) += (int16_t)m16. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15790
15791
/** Opcode 0xde !11/1. FIMUL m16int - ST(0) *= (int16_t)m16. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15798
15799
/** Opcode 0xde !11/2. FICOM m16int - compare ST(0) with (int16_t)m16, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* The comparison only produces FSW; no result register is written. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15832
15833
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops the register stack afterwards
 * (same assembly worker; only the FSW-commit/underflow macros differ). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* _THEN_POP variant implements the trailing stack pop. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15866
15867
/** Opcode 0xde !11/4.
 * FISUB m16int: subtract a 16-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15874
15875
/** Opcode 0xde !11/5.
 * FISUBR m16int: reverse subtract - ST(0) = m16int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15882
15883
15884/** Opcode 0xde !11/6. */
15885FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15886{
15887 IEMOP_MNEMONIC("fiadd m16i");
15888 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15889}
15890
15891
15892/** Opcode 0xde !11/7. */
15893FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15894{
15895 IEMOP_MNEMONIC("fiadd m16i");
15896 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15897}
15898
15899
/** Opcode 0xde.
 * x87 escape group 6: register forms are the popping arithmetic ops
 * (FADDP/FMULP/FCOMPP/FSUBRP/FSUBP/FDIVRP/FDIVP); memory forms are the
 * 16-bit integer ops (FIADD/FIMUL/FICOM/FICOMP/FISUB/FISUBR/FIDIV/FIDIVR). */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the opcode byte is so FOP can be updated by the workers. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* only DE D9 encodes FCOMPP */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms: 16-bit signed integer source. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15938
15939
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE ST(i) + FINCSTP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Tag ST(i) empty, then bump TOP - the "free + pop" combination. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15961
15962
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * unmasked exceptions (the no-wait form, hence no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15979
15980
15981/** Opcode 0xdf 11/5. */
15982FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15983{
15984 IEMOP_MNEMONIC("fcomip st0,stN");
15985 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15986}
15987
15988
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting ZF/PF/CF, then pop.
 * Deferred to the C implementation shared with FCOMI/FUCOMI(P). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15995
15996
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit signed integer from memory to real80 and
 * push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current TOP, so that is the
       register which must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16028
16029
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as a 16-bit integer using
 * truncation (round toward zero regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16064
16065
16066/** Opcode 0xdf !11/2. */
16067FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16068{
16069 IEMOP_MNEMONIC("fistp m16i");
16070 IEM_MC_BEGIN(3, 2);
16071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16072 IEM_MC_LOCAL(uint16_t, u16Fsw);
16073 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16074 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16075 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16076
16077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16079 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16080 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16081
16082 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16083 IEM_MC_PREPARE_FPU_USAGE();
16084 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16085 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16086 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16087 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16088 IEM_MC_ELSE()
16089 IEM_MC_IF_FCW_IM()
16090 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16091 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16092 IEM_MC_ENDIF();
16093 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16094 IEM_MC_ENDIF();
16095 IEM_MC_ADVANCE_RIP();
16096
16097 IEM_MC_END();
16098 return VINF_SUCCESS;
16099}
16100
16101
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as a 16-bit integer (rounding per
 * FCW.RC), then pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16136
16137
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: load an 80-bit packed BCD value.  Not implemented yet -
 * the stub raises the not-implemented status when hit. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16140
16141
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a 64-bit signed integer from memory to real80 and
 * push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST(7) relative to the current TOP. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16173
16174
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store ST(0) as 80-bit packed BCD and pop.  Not implemented
 * yet - the stub raises the not-implemented status when hit. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16177
16178
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as a 64-bit integer (rounding per
 * FCW.RC), then pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: if #IA is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16213
16214
/** Opcode 0xdf.
 * x87 escape group 7: register forms include FFREEP, FNSTSW AX and the
 * EFLAGS-setting compare+pop instructions; memory forms are the 16/64-bit
 * integer loads/stores and the packed BCD instructions.
 * NOTE(review): unlike iemOp_EscF6 this does not record
 * pIemCpu->offFpuOpcode before fetching ModR/M - confirm whether FOP
 * updating is intentionally skipped for this escape byte. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only DF E0 encodes FNSTSW AX */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16252
16253
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ rel8: decrement CX/ECX/RCX (selected by the effective
 * address size) and branch if the counter is non-zero AND ZF is clear.
 * The decrement itself does not touch EFLAGS. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16300
16301
/** Opcode 0xe1.
 * LOOPE/LOOPZ rel8: decrement CX/ECX/RCX (selected by the effective
 * address size) and branch if the counter is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16348
16349
/** Opcode 0xe2.
 * LOOP rel8: decrement CX/ECX/RCX (selected by the effective address size)
 * and branch while the counter is non-zero.  A LOOP that targets its own
 * first byte (displacement == minus the instruction length) is a pure
 * delay loop, so it is short-circuited by zeroing the counter instead of
 * iterating. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is now the full instruction length; a branch-to-self
               has i8Imm == -length. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Branch-to-self: skip the spinning and finish immediately. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16423
16424
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ rel8: branch if the counter register (CX/ECX/RCX,
 * selected by the effective address size) is zero.  Note the inverted
 * structure: the non-zero path falls through, the else path jumps. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16468
16469
16470/** Opcode 0xe4 */
16471FNIEMOP_DEF(iemOp_in_AL_Ib)
16472{
16473 IEMOP_MNEMONIC("in eAX,Ib");
16474 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16475 IEMOP_HLP_NO_LOCK_PREFIX();
16476 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16477}
16478
16479
/** Opcode 0xe5.
 * IN eAX,Ib: read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16488
16489
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16498
16499
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (per effective operand size) to the immediate
 * I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also means a 4-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16508
16509
/** Opcode 0xe8.
 * CALL rel16/rel32: near relative call.  In 64-bit mode the operand size
 * defaults to 64-bit and the immediate is a sign-extended imm32. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast to signed: the displacement is relative. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16538
16539
/** Opcode 0xe9.
 * JMP rel16/rel32: near relative jump.  The 64-bit case shares the 32-bit
 * decode path since the immediate is imm32 (sign-extended by the jump
 * helper) in both. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16569
16570
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32: direct far jump.  Invalid (#UD) in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    /* The selector comes after the offset in the instruction stream. */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16587
16588
/** Opcode 0xeb.
 * JMP rel8: short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16602
16603
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16611
16612
/** Opcode 0xed.
 * IN eAX,DX: read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX.
 * NOTE(review): the function name is missing the "in_" prefix used by its
 * siblings (iemOp_in_AL_DX etc.); renaming would require touching the
 * opcode dispatch table, so it is only flagged here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16620
16621
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16629
16630
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (per effective operand size) to the I/O port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16638
16639
/** Opcode 0xf0.
 * LOCK prefix: record the prefix flag and continue decoding with the next
 * opcode byte (prefix validity is checked by the actual instruction). */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16649
16650
/** Opcode 0xf1.
 * INT1/ICEBP: raise a #DB trap; fIsBpInstr is false so it is not treated
 * as the INT3 breakpoint instruction. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16659
16660
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: record the prefix flag and continue decoding.
 * Only the last of conflicting F2/F3 prefixes takes effect. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16672
16673
/** Opcode 0xf3.
 * REPE/REPZ prefix: record the prefix flag and continue decoding.
 * Only the last of conflicting F2/F3 prefixes takes effect. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16685
16686
/** Opcode 0xf4.
 * HLT: halt the CPU; privilege checking is done by the C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16693
16694
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags are affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16706
16707
16708/**
16709 * Common implementation of 'inc/dec/not/neg Eb'.
16710 *
16711 * @param bRm The RM byte.
16712 * @param pImpl The instruction implementation.
16713 */
16714FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16715{
16716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16717 {
16718 /* register access */
16719 IEM_MC_BEGIN(2, 0);
16720 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16721 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16722 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16723 IEM_MC_REF_EFLAGS(pEFlags);
16724 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16725 IEM_MC_ADVANCE_RIP();
16726 IEM_MC_END();
16727 }
16728 else
16729 {
16730 /* memory access. */
16731 IEM_MC_BEGIN(2, 2);
16732 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16733 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16735
16736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16737 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16738 IEM_MC_FETCH_EFLAGS(EFlags);
16739 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16740 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16741 else
16742 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16743
16744 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16745 IEM_MC_COMMIT_EFLAGS(EFlags);
16746 IEM_MC_ADVANCE_RIP();
16747 IEM_MC_END();
16748 }
16749 return VINF_SUCCESS;
16750}
16751
16752
16753/**
16754 * Common implementation of 'inc/dec/not/neg Ev'.
16755 *
16756 * @param bRm The RM byte.
16757 * @param pImpl The instruction implementation.
16758 */
16759FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16760{
16761 /* Registers are handled by a common worker. */
16762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16763 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16764
16765 /* Memory we do here. */
16766 switch (pIemCpu->enmEffOpSize)
16767 {
16768 case IEMMODE_16BIT:
16769 IEM_MC_BEGIN(2, 2);
16770 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16771 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16773
16774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16775 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16776 IEM_MC_FETCH_EFLAGS(EFlags);
16777 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16778 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16779 else
16780 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16781
16782 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16783 IEM_MC_COMMIT_EFLAGS(EFlags);
16784 IEM_MC_ADVANCE_RIP();
16785 IEM_MC_END();
16786 return VINF_SUCCESS;
16787
16788 case IEMMODE_32BIT:
16789 IEM_MC_BEGIN(2, 2);
16790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16791 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16793
16794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16795 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16796 IEM_MC_FETCH_EFLAGS(EFlags);
16797 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16798 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16799 else
16800 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16801
16802 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16803 IEM_MC_COMMIT_EFLAGS(EFlags);
16804 IEM_MC_ADVANCE_RIP();
16805 IEM_MC_END();
16806 return VINF_SUCCESS;
16807
16808 case IEMMODE_64BIT:
16809 IEM_MC_BEGIN(2, 2);
16810 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16813
16814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16815 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16816 IEM_MC_FETCH_EFLAGS(EFlags);
16817 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16818 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16819 else
16820 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16821
16822 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16823 IEM_MC_COMMIT_EFLAGS(EFlags);
16824 IEM_MC_ADVANCE_RIP();
16825 IEM_MC_END();
16826 return VINF_SUCCESS;
16827
16828 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16829 }
16830}
16831
16832
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND without storing the result, updating only EFLAGS.
 * Since the destination is never written, the memory operand is mapped
 * read-only and LOCK is rejected. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm = 1: the imm8 follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        /* Read-only mapping: TEST never writes the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16880
16881
/**
 * Opcode 0xf7 /0 - test Ev,Iv.
 *
 * ANDs the r/m16/32/64 operand with an immediate, updating only EFLAGS
 * (no write-back of the destination).  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.
 *
 * @param   bRm     The ModRM byte.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* AF is left undefined by TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = size of the imm16 fetched below, still to come in the instruction stream. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* Read-only mapping: TEST never writes the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the imm32 fetched below. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = size of the imm32 fetched below (sign-extended to 64 bits). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17018
17019
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Eb.
 *
 * The byte-sized multiply/divide helpers all operate on AX (implicit
 * accumulator) and a single 8-bit operand.  A non-zero return from the
 * assembly helper indicates a \#DE condition (divide error / overflow).
 *
 * @param   bRm     The ModRM byte.
 * @param   pfnU8   The 8-bit assembly helper (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* (redundant: also checked at the top of the function) */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; otherwise raise the divide error exception. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17074
17075
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for MUL/IMUL/DIV/IDIV Ev.
 *
 * The word/dword/qword multiply/divide helpers operate on the implicit
 * AX:DX / EAX:EDX / RAX:RDX register pair plus one explicit operand.
 * A non-zero return from the assembly helper indicates a \#DE condition.
 *
 * @param   bRm     The ModRM byte.
 * @param   pImpl   The size-dispatched assembly helper table (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    /* SF, ZF, AF and PF are undefined after mul/div. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX(); /* (redundant: also checked at the top) */
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0 means success; otherwise raise #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes clear the high halves of RAX/RDX in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes clear the high halves of RAX/RDX in 64-bit mode. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17259
/**
 * Opcode 0xf6 - Group 3 with byte operands.
 *
 * Dispatches on the reg field of the ModRM byte:
 * /0 TEST, /1 \#UD (see todo), /2 NOT, /3 NEG, /4 MUL, /5 IMUL, /6 DIV, /7 IDIV.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            /* SF, ZF, AF and PF are undefined after mul/imul. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            /* All arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17296
17297
/**
 * Opcode 0xf7 - Group 3 with word/dword/qword operands.
 *
 * Dispatches on the reg field of the ModRM byte:
 * /0 TEST, /1 \#UD (see todo), /2 NOT, /3 NEG, /4 MUL, /5 IMUL, /6 DIV, /7 IDIV.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF, ZF, AF and PF are undefined after mul/imul. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are undefined after div/idiv. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17334
17335
/** Opcode 0xf8 - CLC: clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17347
17348
/** Opcode 0xf9 - STC: set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17360
17361
/** Opcode 0xfa - CLI: deferred to a C implementation (privilege checking etc.). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17369
17370
/** Opcode 0xfb - STI: deferred to a C implementation (privilege checking etc.). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17377
17378
/** Opcode 0xfc - CLD: clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17390
17391
/** Opcode 0xfd - STD: set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17403
17404
17405/** Opcode 0xfe. */
17406FNIEMOP_DEF(iemOp_Grp4)
17407{
17408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17409 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17410 {
17411 case 0:
17412 IEMOP_MNEMONIC("inc Ev");
17413 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17414 case 1:
17415 IEMOP_MNEMONIC("dec Ev");
17416 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17417 default:
17418 IEMOP_MNEMONIC("grp4-ud");
17419 return IEMOP_RAISE_INVALID_OPCODE();
17420 }
17421}
17422
17423
/**
 * Opcode 0xff /2 - CALL near indirect (register or memory operand).
 *
 * Fetches the new IP/EIP/RIP from the r/m operand and defers the actual
 * call (stack push + branch) to the size-specific C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    /* Near call defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17505
17506typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17507
17508FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17509{
17510 /* Registers? How?? */
17511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17512 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17513
17514 /* Far pointer loaded from memory. */
17515 switch (pIemCpu->enmEffOpSize)
17516 {
17517 case IEMMODE_16BIT:
17518 IEM_MC_BEGIN(3, 1);
17519 IEM_MC_ARG(uint16_t, u16Sel, 0);
17520 IEM_MC_ARG(uint16_t, offSeg, 1);
17521 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17525 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17526 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17527 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17528 IEM_MC_END();
17529 return VINF_SUCCESS;
17530
17531 case IEMMODE_64BIT:
17532 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17533 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17534 * and call far qword [rsp] encodings. */
17535 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17536 {
17537 IEM_MC_BEGIN(3, 1);
17538 IEM_MC_ARG(uint16_t, u16Sel, 0);
17539 IEM_MC_ARG(uint64_t, offSeg, 1);
17540 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17544 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17545 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17546 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17547 IEM_MC_END();
17548 return VINF_SUCCESS;
17549 }
17550 /* AMD falls thru. */
17551
17552 case IEMMODE_32BIT:
17553 IEM_MC_BEGIN(3, 1);
17554 IEM_MC_ARG(uint16_t, u16Sel, 0);
17555 IEM_MC_ARG(uint32_t, offSeg, 1);
17556 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17560 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17561 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17562 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17563 IEM_MC_END();
17564 return VINF_SUCCESS;
17565
17566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17567 }
17568}
17569
17570
/**
 * Opcode 0xff /3 - CALL far indirect.
 *
 * Thin wrapper delegating to the common far-branch worker with the
 * far-call C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17580
17581
/**
 * Opcode 0xff /4 - JMP near indirect (register or memory operand).
 *
 * Fetches the new IP/EIP/RIP from the r/m operand and sets RIP directly
 * (no stack involvement, unlike /2 calln).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    /* Near jump defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17663
17664
/**
 * Opcode 0xff /5 - JMP far indirect.
 *
 * Thin wrapper delegating to the common far-branch worker with the
 * far-jump C implementation.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17674
17675
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands go through the common push-GReg worker; memory
 * operands are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    /* Push defaults to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17729
17730
/**
 * Opcode 0xff - Group 5.
 *
 * Dispatches on the reg field of the ModRM byte:
 * /0 INC Ev, /1 DEC Ev, /2 CALL near, /3 CALL far, /4 JMP near,
 * /5 JMP far, /6 PUSH Ev, /7 \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit reg values are covered above; this is unreachable. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17759
17760
17761
/**
 * The one-byte opcode dispatch table (opcodes 0x00..0xff).
 *
 * Indexed directly by the first opcode byte the decoder fetches; each entry
 * is the FNIEMOP worker that decodes and emulates that instruction, prefix,
 * escape byte, or group indirection.  The array order IS the binary opcode
 * encoding, so entries must never be reordered; the hex comments mark the
 * opcode of the first entry on each row.
 *
 * Notable entry classes visible below:
 *  - 0x0f: escape to the two-byte opcode map (iemOp_2byteEscape).
 *  - 0x26/0x2e/0x36/0x3e/0x64/0x65: segment override prefixes (iemOp_seg_*).
 *  - 0x66/0x67: operand-size and address-size prefixes.
 *  - 0x62/0xc4/0xc5: legacy bound/les/lds, doubling as EVEX/VEX escapes in
 *    64-bit mode (see the _evex/_vex2/_vex3 name suffixes).
 *  - 0xd8..0xdf: the eight x87 FPU escape bytes (iemOp_EscF0..iemOp_EscF7).
 *  - 0xf0/0xf2/0xf3: lock and repne/repe prefixes.
 *  - 0x80..0x83, 0xc0/0xc1, 0xc6/0xc7, 0xd0..0xd3, 0xf6/0xf7, 0xfe/0xff:
 *    ModR/M.reg-selected instruction groups (Grp1/Grp2/Grp11/Grp3/Grp4/Grp5).
 *  - Some mov-immediate entries (0xb1..0xbf) use shortened names without the
 *    'mov_' infix (e.g. iemOp_CL_Ib) -- historical naming, same pattern.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17829
17830
17831/** @} */
17832
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette