VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 61683

Last change on this file since 61683 was 61665, checked in by vboxsync, 9 years ago

Type typo.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 610.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 61665 2016-06-10 16:25:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb form).
 *
 * Fetches the ModR/M byte itself and handles both the register and the
 * memory destination forms, including LOCK-prefixed memory variants.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked worker (CMP, TEST) only read the
           destination, so it is mapped read-only in that case. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv form).
 *
 * Fetches the ModR/M byte itself, switches on the effective operand size and
 * handles both the register and memory destination forms, including
 * LOCK-prefixed memory variants.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST does not write the destination, so it must not clear the
                   high dword of the 64-bit register either. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as the "has locked variants / writes destination"
           sentinel for all operand sizes (NULL for CMP and TEST). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb form).
 *
 * The destination is always a register, so there is no locked variant and the
 * LOCK prefix is rejected up front.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Note the swapped roles vs. the rm_r8 worker: reg field is the
           destination, r/m field is the source. */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory (read-only source; no mapping needed).
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev form).
 *
 * The destination is always a register, so there is no locked variant and the
 * LOCK prefix is rejected up front.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Unconditional here, unlike the rm_rv worker — presumably
                   because TEST has no Gv,Ev encoding; verify if reused. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory (read-only source; no mapping needed).
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t,   u16Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t,   u32Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t,   u64Src,   1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
 * a byte immediate (AL,Ib form).
 *
 * No ModR/M byte; the destination is always AL. LOCK is rejected since the
 * destination is a register.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz form).
 *
 * Immediate size follows the effective operand size; in 64-bit mode the
 * immediate is a sign-extended dword, as per the Iz encoding.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST does not write its destination, so no high-dword clearing. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: dword immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. Deliberately undefined opcodes; always raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0 - SLDT: store the LDTR selector.
 * Register form stores zero-extended to the effective operand size; memory
 * form always stores 16 bits regardless of operand size. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
595
596
/** Opcode 0x0f 0x00 /1 - STR: store the task register selector.
 * Same store-size rules as SLDT: register form follows the effective operand
 * size, memory form is always 16-bit. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
653
654
/** Opcode 0x0f 0x00 /2 - LLDT: load the LDTR from a 16-bit selector.
 * Fetches the selector (register or memory) and defers the privilege and
 * descriptor checks to the iemCImpl_lldt C implementation. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        /* CPL check is done before the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
685
686
/** Opcode 0x0f 0x00 /3 - LTR: load the task register from a 16-bit selector.
 * Fetches the selector (register or memory) and defers the privilege and
 * descriptor checks to the iemCImpl_ltr C implementation. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* CPL check is done before the memory read here. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
717
718
719/** Opcode 0x0f 0x00 /3. */
720FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
721{
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 IEM_MC_BEGIN(2, 0);
729 IEM_MC_ARG(uint16_t, u16Sel, 0);
730 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
731 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
732 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
733 IEM_MC_END();
734 }
735 else
736 {
737 IEM_MC_BEGIN(2, 1);
738 IEM_MC_ARG(uint16_t, u16Sel, 0);
739 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
742 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
743 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
744 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
745 IEM_MC_END();
746 }
747 return VINF_SUCCESS;
748}
749
750
/** Opcode 0x0f 0x00 /4 - VERR: verify a segment for reading. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false selects the read check in the common worker. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
758
759
760/** Opcode 0x0f 0x00 /5. */
761FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
762{
763 IEMOP_MNEMONIC("verr Ew");
764 IEMOP_HLP_MIN_286();
765 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
766}
767
768
/** Opcode 0x0f 0x00 - Group 6: dispatch on the ModR/M reg field.
 * /6 and /7 are undefined and raise \#UD. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
787
788
/** Opcode 0x0f 0x01 /0 - SGDT: store the GDTR to memory.
 * Memory-only operand; the actual store is done by iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_MIN_286();
    /* Operand size is forced to 64-bit in long mode. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
805
806
/*
 * VMX instructions (0x0f 0x01 /0 register forms) - not implemented yet;
 * each stub logs the fact and raises #UD.
 */

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
837
838
/** Opcode 0x0f 0x01 /1 - SIDT: store the IDTR to memory.
 * Memory-only operand; the actual store is done by iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_MIN_286();
    /* Operand size is forced to 64-bit in long mode. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
855
856
/** Opcode 0x0f 0x01 /1 (register form 0xc8) - MONITOR.
 * Defers to iemCImpl_monitor, passing the effective segment for the
 * DS:rAX-based linear address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
864
865
/** Opcode 0x0f 0x01 /1 (register form 0xc9) - MWAIT.
 * Defers entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
873
874
/** Opcode 0x0f 0x01 /2 - LGDT: load the GDTR from memory.
 * Passes the effective operand size so the C implementation can handle the
 * 24/32/64-bit base truncation rules. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    /* Operand size is forced to 64-bit in long mode. */
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
891
892
/** Opcode 0x0f 0x01 0xd0 - XGETBV.
 * Only valid when the guest CPU profile advertises XSAVE/XRSTOR; otherwise
 * raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        /* LOCK, REPZ and REPNZ prefixes are all invalid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
904
905
/** Opcode 0x0f 0x01 0xd1 - XSETBV.
 * Only valid when the guest CPU profile advertises XSAVE/XRSTOR; otherwise
 * raises \#UD. Privilege checks are done by iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        /* LOCK, REPZ and REPNZ prefixes are all invalid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
917
918
/** Opcode 0x0f 0x01 /3 - LIDT: load the IDTR from memory.
 * Computes the effective operand size inline (forced to 64-bit in long
 * mode) and defers the actual load to iemCImpl_lidt. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                            0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
936
937
/*
 * AMD-V (SVM) instructions (0x0f 0x01 0xd8..0xdf) - not implemented;
 * each stub decodes to #UD.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
961
962/** Opcode 0x0f 0x01 /4. */
963FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
964{
965 IEMOP_MNEMONIC("smsw");
966 IEMOP_HLP_MIN_286();
967 IEMOP_HLP_NO_LOCK_PREFIX();
968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
969 {
970 switch (pIemCpu->enmEffOpSize)
971 {
972 case IEMMODE_16BIT:
973 IEM_MC_BEGIN(0, 1);
974 IEM_MC_LOCAL(uint16_t, u16Tmp);
975 IEM_MC_FETCH_CR0_U16(u16Tmp);
976 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
977 { /* likely */ }
978 else if (IEM_GET_TARGET_CPU(pIemCpu) >= IEMTARGETCPU_386)
979 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
980 else
981 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
982 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
983 IEM_MC_ADVANCE_RIP();
984 IEM_MC_END();
985 return VINF_SUCCESS;
986
987 case IEMMODE_32BIT:
988 IEM_MC_BEGIN(0, 1);
989 IEM_MC_LOCAL(uint32_t, u32Tmp);
990 IEM_MC_FETCH_CR0_U32(u32Tmp);
991 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
992 IEM_MC_ADVANCE_RIP();
993 IEM_MC_END();
994 return VINF_SUCCESS;
995
996 case IEMMODE_64BIT:
997 IEM_MC_BEGIN(0, 1);
998 IEM_MC_LOCAL(uint64_t, u64Tmp);
999 IEM_MC_FETCH_CR0_U64(u64Tmp);
1000 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
1001 IEM_MC_ADVANCE_RIP();
1002 IEM_MC_END();
1003 return VINF_SUCCESS;
1004
1005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1006 }
1007 }
1008 else
1009 {
1010 /* Ignore operand size here, memory refs are always 16-bit. */
1011 IEM_MC_BEGIN(0, 2);
1012 IEM_MC_LOCAL(uint16_t, u16Tmp);
1013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1015 IEM_MC_FETCH_CR0_U16(u16Tmp);
1016 if (IEM_GET_TARGET_CPU(pIemCpu) > IEMTARGETCPU_386)
1017 { /* likely */ }
1018 else if (pIemCpu->uTargetCpu >= IEMTARGETCPU_386)
1019 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1020 else
1021 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1022 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
1023 IEM_MC_ADVANCE_RIP();
1024 IEM_MC_END();
1025 return VINF_SUCCESS;
1026 }
1027}
1028
1029
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: fetch the 16-bit GPR and defer to the C implementation. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1058
1059
/** Opcode 0x0f 0x01 /7 (memory form): INVLPG. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* Calculates the effective address and defers TLB invalidation to the C implementation. */
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1073
1074
/** Opcode 0x0f 0x01 /7 (register form, rm=0): SWAPGS.  64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1083
1084
/** Opcode 0x0f 0x01 /7 (register form, rm=1): RDTSCP.  Not yet implemented. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1092
1093
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  The reg field selects the sub-instruction; for
     * several encodings the register form (mod == 3) selects a different
     * instruction via the rm field (VMX, SVM, monitor/mwait, etc.).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) / VMX ops (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (mem) / MONITOR, MWAIT (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) / XGETBV, XSETBV (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) / AMD SVM ops (reg, all eight rm values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5: /* Unassigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) / SWAPGS, RDTSCP (reg). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1170
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew forms. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    /*
     * Fetches a 16-bit selector from register or memory and defers the
     * access-rights/limit lookup plus EFLAGS.ZF update to the C helpers.
     * The 32-bit and 64-bit operand sizes share the 64-bit path.
     */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1272
1273
1274
/** Opcode 0x0f 0x02 - LAR Gv,Ew. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true); /* true = LAR */
}
1281
1282
/** Opcode 0x0f 0x03 - LSL Gv,Ew. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false); /* false = LSL */
}
1289
1290
/** Opcode 0x0f 0x05 - SYSCALL. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1298
1299
/** Opcode 0x0f 0x06 - CLTS (clear CR0.TS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1307
1308
/** Opcode 0x0f 0x07 - SYSRET. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1316
1317
/** Opcode 0x0f 0x08 - INVD.  Not yet implemented. */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1321
1322
/** Opcode 0x0f 0x09 - WBINVD. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* Decoded as a privileged no-op: only the CPL-0 check is emulated,
       no actual cache write-back/invalidate is performed. */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1335
1336
/** Opcode 0x0f 0x0b - UD2, the architecturally defined invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC("ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1343
/** Opcode 0x0f 0x0d - AMD prefetch group P. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form is invalid - prefetches only take memory operands. */
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for its side effects (e.g. #GP checks),
       then treat the prefetch itself as a NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1383
1384
/** Opcode 0x0f 0x0e - FEMMS (AMD 3DNow!).  Not yet implemented. */
FNIEMOP_STUB(iemOp_femms);
1387
1388
/* AMD 3DNow! instructions (0x0f 0x0f imm8) - all unimplemented stubs,
   dispatched from iemOp_3Dnow below. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1460
1461
/** Opcode 0x0f 0x0f - 3DNow! escape; the actual opcode is the trailing imm8. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1503
1504
/** Opcode 0x0f 0x10 - movups/movupd/movss/movsd load forms.  Not yet implemented. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1507
1508
/** Opcode 0x0f 0x11 - movups/movupd/movss/movsd store forms. */
FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
{
    /* Quick hack. Need to restructure all of this later some time. */
    /* Only the SSE-relevant prefix bits decide which variant this is. */
    uint32_t const fRelevantPrefix = pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
    if (fRelevantPrefix == 0)
    {
        /* No prefix: MOVUPS Wps,Vps - full 128-bit store. */
        IEMOP_MNEMONIC("movups Wps,Vps");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 0);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
    {
        /* F2 prefix: MOVSD Wsd,Vsd - low 64 bits only. */
        IEMOP_MNEMONIC("movsd Wsd,Vsd");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /* 0x66 (movupd) and F3 (movss) variants not implemented yet. */
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
    }
    return VINF_SUCCESS;
}
1602
1603
/** Opcode 0x0f 0x12 - movlps/movhlps/movlpd/movsldup/movddup.  Not yet implemented. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1606
1607
/** Opcode 0x0f 0x13 - movlps/movlpd Mq,Vq. */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
{
    /* Quick hack. Need to restructure all of this later some time. */
    /* NOTE(review): exact compare against IEM_OP_PRF_SIZE_OP means any
       additional prefix bit (e.g. REX in 64-bit mode) falls through to the
       stub below - presumably acceptable for this interim code; verify. */
    if (pIemCpu->fPrefixes == IEM_OP_PRF_SIZE_OP)
    {
        IEMOP_MNEMONIC("movlpd Mq,Vq");
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        {
#if 0
            /*
             * Register, register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
#else
            /* Register form is invalid for this encoding. */
            return IEMOP_RAISE_INVALID_OPCODE();
#endif
        }
        else
        {
            /*
             * Memory, register.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        return VINF_SUCCESS;
    }

    /* MOVLPS (no prefix) variant not implemented yet. */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1661
1662
/** Opcode 0x0f 0x14 - unpcklps/unpcklpd.  Not yet implemented. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15 - unpckhps/unpckhpd.  Not yet implemented. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16 - movhps/movlhps/movhpd/movshdup.  Not yet implemented. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17 - movhps/movhpd store forms.  Not yet implemented. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1671
1672
/** Opcode 0x0f 0x18 - SSE prefetch hints (group 16). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address, then treat the hint as a NOP. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* Register form is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1704
1705
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP Ev. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: nothing to do. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: still decode the effective address, no access made. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1728
1729
/** Opcode 0x0f 0x20 - MOV Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        /* Only these control registers exist; anything else is #UD. */
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1761
1762
/** Opcode 0x0f 0x21 - MOV Rd,Dd (read debug register). */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R cannot extend the debug register index - #UD. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1776
1777
/** Opcode 0x0f 0x22 - MOV Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        /* Only these control registers exist; anything else is #UD. */
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1809
1810
/** Opcode 0x0f 0x23 - MOV Dd,Rd (write debug register). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R cannot extend the debug register index - #UD. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1824
1825
/** Opcode 0x0f 0x24 - MOV Rd,Td (test registers; invalid on modern CPUs). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1834
1835
/** Opcode 0x0f 0x26 - MOV Td,Rd (test registers; invalid on modern CPUs). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1844
1845
/** Opcode 0x0f 0x28 - movaps/movapd Vps,Wps (aligned 128-bit load). */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
{
    /* 0x66 prefix selects the movapd (SSE2) variant. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps r,mr" : "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* alignment-checked load (#GP on misalignment) */
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1893
1894
/** Opcode 0x0f 0x29 - movaps/movapd Wps,Vps (aligned 128-bit store). */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    /* 0x66 prefix selects the movapd (SSE2) variant. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movaps mr,r" : "movapd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checked store (#GP on misalignment) */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1942
1943
/** Opcode 0x0f 0x2a - cvtpi2ps/cvtpi2pd/cvtsi2ss/cvtsi2sd.  Not yet implemented. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1946
1947
/** Opcode 0x0f 0x2b - movntps/movntpd (non-temporal aligned 128-bit store). */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    /* 0x66 prefix selects the movntpd (SSE2) variant.  The non-temporal
       hint itself is not emulated - it behaves as a plain aligned store. */
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntps mr,r" : "movntpd mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
1981
1982
/** Opcode 0x0f 0x2c - cvttps2pi/cvttpd2pi/cvttss2si/cvttsd2si.  Not yet implemented. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d - cvtps2pi/cvtpd2pi/cvtss2si/cvtsd2si.  Not yet implemented. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e - ucomiss/ucomisd.  Not yet implemented. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f - comiss/comisd.  Not yet implemented. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1991
1992
/** Opcode 0x0f 0x30 - WRMSR. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2000
2001
/** Opcode 0x0f 0x31 - RDTSC. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2009
2010
/** Opcode 0x0f 0x32 - RDMSR.  (Comment previously said 0x33; RDMSR encodes as 0F 32.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2018
2019
/** Opcode 0x0f 0x33 - RDPMC.  (Comment previously said 0x34; RDPMC encodes as 0F 33.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34 - SYSENTER.  Not yet implemented. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35 - SYSEXIT.  Not yet implemented. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37 - GETSEC.  Not yet implemented. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38 - three-byte opcode escape. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a - three-byte opcode escape. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2032
2033
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Emits both the register and memory source forms for 16/32/64-bit operand
 * sizes.  Note that in the 32-bit case the high dword of the destination is
 * cleared even when the condition is false (the IEM_MC_ELSE branches below),
 * matching the usual 32-bit operation zero-extension.
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2134
2135
2136
/** Opcode 0x0f 0x40 - cmovo: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovnb: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp/cmovpe: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp/cmovpo: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl/cmovnge: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle/cmovng: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
2265
/* SSE/SSE2 floating-point opcodes 0x0f 0x50..0x5f - all still stubs. */
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2298
2299
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects between the SSE (0x66) and MMX (none) forms. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,     pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only 64 bits are read, but alignment is checked as for a full 128-bit access. */
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint32_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint32_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2410
2411
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}


/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}


/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}


/* Pack and compare opcodes 0x0f 0x63..0x67 - not implemented yet. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2446
2447
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects between the SSE (0x66) and MMX (none) forms. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *,          pDst, 0);
                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2558
2559
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/** Opcode 0x0f 0x6c. SSE2 only (the common worker rejects the MMX form). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}


/** Opcode 0x0f 0x6d. SSE2 only (the common worker rejects the MMX form). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2601
2602
/** Opcode 0x0f 0x6e.
 *
 * movd/movq to an XMM (0x66 prefix) or MMX (no prefix) register from a
 * general register or memory.  REX.W selects the 64-bit (movq) form,
 * otherwise the 32-bit source is zero extended into the destination.
 */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2707
2708
/** Opcode 0x0f 0x6f.
 *
 * movq Pq,Qq (no prefix), movdqa Vdq,Wdq (0x66) and movdqu Vdq,Wdq (0xf3);
 * only the 0x66 form performs alignment checking on the memory operand.
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - shares the code path with the unaligned form below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2805
2806
/** Opcode 0x0f 0x70. The immediate here is evil!
 *
 * pshufw (no prefix, MMX ext), pshufd (0x66), pshuflw (0xf2) and
 * pshufhw (0xf3).  The immediate byte follows the ModR/M operand, so in the
 * memory forms it must be fetched after the effective address calculation.
 */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Pick the worker for the active mandatory prefix. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *,         pDst, 0);
                IEM_MC_ARG(uint128_t const *,   pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after any displacement, hence it is fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after any displacement, hence it is fetched here. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2935
2936
/* Group 12 workers (word shifts by immediate) - all still stubs. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2954
2955
2956/** Opcode 0x0f 0x71. */
2957FNIEMOP_DEF(iemOp_Grp12)
2958{
2959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2960 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2961 return IEMOP_RAISE_INVALID_OPCODE();
2962 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2963 {
2964 case 0: case 1: case 3: case 5: case 7:
2965 return IEMOP_RAISE_INVALID_OPCODE();
2966 case 2:
2967 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2968 {
2969 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2970 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2971 default: return IEMOP_RAISE_INVALID_OPCODE();
2972 }
2973 case 4:
2974 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2975 {
2976 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2977 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2978 default: return IEMOP_RAISE_INVALID_OPCODE();
2979 }
2980 case 6:
2981 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2982 {
2983 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2984 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2985 default: return IEMOP_RAISE_INVALID_OPCODE();
2986 }
2987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2988 }
2989}
2990
2991
/* Group 13 workers (dword shifts by immediate) - all still stubs. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3009
3010
3011/** Opcode 0x0f 0x72. */
3012FNIEMOP_DEF(iemOp_Grp13)
3013{
3014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3015 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3016 return IEMOP_RAISE_INVALID_OPCODE();
3017 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3018 {
3019 case 0: case 1: case 3: case 5: case 7:
3020 return IEMOP_RAISE_INVALID_OPCODE();
3021 case 2:
3022 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3023 {
3024 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3025 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3026 default: return IEMOP_RAISE_INVALID_OPCODE();
3027 }
3028 case 4:
3029 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3030 {
3031 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3032 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3033 default: return IEMOP_RAISE_INVALID_OPCODE();
3034 }
3035 case 6:
3036 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3037 {
3038 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3039 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3040 default: return IEMOP_RAISE_INVALID_OPCODE();
3041 }
3042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3043 }
3044}
3045
3046
/** Opcode 0x0f 0x73 11/2 - psrlq Nq,Ib: MMX shift-right-logical qword by immediate (stub). */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Udq,Ib: SSE2 packed shift-right-logical qwords by immediate (stub). */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Udq,Ib: SSE2 byte shift right of the whole dqword (no MMX form; stub). */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6 - psllq Nq,Ib: MMX shift-left-logical qword by immediate (stub). */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6 - psllq Udq,Ib: SSE2 packed shift-left-logical qwords by immediate (stub). */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Udq,Ib: SSE2 byte shift left of the whole dqword (no MMX form; stub). */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3064
3065
/**
 * Opcode 0x0f 0x73 - Group 14: packed qword/dqword shifts by immediate
 * (psrlq/psrldq/psllq/pslldq).  The ModR/M reg field selects the operation;
 * no prefix = MMX form, 0x66 = SSE2 form.  The dqword byte shifts
 * (reg 3 and 7) only exist with the 0x66 prefix.
 */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* Only register forms exist; a memory operand is #UD. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5: /* unassigned reg values */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq - shift right logical */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE(); /* F3/F2 prefixed forms are invalid */
            }
        case 3: /* psrldq - byte shift right, 0x66 prefixed (SSE2) only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq - shift left logical */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - byte shift left, 0x66 prefixed (SSE2) only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all 8 reg values are enumerated above */
    }
}
3105
3106
3107/**
3108 * Common worker for SSE2 and MMX instructions on the forms:
3109 * pxxx mm1, mm2/mem64
3110 * pxxx xmm1, xmm2/mem128
3111 *
3112 * Proper alignment of the 128-bit operand is enforced.
3113 * Exceptions type 4. SSE2 and MMX cpuid checks.
3114 */
3115FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3116{
3117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3118 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3119 {
3120 case IEM_OP_PRF_SIZE_OP: /* SSE */
3121 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3122 {
3123 /*
3124 * Register, register.
3125 */
3126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3127 IEM_MC_BEGIN(2, 0);
3128 IEM_MC_ARG(uint128_t *, pDst, 0);
3129 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3131 IEM_MC_PREPARE_SSE_USAGE();
3132 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3133 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3134 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3135 IEM_MC_ADVANCE_RIP();
3136 IEM_MC_END();
3137 }
3138 else
3139 {
3140 /*
3141 * Register, memory.
3142 */
3143 IEM_MC_BEGIN(2, 2);
3144 IEM_MC_ARG(uint128_t *, pDst, 0);
3145 IEM_MC_LOCAL(uint128_t, uSrc);
3146 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3148
3149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3151 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3152 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3153
3154 IEM_MC_PREPARE_SSE_USAGE();
3155 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3156 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3157
3158 IEM_MC_ADVANCE_RIP();
3159 IEM_MC_END();
3160 }
3161 return VINF_SUCCESS;
3162
3163 case 0: /* MMX */
3164 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3165 {
3166 /*
3167 * Register, register.
3168 */
3169 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3170 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3172 IEM_MC_BEGIN(2, 0);
3173 IEM_MC_ARG(uint64_t *, pDst, 0);
3174 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3175 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3176 IEM_MC_PREPARE_FPU_USAGE();
3177 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3178 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3179 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3180 IEM_MC_ADVANCE_RIP();
3181 IEM_MC_END();
3182 }
3183 else
3184 {
3185 /*
3186 * Register, memory.
3187 */
3188 IEM_MC_BEGIN(2, 2);
3189 IEM_MC_ARG(uint64_t *, pDst, 0);
3190 IEM_MC_LOCAL(uint64_t, uSrc);
3191 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3193
3194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3196 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3197 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
3198
3199 IEM_MC_PREPARE_FPU_USAGE();
3200 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3201 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3202
3203 IEM_MC_ADVANCE_RIP();
3204 IEM_MC_END();
3205 }
3206 return VINF_SUCCESS;
3207
3208 default:
3209 return IEMOP_RAISE_INVALID_OPCODE();
3210 }
3211}
3212
3213
/** Opcode 0x0f 0x74 - pcmpeqb: packed compare-equal of bytes (MMX / 0x66-prefixed SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Thin wrapper: the common MMX/SSE2 worker handles decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3220
3221
/** Opcode 0x0f 0x75 - pcmpeqw: packed compare-equal of words (MMX / 0x66-prefixed SSE2). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Thin wrapper: the common MMX/SSE2 worker handles decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3228
3229
/** Opcode 0x0f 0x76 - pcmpeqd: packed compare-equal of dwords (MMX / 0x66-prefixed SSE2).
 * @note The identifier says "pcmped" (missing 'q') - a typo kept as-is since
 *       the opcode table elsewhere references this exact name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Thin wrapper: the common MMX/SSE2 worker handles decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3236
3237
/** Opcode 0x0f 0x77 - emms: empty MMX state (stub, not yet implemented). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread / AMD group 17 (raises \#UD via the UD stub). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite (raises \#UD via the UD stub). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps (stub).
 * @note "Vdp" in the identifier looks like a typo for "Vpd"; kept as-is since
 *       the opcode table references this exact name. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps (stub). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3248
3249
/**
 * Opcode 0x0f 0x7e - movd/movq from an MMX (no prefix) or XMM (0x66 prefix)
 * register to a general register or memory.  REX.W selects the 64-bit (movq)
 * variant, otherwise the low 32 bits are moved (movd).
 * @note The F3-prefixed movq Vq,Wq form is not handled here (falls to #UD).
 */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: full 64-bit low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* movd: low dword only. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* F3/F2 prefixed forms not implemented here */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3360
3361
/**
 * Opcode 0x0f 0x7f - store forms: movq Qq,Pq (MMX, no prefix),
 * movdqa Wdq,Vdq (0x66, aligned) and movdqu Wdq,Vdq (F3, unaligned).
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned stores share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.  (Alignment is irrelevant here.)
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp); /* movdqa: #GP on misalignment */
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* F2 prefixed form is invalid */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3461
3462
3463
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1), 16/32-bit signed displacement. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_MIN_386();                /* two-byte Jcc requires a 386+ */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near branches default to 64-bit operand size in long mode */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* The immediate must be fetched before starting the MC block. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3498
3499
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3534
3535
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3570
3571
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3606
3607
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3642
3643
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not zero (ZF=0).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3678
3679
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3714
3715
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3750
3751
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3786
3787
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3822
3823
/** Opcode 0x0f 0x8a - jp/jpe Jv: jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3858
3859
3860/** Opcode 0x0f 0x8b. */
3861FNIEMOP_DEF(iemOp_jnp_Jv)
3862{
3863 IEMOP_MNEMONIC("jo Jv");
3864 IEMOP_HLP_MIN_386();
3865 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3866 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3867 {
3868 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3869 IEMOP_HLP_NO_LOCK_PREFIX();
3870
3871 IEM_MC_BEGIN(0, 0);
3872 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3873 IEM_MC_ADVANCE_RIP();
3874 } IEM_MC_ELSE() {
3875 IEM_MC_REL_JMP_S16(i16Imm);
3876 } IEM_MC_ENDIF();
3877 IEM_MC_END();
3878 }
3879 else
3880 {
3881 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3882 IEMOP_HLP_NO_LOCK_PREFIX();
3883
3884 IEM_MC_BEGIN(0, 0);
3885 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3886 IEM_MC_ADVANCE_RIP();
3887 } IEM_MC_ELSE() {
3888 IEM_MC_REL_JMP_S32(i32Imm);
3889 } IEM_MC_ENDIF();
3890 IEM_MC_END();
3891 }
3892 return VINF_SUCCESS;
3893}
3894
3895
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3930
3931
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (SF == OF).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3966
3967
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4002
4003
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (ZF=0 and SF == OF).
 * Condition is inverted in the MC block: the branch is on the ELSE path. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4038
4039
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4079
4080
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0.
 * Note the stored constants are swapped relative to seto. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4120
4121
/** Opcode 0x0f 0x92 - setc Eb: store 1 in the byte operand if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4161
4162
/** Opcode 0x0f 0x93 - setnc Eb: store 1 in the byte operand if CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: CF set => 0, clear => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4202
4203
/** Opcode 0x0f 0x94 - sete Eb: store 1 in the byte operand if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4243
4244
/** Opcode 0x0f 0x95 - setne Eb: store 1 in the byte operand if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: ZF set => 0, clear => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4284
4285
/** Opcode 0x0f 0x96 - setbe Eb: store 1 in the byte operand if CF=1 or ZF=1,
 *  else 0 (below or equal, unsigned). */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4325
4326
/** Opcode 0x0f 0x97 - setnbe Eb: store 1 in the byte operand if CF=0 and
 *  ZF=0, else 0 (above, unsigned). */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: any of CF/ZF set => 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4366
4367
/** Opcode 0x0f 0x98 - sets Eb: store 1 in the byte operand if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4407
4408
/** Opcode 0x0f 0x99 - setns Eb: store 1 in the byte operand if SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: SF set => 0, clear => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4448
4449
4450/** Opcode 0x0f 0x9a. */
4451FNIEMOP_DEF(iemOp_setp_Eb)
4452{
4453 IEMOP_MNEMONIC("setnp Eb");
4454 IEMOP_HLP_MIN_386();
4455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4456 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4457
4458 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4459 * any way. AMD says it's "unused", whatever that means. We're
4460 * ignoring for now. */
4461 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4462 {
4463 /* register target */
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4466 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4467 } IEM_MC_ELSE() {
4468 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4469 } IEM_MC_ENDIF();
4470 IEM_MC_ADVANCE_RIP();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 /* memory target */
4476 IEM_MC_BEGIN(0, 1);
4477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4479 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4480 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4481 } IEM_MC_ELSE() {
4482 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4483 } IEM_MC_ENDIF();
4484 IEM_MC_ADVANCE_RIP();
4485 IEM_MC_END();
4486 }
4487 return VINF_SUCCESS;
4488}
4489
4490
/** Opcode 0x0f 0x9b - setnp Eb: store 1 in the byte operand if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: PF set => 0, clear => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4530
4531
/** Opcode 0x0f 0x9c - setl Eb: store 1 in the byte operand if SF != OF,
 *  else 0 (less, signed). */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4571
4572
/** Opcode 0x0f 0x9d - setnl Eb: store 1 in the byte operand if SF == OF,
 *  else 0 (greater or equal, signed). */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: SF != OF => 0, equal => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4612
4613
/** Opcode 0x0f 0x9e - setle Eb: store 1 in the byte operand if ZF=1 or
 *  SF != OF, else 0 (less or equal, signed). */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4653
4654
/** Opcode 0x0f 0x9f - setnle Eb: store 1 in the byte operand if ZF=0 and
 *  SF == OF, else 0 (greater, signed). */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition: (ZF=1 or SF!=OF) => 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4694
4695
4696/**
4697 * Common 'push segment-register' helper.
4698 */
4699FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4700{
4701 IEMOP_HLP_NO_LOCK_PREFIX();
4702 if (iReg < X86_SREG_FS)
4703 IEMOP_HLP_NO_64BIT();
4704 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4705
4706 switch (pIemCpu->enmEffOpSize)
4707 {
4708 case IEMMODE_16BIT:
4709 IEM_MC_BEGIN(0, 1);
4710 IEM_MC_LOCAL(uint16_t, u16Value);
4711 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4712 IEM_MC_PUSH_U16(u16Value);
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 break;
4716
4717 case IEMMODE_32BIT:
4718 IEM_MC_BEGIN(0, 1);
4719 IEM_MC_LOCAL(uint32_t, u32Value);
4720 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4721 IEM_MC_PUSH_U32_SREG(u32Value);
4722 IEM_MC_ADVANCE_RIP();
4723 IEM_MC_END();
4724 break;
4725
4726 case IEMMODE_64BIT:
4727 IEM_MC_BEGIN(0, 1);
4728 IEM_MC_LOCAL(uint64_t, u64Value);
4729 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4730 IEM_MC_PUSH_U64(u64Value);
4731 IEM_MC_ADVANCE_RIP();
4732 IEM_MC_END();
4733 break;
4734 }
4735
4736 return VINF_SUCCESS;
4737}
4738
4739
/** Opcode 0x0f 0xa0 - push fs: defers to the common segment-push worker. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4748
4749
/** Opcode 0x0f 0xa1 - pop fs: deferred to the C implementation since loading
 *  a segment register may fault / reload hidden state. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4758
4759
/** Opcode 0x0f 0xa2 - cpuid: deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4768
4769
4770/**
4771 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4772 * iemOp_bts_Ev_Gv.
4773 */
4774FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4775{
4776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4777 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4778
4779 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4780 {
4781 /* register destination. */
4782 IEMOP_HLP_NO_LOCK_PREFIX();
4783 switch (pIemCpu->enmEffOpSize)
4784 {
4785 case IEMMODE_16BIT:
4786 IEM_MC_BEGIN(3, 0);
4787 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4788 IEM_MC_ARG(uint16_t, u16Src, 1);
4789 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4790
4791 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4792 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4793 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4794 IEM_MC_REF_EFLAGS(pEFlags);
4795 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4796
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 return VINF_SUCCESS;
4800
4801 case IEMMODE_32BIT:
4802 IEM_MC_BEGIN(3, 0);
4803 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4804 IEM_MC_ARG(uint32_t, u32Src, 1);
4805 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4806
4807 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4808 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4809 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4810 IEM_MC_REF_EFLAGS(pEFlags);
4811 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4812
4813 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4814 IEM_MC_ADVANCE_RIP();
4815 IEM_MC_END();
4816 return VINF_SUCCESS;
4817
4818 case IEMMODE_64BIT:
4819 IEM_MC_BEGIN(3, 0);
4820 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4821 IEM_MC_ARG(uint64_t, u64Src, 1);
4822 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4823
4824 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4825 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4826 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4827 IEM_MC_REF_EFLAGS(pEFlags);
4828 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4829
4830 IEM_MC_ADVANCE_RIP();
4831 IEM_MC_END();
4832 return VINF_SUCCESS;
4833
4834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4835 }
4836 }
4837 else
4838 {
4839 /* memory destination. */
4840
4841 uint32_t fAccess;
4842 if (pImpl->pfnLockedU16)
4843 fAccess = IEM_ACCESS_DATA_RW;
4844 else /* BT */
4845 {
4846 IEMOP_HLP_NO_LOCK_PREFIX();
4847 fAccess = IEM_ACCESS_DATA_R;
4848 }
4849
4850 NOREF(fAccess);
4851
4852 /** @todo test negative bit offsets! */
4853 switch (pIemCpu->enmEffOpSize)
4854 {
4855 case IEMMODE_16BIT:
4856 IEM_MC_BEGIN(3, 2);
4857 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4858 IEM_MC_ARG(uint16_t, u16Src, 1);
4859 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4861 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4862
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4864 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4865 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4866 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4867 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4868 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4869 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4870 IEM_MC_FETCH_EFLAGS(EFlags);
4871
4872 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4873 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4874 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4875 else
4876 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4877 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4878
4879 IEM_MC_COMMIT_EFLAGS(EFlags);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_32BIT:
4885 IEM_MC_BEGIN(3, 2);
4886 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4887 IEM_MC_ARG(uint32_t, u32Src, 1);
4888 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4890 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4891
4892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4893 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4894 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4895 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4896 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4897 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4898 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4899 IEM_MC_FETCH_EFLAGS(EFlags);
4900
4901 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4902 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4903 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4904 else
4905 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4906 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4907
4908 IEM_MC_COMMIT_EFLAGS(EFlags);
4909 IEM_MC_ADVANCE_RIP();
4910 IEM_MC_END();
4911 return VINF_SUCCESS;
4912
4913 case IEMMODE_64BIT:
4914 IEM_MC_BEGIN(3, 2);
4915 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4916 IEM_MC_ARG(uint64_t, u64Src, 1);
4917 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4919 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4920
4921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4922 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4923 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4924 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4925 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4926 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4927 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4928 IEM_MC_FETCH_EFLAGS(EFlags);
4929
4930 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4931 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4932 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4933 else
4934 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4935 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4936
4937 IEM_MC_COMMIT_EFLAGS(EFlags);
4938 IEM_MC_ADVANCE_RIP();
4939 IEM_MC_END();
4940 return VINF_SUCCESS;
4941
4942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4943 }
4944 }
4945}
4946
4947
4948/** Opcode 0x0f 0xa3. */
4949FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4950{
4951 IEMOP_MNEMONIC("bt Gv,Gv");
4952 IEMOP_HLP_MIN_386();
4953 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4954}
4955
4956
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Ev is the destination (register or memory), Gv supplies the bits shifted
 * in, and the shift count is an imm8 following the ModR/M encoding.
 *
 * @param   pImpl   The double-shift implementation table (shld/shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - the imm8 count follows the ModR/M byte. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* The trailing byte (1) is the imm8 shift count following the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5101
5102
5103/**
5104 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5105 */
5106FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5107{
5108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5109 IEMOP_HLP_NO_LOCK_PREFIX();
5110 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5111
5112 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5113 {
5114 IEMOP_HLP_NO_LOCK_PREFIX();
5115
5116 switch (pIemCpu->enmEffOpSize)
5117 {
5118 case IEMMODE_16BIT:
5119 IEM_MC_BEGIN(4, 0);
5120 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5121 IEM_MC_ARG(uint16_t, u16Src, 1);
5122 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5123 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5124
5125 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5126 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5127 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5128 IEM_MC_REF_EFLAGS(pEFlags);
5129 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5130
5131 IEM_MC_ADVANCE_RIP();
5132 IEM_MC_END();
5133 return VINF_SUCCESS;
5134
5135 case IEMMODE_32BIT:
5136 IEM_MC_BEGIN(4, 0);
5137 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5138 IEM_MC_ARG(uint32_t, u32Src, 1);
5139 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5140 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5141
5142 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5143 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5144 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5145 IEM_MC_REF_EFLAGS(pEFlags);
5146 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5147
5148 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5149 IEM_MC_ADVANCE_RIP();
5150 IEM_MC_END();
5151 return VINF_SUCCESS;
5152
5153 case IEMMODE_64BIT:
5154 IEM_MC_BEGIN(4, 0);
5155 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5156 IEM_MC_ARG(uint64_t, u64Src, 1);
5157 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5158 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5159
5160 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5161 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5162 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5163 IEM_MC_REF_EFLAGS(pEFlags);
5164 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5165
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173 else
5174 {
5175 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
5176
5177 switch (pIemCpu->enmEffOpSize)
5178 {
5179 case IEMMODE_16BIT:
5180 IEM_MC_BEGIN(4, 2);
5181 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5182 IEM_MC_ARG(uint16_t, u16Src, 1);
5183 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5184 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5186
5187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5188 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5189 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5190 IEM_MC_FETCH_EFLAGS(EFlags);
5191 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5192 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5193
5194 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5195 IEM_MC_COMMIT_EFLAGS(EFlags);
5196 IEM_MC_ADVANCE_RIP();
5197 IEM_MC_END();
5198 return VINF_SUCCESS;
5199
5200 case IEMMODE_32BIT:
5201 IEM_MC_BEGIN(4, 2);
5202 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5203 IEM_MC_ARG(uint32_t, u32Src, 1);
5204 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5205 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5207
5208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5209 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5210 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5211 IEM_MC_FETCH_EFLAGS(EFlags);
5212 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5213 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5214
5215 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5216 IEM_MC_COMMIT_EFLAGS(EFlags);
5217 IEM_MC_ADVANCE_RIP();
5218 IEM_MC_END();
5219 return VINF_SUCCESS;
5220
5221 case IEMMODE_64BIT:
5222 IEM_MC_BEGIN(4, 2);
5223 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5224 IEM_MC_ARG(uint64_t, u64Src, 1);
5225 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5226 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5228
5229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5231 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5232 IEM_MC_FETCH_EFLAGS(EFlags);
5233 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5234 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5235
5236 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5237 IEM_MC_COMMIT_EFLAGS(EFlags);
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245}
5246
5247
5248
/** Opcode 0x0f 0xa4 - SHLD Ev,Gv,Ib (386+): double precision shift left, immediate count. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5256
5257
/** Opcode 0x0f 0xa5 - SHLD Ev,Gv,CL (386+): double precision shift left, count in CL. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5265
5266
/** Opcode 0x0f 0xa8 - PUSH GS (386+). */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Defer to the common segment register push worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5275
5276
/** Opcode 0x0f 0xa9 - POP GS (386+). */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Segment register loads have side effects, so this is a C implementation call. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
5285
5286
/** Opcode 0x0f 0xaa - RSM: not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();
5290
5291
/** Opcode 0x0f 0xab - BTS Ev,Gv (386+): bit test and set. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5299
5300
/** Opcode 0x0f 0xac - SHRD Ev,Gv,Ib (386+): double precision shift right, immediate count. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5308
5309
/** Opcode 0x0f 0xad - SHRD Ev,Gv,CL (386+): double precision shift right, count in CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5317
5318
/** Opcode 0x0f 0xae mem/0 - FXSAVE m512: save FPU/MMX/SSE state. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    /* #UD if the guest CPU profile doesn't feature FXSAVE/FXRSTOR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* The heavy lifting (512-byte state image write) is a C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5337
5338
/** Opcode 0x0f 0xae mem/1 - FXRSTOR m512: restore FPU/MMX/SSE state. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxrstor m512");
    /* #UD if the guest CPU profile doesn't feature FXSAVE/FXRSTOR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    /* The heavy lifting (512-byte state image read + validation) is a C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5357
5358
/** Opcode 0x0f 0xae mem/2 - LDMXCSR: not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3 - STMXCSR: not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4 - XSAVE: decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5 - XRSTOR: decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6 - XSAVEOPT: decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7 - CLFLUSH: not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5376
5377
/** Opcode 0x0f 0xae 11b/5 - LFENCE (SSE2). */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* #UD unless the guest CPU profile features SSE2 (LFENCE requirement). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    /* Use the real LFENCE instruction when the host supports it, otherwise
       fall back on the alternative memory fence implementation. */
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5395
5396
5397/** Opcode 0x0f 0xae 11b/6. */
5398FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5399{
5400 IEMOP_MNEMONIC("mfence");
5401 IEMOP_HLP_NO_LOCK_PREFIX();
5402 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5403 return IEMOP_RAISE_INVALID_OPCODE();
5404
5405 IEM_MC_BEGIN(0, 0);
5406 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5407 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5408 else
5409 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5410 IEM_MC_ADVANCE_RIP();
5411 IEM_MC_END();
5412 return VINF_SUCCESS;
5413}
5414
5415
5416/** Opcode 0x0f 0xae 11b/7. */
5417FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5418{
5419 IEMOP_MNEMONIC("sfence");
5420 IEMOP_HLP_NO_LOCK_PREFIX();
5421 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
5422 return IEMOP_RAISE_INVALID_OPCODE();
5423
5424 IEM_MC_BEGIN(0, 0);
5425 if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
5426 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5427 else
5428 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5429 IEM_MC_ADVANCE_RIP();
5430 IEM_MC_END();
5431 return VINF_SUCCESS;
5432}
5433
5434
/** Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE (FSGSBASE): decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE (FSGSBASE): decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE (FSGSBASE): decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE (FSGSBASE): decodes to \#UD (UD stub). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5446
5447
/**
 * Opcode 0x0f 0xae - Group 15 dispatcher (FXSAVE/FXRSTOR, MXCSR, XSAVE
 * family, fences and FS/GS base access).
 *
 * Memory forms dispatch purely on the mod r/m reg field; register forms
 * additionally dispatch on the repeat/operand-size/lock prefixes.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory operand forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register operand forms: prefix selects the instruction family. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* No prefix: the fence instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* (not reached - all cases above return) */

            case IEM_OP_PRF_REPZ: /* F3 prefix: the FS/GS base instructions. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* (not reached - all cases above return) */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5507
5508
/** Opcode 0x0f 0xaf - IMUL Gv,Ev (386+): two operand signed multiply. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* SF, ZF, AF and PF are undefined after IMUL; exempt them from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5517
5518
/** Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb (486+): compare AL with r/m8 and
 *  conditionally exchange.  The assembly helper performs the compare,
 *  the conditional store and the AL update, honouring the LOCK prefix
 *  via the _locked variant. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination form.
         */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory destination form.  The destination is mapped read-write and
         * committed unconditionally; AL is worked on via a local copy that is
         * written back afterwards.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5577
/** Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv (486+): compare rAX with r/m and
 *  conditionally exchange.  Same structure as the byte variant, but with a
 *  size switch and a 32-bit host special case for the 64-bit source. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination forms.
         */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes zero the high halves in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination forms.  The destination is mapped read-write and
         * committed unconditionally; rAX is worked on via a local copy that is
         * written back afterwards.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t,      u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t,      u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t,      u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5771
5772
/**
 * Common worker for LSS, LFS and LGS: loads a far pointer from memory,
 * i.e. an offset followed by a 16-bit selector, into iSegReg:greg.
 *
 * The actual register/segment loading is deferred to the
 * iemCImpl_load_SReg_Greg C implementation.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand
 *                      (asserted below, callers check this).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 16-bit offset first, then the selector at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset first, then the selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            /* Selector at +8 (after the 64-bit offset). */
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5834
5835
5836/** Opcode 0x0f 0xb2. */
5837FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5838{
5839 IEMOP_MNEMONIC("lss Gv,Mp");
5840 IEMOP_HLP_MIN_386();
5841 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5843 return IEMOP_RAISE_INVALID_OPCODE();
5844 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5845}
5846
5847
5848/** Opcode 0x0f 0xb3. */
5849FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5850{
5851 IEMOP_MNEMONIC("btr Ev,Gv");
5852 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5853}
5854
5855
5856/** Opcode 0x0f 0xb4. */
5857FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5858{
5859 IEMOP_MNEMONIC("lfs Gv,Mp");
5860 IEMOP_HLP_MIN_386();
5861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5862 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5863 return IEMOP_RAISE_INVALID_OPCODE();
5864 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5865}
5866
5867
5868/** Opcode 0x0f 0xb5. */
5869FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5870{
5871 IEMOP_MNEMONIC("lgs Gv,Mp");
5872 IEMOP_HLP_MIN_386();
5873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5874 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5875 return IEMOP_RAISE_INVALID_OPCODE();
5876 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5877}
5878
5879
/** Opcode 0x0f 0xb6 - MOVZX Gv,Eb (386+): zero-extend byte to word/dword/qword. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5970
5971
/** Opcode 0x0f 0xb7 - MOVZX Gv,Ew (386+): zero-extend word to dword/qword.
 *  The 16-bit operand size form is folded into the 32-bit one below. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6038
6039
/** Opcode 0x0f 0xb8. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe); /* popcnt (F3 prefix) / jmpe — not implemented yet. */
6042
6043
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 (UD1): architecturally reserved, always raises #UD. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6050
6051
/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    /* Group 8: BT/BTS/BTR/BTC Ev,Ib — the /r field of ModR/M selects the operation. */
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            /* /0../3 are undefined encodings. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* The immediate bit index is taken modulo the operand width (mask 0x0f/0x1f/0x3f). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads, so it has no locked variant; reject LOCK and map read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: effective address is calculated before fetching the Ib byte
                   (1 = number of immediate bytes still to come after ModR/M). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6214
6215
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    /* BTC Ev,Gv: test bit and complement; shared bit-op worker does the decoding. */
    IEMOP_MNEMONIC("btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6223
6224
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    /* BSF Gv,Ev: bit scan forward; only ZF is defined, the rest are undefined. */
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6233
6234
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    /* BSR Gv,Ev: bit scan reverse; only ZF is defined, the rest are undefined. */
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6243
6244
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    /* MOVSX Gv,Eb: sign-extend a byte register/memory operand into a 16/32/64-bit GPR. */
    IEMOP_MNEMONIC("movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6335
6336
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew: sign-extend a 16-bit register/memory operand into a 32/64-bit GPR. */
    IEMOP_MNEMONIC("movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     * assuming that it will be ignored. Would be nice to have a few
     * test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16/32-bit operand size: sign-extend to 32 bits. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* 64-bit operand size: sign-extend to 64 bits. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6403
6404
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb: exchange and add; the old destination lands in the source register. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* A local copy of the register is taken; the helper swaps the old memory
           value into it and it is written back to the register afterwards. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS; /* Note: redundant with the fall-through return below. */
    }
    return VINF_SUCCESS;
}
6463
6464
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange and add for 16/32/64-bit operands. */
    IEMOP_MNEMONIC("xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both registers were written, so clear the upper halves of both. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern as iemOp_xadd_Eb_Gb: work on a local register copy and
           write it back after the memory commit. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6617
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib); /* SSE compares — not implemented yet. */
6620
6621
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy: non-temporal store of a 32/64-bit GPR to memory (SSE2).
       The non-temporal hint itself is not modelled here; it is a plain store. */
    IEMOP_MNEMONIC("movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* #UD when the guest CPU profile lacks SSE2. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
6675
6676
/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib); /* not implemented yet */

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib); /* not implemented yet */

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib); /* not implemented yet */
6685
6686
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B m64: compare EDX:EAX with m64; if equal store ECX:EBX and set ZF,
       otherwise load m64 into EDX:EAX and clear ZF. */
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather EDX:EAX into one 64-bit union (Lo = EAX, Hi = EDX). */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Gather ECX:EBX likewise (Lo = EBX, Hi = ECX). */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the helper left the old memory value in u64EaxEdx;
       write it back to EAX/EDX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6731
6732
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm); /* raises #UD until implemented */

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm); /* raises #UD until implemented */

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm); /* raises #UD until implemented */

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm); /* raises #UD until implemented */

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm); /* raises #UD until implemented */

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm); /* raises #UD until implemented */
6750
6751
6752/** Opcode 0x0f 0xc7. */
6753FNIEMOP_DEF(iemOp_Grp9)
6754{
6755 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6757 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6758 {
6759 case 0: case 2: case 3: case 4: case 5:
6760 return IEMOP_RAISE_INVALID_OPCODE();
6761 case 1:
6762 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6763 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6764 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6765 return IEMOP_RAISE_INVALID_OPCODE();
6766 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6767 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6768 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6769 case 6:
6770 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6771 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6772 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6773 {
6774 case 0:
6775 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6776 case IEM_OP_PRF_SIZE_OP:
6777 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6778 case IEM_OP_PRF_REPZ:
6779 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6780 default:
6781 return IEMOP_RAISE_INVALID_OPCODE();
6782 }
6783 case 7:
6784 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6785 {
6786 case 0:
6787 case IEM_OP_PRF_REPZ:
6788 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6789 default:
6790 return IEMOP_RAISE_INVALID_OPCODE();
6791 }
6792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6793 }
6794}
6795
6796
/**
 * Common 'bswap register' helper.
 *
 * @param   iReg    The full register index (REX.B already folded in by caller).
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap is architecturally undefined on real CPUs; this
               implementation delegates to a dedicated u16 helper. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6836
6837
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
       prefix. REX.B is the correct prefix it appears. For a parallel
       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6848
6849
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* REX.B selects r9 instead of rCX; see note in iemOp_bswap_rAX_r8. */
    IEMOP_MNEMONIC("bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6857
6858
6859/** Opcode 0x0f 0xca. */
6860FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6861{
6862 IEMOP_MNEMONIC("bswap rDX/r9");
6863 IEMOP_HLP_MIN_486();
6864 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6865}
6866
6867
6868/** Opcode 0x0f 0xcb. */
6869FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6870{
6871 IEMOP_MNEMONIC("bswap rBX/r9");
6872 IEMOP_HLP_MIN_486();
6873 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6874}
6875
6876
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* REX.B selects r12 instead of rSP. */
    IEMOP_MNEMONIC("bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6884
6885
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* REX.B selects r13 instead of rBP. */
    IEMOP_MNEMONIC("bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6893
6894
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* REX.B selects r14 instead of rSI. */
    IEMOP_MNEMONIC("bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6902
6903
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* REX.B selects r15 instead of rDI. */
    IEMOP_MNEMONIC("bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6911
6912
6913
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps); /* not implemented yet */
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq); /* not implemented yet */
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq); /* not implemented yet */
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq); /* not implemented yet */
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq); /* not implemented yet */
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq); /* not implemented yet */
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /* not implemented yet */
6928
6929
6930/** Opcode 0x0f 0xd7. */
6931FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6932{
6933 /* Docs says register only. */
6934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6935 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6936 return IEMOP_RAISE_INVALID_OPCODE();
6937
6938 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6939 /** @todo testcase: Check that the instruction implicitly clears the high
6940 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6941 * and opcode modifications are made to work with the whole width (not
6942 * just 128). */
6943 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6944 {
6945 case IEM_OP_PRF_SIZE_OP: /* SSE */
6946 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6947 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6948 IEM_MC_BEGIN(2, 0);
6949 IEM_MC_ARG(uint64_t *, pDst, 0);
6950 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6951 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6952 IEM_MC_PREPARE_SSE_USAGE();
6953 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6954 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6955 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6956 IEM_MC_ADVANCE_RIP();
6957 IEM_MC_END();
6958 return VINF_SUCCESS;
6959
6960 case 0: /* MMX */
6961 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6962 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6963 IEM_MC_BEGIN(2, 0);
6964 IEM_MC_ARG(uint64_t *, pDst, 0);
6965 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6966 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6967 IEM_MC_PREPARE_FPU_USAGE();
6968 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6969 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6970 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6971 IEM_MC_ADVANCE_RIP();
6972 IEM_MC_END();
6973 return VINF_SUCCESS;
6974
6975 default:
6976 return IEMOP_RAISE_INVALID_OPCODE();
6977 }
6978}
6979
6980
/* Not yet implemented MMX/SSE opcodes 0x0f 0xd8 thru 0xe6 - stubs only. */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. (Note: 'pamxub' in the name is a typo for 'pmaxub',
 *  kept as-is since the two-byte opcode table references it.) */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7011
7012
/** Opcode 0x0f 0xe7. MOVNTQ (MMX) / MOVNTDQ (SSE2) - non-temporal store of an
 *  MMX/XMM register to memory.  Only the memory destination form is valid. */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC(!(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? "movntq mr,r" : "movntdq mr,r");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, memory.
         */
/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
        {

            case IEM_OP_PRF_SIZE_OP: /* SSE */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                /* MOVNTDQ requires a 16-byte aligned destination (#GP otherwise). */
                IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case 0: /* MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, uSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            default: /* REPNZ/REPZ prefixed forms are undefined. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
7070
7071
/* Not yet implemented MMX/SSE opcodes 0x0f 0xe8 thru 0xee - stubs only. */
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);


/** Opcode 0x0f 0xef. PXOR - bitwise XOR of MMX/XMM registers, both the MMX
 *  and the 0x66-prefixed SSE2 forms are handled by the common worker. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
7094
7095
/* Not yet implemented MMX/SSE opcodes 0x0f 0xf0 thru 0xfe - stubs only. */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7126
7127
/**
 * Two byte opcode dispatch table, indexed by the opcode byte following 0x0f.
 * Note: the comment before each entry names the second opcode byte.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_Invalid,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
7387
7388/** @} */
7389
7390
7391/** @name One byte opcodes.
7392 *
7393 * @{
7394 */
7395
/** Opcode 0x00. ADD r/m8, r8. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. ADD r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. ADD r8, r/m8. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. ADD r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. ADD AL, imm8. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. ADD rAX, imm16/32 (sign-extended in 64-bit mode). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06. PUSH ES. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. POP ES - invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
7460
7461
/** Opcode 0x08. OR r/m8, r8.  AF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09. OR r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC("or  Ev,Gv ");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a. OR r8, r/m8. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. OR r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. OR AL, imm8. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. OR rAX, imm16/32. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e. PUSH CS. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f. Escape byte - dispatch to the two byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7532
/** Opcode 0x10. ADC r/m8, r8 (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. ADC r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. ADC r8, r/m8. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. ADC r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. ADC AL, imm8. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. ADC rAX, imm16/32. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16. PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. POP SS - invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7597
7598
/** Opcode 0x18. SBB r/m8, r8 (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. SBB r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. SBB r8, r/m8. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. SBB r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. SBB AL, imm8. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. SBB rAX, imm16/32. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. POP DS - invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7663
7664
/** Opcode 0x20. AND r/m8, r8.  AF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. AND r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. AND r8, r/m8. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. AND r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. AND AL, imm8. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. AND rAX, imm16/32. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26. ES segment override prefix - record it and decode the next
 *  opcode byte recursively via the one byte map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27. DAA - decimal adjust AL after addition; invalid in 64-bit
 *  mode; OF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7740
7741
/** Opcode 0x28. SUB r/m8, r8. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. SUB r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. SUB r8, r/m8. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. SUB r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. SUB AL, imm8. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. SUB rAX, imm16/32. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e. CS segment override prefix - record it and decode the next
 *  opcode byte recursively via the one byte map. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f. DAS - decimal adjust AL after subtraction; invalid in
 *  64-bit mode; OF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7811
7812
/** Opcode 0x30. XOR r/m8, r8.  AF is left undefined by the hardware. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. XOR r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. XOR r8, r/m8. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. XOR r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. XOR AL, imm8. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. XOR rAX, imm16/32. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36. SS segment override prefix - record it and decode the next
 *  opcode byte recursively via the one byte map. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37. AAA - ASCII adjust AL after addition; not yet implemented. */
FNIEMOP_STUB(iemOp_aaa);
7881
7882
7883/** Opcode 0x38. */
7884FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
7885{
7886 IEMOP_MNEMONIC("cmp Eb,Gb");
7887 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7888 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
7889}
7890
7891
7892/** Opcode 0x39. */
7893FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
7894{
7895 IEMOP_MNEMONIC("cmp Ev,Gv");
7896 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
7897 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
7898}
7899
7900
7901/** Opcode 0x3a. */
7902FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
7903{
7904 IEMOP_MNEMONIC("cmp Gb,Eb");
7905 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
7906}
7907
7908
7909/** Opcode 0x3b. */
7910FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
7911{
7912 IEMOP_MNEMONIC("cmp Gv,Ev");
7913 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
7914}
7915
7916
7917/** Opcode 0x3c. */
7918FNIEMOP_DEF(iemOp_cmp_Al_Ib)
7919{
7920 IEMOP_MNEMONIC("cmp al,Ib");
7921 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
7922}
7923
7924
7925/** Opcode 0x3d. */
7926FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
7927{
7928 IEMOP_MNEMONIC("cmp rAX,Iz");
7929 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
7930}
7931
7932
/** Opcode 0x3e. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    /* Dispatch the following byte through the one-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7943
7944
/** Opcode 0x3f. */
FNIEMOP_STUB(iemOp_aas); /* AAS — not implemented yet, stubbed out. */
7947
7948/**
7949 * Common 'inc/dec/not/neg register' helper.
7950 */
7951FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
7952{
7953 IEMOP_HLP_NO_LOCK_PREFIX();
7954 switch (pIemCpu->enmEffOpSize)
7955 {
7956 case IEMMODE_16BIT:
7957 IEM_MC_BEGIN(2, 0);
7958 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7959 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7960 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
7961 IEM_MC_REF_EFLAGS(pEFlags);
7962 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7963 IEM_MC_ADVANCE_RIP();
7964 IEM_MC_END();
7965 return VINF_SUCCESS;
7966
7967 case IEMMODE_32BIT:
7968 IEM_MC_BEGIN(2, 0);
7969 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7970 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7971 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7972 IEM_MC_REF_EFLAGS(pEFlags);
7973 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7974 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7975 IEM_MC_ADVANCE_RIP();
7976 IEM_MC_END();
7977 return VINF_SUCCESS;
7978
7979 case IEMMODE_64BIT:
7980 IEM_MC_BEGIN(2, 0);
7981 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7982 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7983 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7984 IEM_MC_REF_EFLAGS(pEFlags);
7985 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7986 IEM_MC_ADVANCE_RIP();
7987 IEM_MC_END();
7988 return VINF_SUCCESS;
7989 }
7990 return VINF_SUCCESS;
7991}
7992
7993
/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (no R/X/B/W bits set): record and decode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC ax/eax. */
    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
8012
8013
/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the r/m / base / opcode-embedded register index by 8. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC cx/ecx. */
    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
8033
8034
/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index register field by 8. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC dx/edx. */
    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
8054
8055
8056
/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX: combines the B and X extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC bx/ebx. */
    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
8077
8078
/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModRM reg field by 8. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC sp/esp. */
    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
8098
8099
/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB: combines the R and B extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC bp/ebp. */
    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
8120
8121
/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX: combines the R and X extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC si/esi. */
    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
8142
8143
/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX: combines the R, B and X extensions. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC di/edi. */
    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
8165
8166
/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W: selects 64-bit operand size; recalc the effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC ax/eax. */
    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
8186
8187
/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW: B extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC cx/ecx. */
    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
8208
8209
/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW: X extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC dx/edx. */
    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
8230
8231
/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW: B and X extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC bx/ebx. */
    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
8253
8254
/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW: R extension plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC sp/esp. */
    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
8275
8276
/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW: R and B extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC bp/ebp. */
    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
8298
8299
/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW: R and X extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC si/esi. */
    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
8321
8322
/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW: all register extensions plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC di/edi. */
    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
8345
8346
8347/**
8348 * Common 'push register' helper.
8349 */
8350FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8351{
8352 IEMOP_HLP_NO_LOCK_PREFIX();
8353 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8354 {
8355 iReg |= pIemCpu->uRexB;
8356 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8357 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8358 }
8359
8360 switch (pIemCpu->enmEffOpSize)
8361 {
8362 case IEMMODE_16BIT:
8363 IEM_MC_BEGIN(0, 1);
8364 IEM_MC_LOCAL(uint16_t, u16Value);
8365 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8366 IEM_MC_PUSH_U16(u16Value);
8367 IEM_MC_ADVANCE_RIP();
8368 IEM_MC_END();
8369 break;
8370
8371 case IEMMODE_32BIT:
8372 IEM_MC_BEGIN(0, 1);
8373 IEM_MC_LOCAL(uint32_t, u32Value);
8374 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8375 IEM_MC_PUSH_U32(u32Value);
8376 IEM_MC_ADVANCE_RIP();
8377 IEM_MC_END();
8378 break;
8379
8380 case IEMMODE_64BIT:
8381 IEM_MC_BEGIN(0, 1);
8382 IEM_MC_LOCAL(uint64_t, u64Value);
8383 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8384 IEM_MC_PUSH_U64(u64Value);
8385 IEM_MC_ADVANCE_RIP();
8386 IEM_MC_END();
8387 break;
8388 }
8389
8390 return VINF_SUCCESS;
8391}
8392
8393
/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
8400
8401
/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
8408
8409
/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
8416
8417
/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
8424
8425
8426/** Opcode 0x54. */
8427FNIEMOP_DEF(iemOp_push_eSP)
8428{
8429 IEMOP_MNEMONIC("push rSP");
8430 if (IEM_GET_TARGET_CPU(pIemCpu) == IEMTARGETCPU_8086)
8431 {
8432 IEM_MC_BEGIN(0, 1);
8433 IEM_MC_LOCAL(uint16_t, u16Value);
8434 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8435 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8436 IEM_MC_PUSH_U16(u16Value);
8437 IEM_MC_ADVANCE_RIP();
8438 IEM_MC_END();
8439 }
8440 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8441}
8442
8443
/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
8450
8451
/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
8458
8459
/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
8466
8467
8468/**
8469 * Common 'pop register' helper.
8470 */
8471FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8472{
8473 IEMOP_HLP_NO_LOCK_PREFIX();
8474 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
8475 {
8476 iReg |= pIemCpu->uRexB;
8477 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
8478 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8479 }
8480
8481 switch (pIemCpu->enmEffOpSize)
8482 {
8483 case IEMMODE_16BIT:
8484 IEM_MC_BEGIN(0, 1);
8485 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8486 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8487 IEM_MC_POP_U16(pu16Dst);
8488 IEM_MC_ADVANCE_RIP();
8489 IEM_MC_END();
8490 break;
8491
8492 case IEMMODE_32BIT:
8493 IEM_MC_BEGIN(0, 1);
8494 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8495 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8496 IEM_MC_POP_U32(pu32Dst);
8497 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8498 IEM_MC_ADVANCE_RIP();
8499 IEM_MC_END();
8500 break;
8501
8502 case IEMMODE_64BIT:
8503 IEM_MC_BEGIN(0, 1);
8504 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8505 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8506 IEM_MC_POP_U64(pu64Dst);
8507 IEM_MC_ADVANCE_RIP();
8508 IEM_MC_END();
8509 break;
8510 }
8511
8512 return VINF_SUCCESS;
8513}
8514
8515
/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
8522
8523
/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
8530
8531
/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
8538
8539
/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8546
8547
/** Opcode 0x5c. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is POP r12, which needs no special SP handling. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /*
     * POP SP is special: pop into a local first, then store into SP, so the
     * popped value wins over the implicit stack-pointer increment.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8595
8596
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8603
8604
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8611
8612
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI (REX.B extension applied by the common helper). */
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8619
8620
/** Opcode 0x60. */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD — 186+, invalid in 64-bit mode; deferred to C implementations. */
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8632
8633
/** Opcode 0x61. */
FNIEMOP_DEF(iemOp_popa)
{
    /* POPA/POPAD — 186+, invalid in 64-bit mode; deferred to C implementations. */
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8645
8646
/** Opcode 0x62. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex); /* BOUND (186+) / EVEX prefix — not implemented yet. */
// IEMOP_HLP_MIN_186();
8650
8651
/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL r/m16, r16 — 286+, protected mode only (0x63 is MOVSXD in 64-bit mode). */
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX extensions here: ARPL only exists outside 64-bit mode. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Destination is mapped read-write since ARPL may adjust the RPL bits in place. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8701
8702
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.  */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    /* MOVSXD r64, r/m32 — sign-extends the 32-bit source into the 64-bit destination. */
    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8744
8745
/** Opcode 0x64. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix (386+): record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8758
8759
/** Opcode 0x65. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix (386+): record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8772
8773
/** Opcode 0x66. */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (386+): flag it, recalc the effective operand size, continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8786
8787
/** Opcode 0x67. */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (386+): toggles 16<->32 bit, and 64 -> 32 bit in long mode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8806
8807
/** Opcode 0x68. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH imm16/32 (186+); in 64-bit mode a 32-bit immediate is sign-extended to 64 bits. */
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit immediate to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8852
8853
/** Opcode 0x69. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /*
     * Three-operand IMUL (186+): Gv = Ev * Iz.  The product is computed into a
     * local temp via the two-operand worker and then stored to the Gv register.
     * SF/ZF/AF/PF are left undefined by hardware, hence the verification mask.
     */
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The 3rd argument matches the immediate size following the operand (2 bytes here). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* The immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
9013
9014
/** Opcode 0x6a. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    /* PUSH imm8 (186+): the signed byte is extended to the effective operand size. */
    IEMOP_MNEMONIC("push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm sign-extends via the int8_t -> wider conversion. */
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9041
9042
/**
 * Opcode 0x6b - IMUL Gv,Ev,Ib.
 *
 * Three-operand signed multiply: Gv = Ev * sign-extended Ib.  One body per
 * effective operand size, each split into register and memory source forms.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended byte immediate); */
    IEMOP_HLP_MIN_186(); /* Not available on the 8086/8088. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* Work in a local so the result can be stored to the reg (Gv) field afterwards. */
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the ModR/M bytes at this point. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the ModR/M bytes at this point. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the ModR/M bytes at this point. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8); /* all enmEffOpSize values handled above */
}
9196
9197
/**
 * Opcode 0x6c - INSB Yb,DX.
 *
 * Byte string input from port DX; defers to a C implementation selected by
 * address size and the presence of a REP/REPNE prefix.  The 'false' argument
 * passed to the CIMPL workers is a flag (fIoChecked presumably — the port
 * access checks have not been done yet; TODO confirm against the CIMPL
 * signatures).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186(); /* Not available on the 8086/8088. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9226
9227
/**
 * Opcode 0x6d - INSW/INSD Yv,DX.
 *
 * Word/dword string input from port DX.  Dispatches on operand size, then on
 * address size; a 64-bit operand size is treated the same as 32-bit (there is
 * no 64-bit INS), hence the shared case labels.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186(); /* Not available on the 8086/8088. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: label, position in switch irrelevant */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: label, position in switch irrelevant */
        }
    }
}
9288
9289
/**
 * Opcode 0x6e - OUTSB DX,Xb.
 *
 * Byte string output to port DX.  Unlike INS, the source operand is read via
 * the effective data segment, so iEffSeg is forwarded to the CIMPL workers
 * (segment override prefixes apply).
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186(); /* Not available on the 8086/8088. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9318
9319
/**
 * Opcode 0x6f - OUTSW/OUTSD DX,Xv.
 *
 * Word/dword string output to port DX.  Dispatches on operand size, then
 * address size; 64-bit operand size falls back to the 32-bit workers (no
 * 64-bit OUTS exists).  iEffSeg is forwarded so segment overrides apply.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186(); /* Not available on the 8086/8088. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: label, position in switch irrelevant */
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: label, position in switch irrelevant */
        }
    }
}
9380
9381
/** Opcode 0x70 - JO Jb: jump short if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9399
9400
/** Opcode 0x71 - JNO Jb: jump short if not overflow (OF=0); branches inverted vs JO. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9418
/** Opcode 0x72 - JC/JB/JNAE Jb: jump short if carry/below (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9436
9437
/** Opcode 0x73 - JNC/JNB/JAE Jb: jump short if not carry/above-or-equal (CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9455
9456
/** Opcode 0x74 - JE/JZ Jb: jump short if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9474
9475
/** Opcode 0x75 - JNE/JNZ Jb: jump short if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9493
9494
/** Opcode 0x76 - JBE/JNA Jb: jump short if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9512
9513
/** Opcode 0x77 - JNBE/JA Jb: jump short if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9531
9532
/** Opcode 0x78 - JS Jb: jump short if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9550
9551
/** Opcode 0x79 - JNS Jb: jump short if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9569
9570
/** Opcode 0x7a - JP/JPE Jb: jump short if parity even (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9588
9589
/** Opcode 0x7b - JNP/JPO Jb: jump short if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9607
9608
/** Opcode 0x7c - JL/JNGE Jb: jump short if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9626
9627
/** Opcode 0x7d - JNL/JGE Jb: jump short if greater or equal (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9645
9646
/** Opcode 0x7e - JLE/JNG Jb: jump short if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9664
9665
/** Opcode 0x7f - JNLE/JG Jb: jump short if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* condition false: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9683
9684
/**
 * Opcode 0x80 - Group 1 Eb,Ib (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP).
 *
 * The reg field of ModR/M selects the operation; the matching implementation
 * table entry supplies both the normal and LOCK-capable workers.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Index into a packed \0-separated mnemonic table using reg*4 (each name occupies 4 bytes). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read only, no locked variant, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: the imm8 still follows the addressing bytes here. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9743
9744
/**
 * Opcode 0x81 - Group 1 Ev,Iz (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP).
 *
 * Full-size immediate variant; in 64-bit mode the immediate is a sign-extended
 * 32-bit value.  One body per operand size, each split into register and
 * memory destination forms.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Index into a packed \0-separated mnemonic table using reg*4 (each name occupies 4 bytes). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read only, no locked variant, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a 16-bit immediate still follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit reg writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read only, no locked variant, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a 32-bit immediate still follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read only, no locked variant, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a 32-bit immediate (to be sign-extended) still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9919
9920
/**
 * Opcode 0x82 - alias of 0x80 (Group 1 Eb,Ib), invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9927
9928
/**
 * Opcode 0x83 - Group 1 Ev,Ib (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP).
 *
 * Sign-extended byte immediate variant; the imm8 is widened to the effective
 * operand size before the operation.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Index into a packed \0-separated mnemonic table using reg*4 (each name occupies 4 bytes). */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit reg writes zero bits 63:32 */

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16) /* null for CMP across all sizes, so checking U16 suffices */
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read only, no locked variant, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the addressing bytes here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the addressing bytes here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the imm8 still follows the addressing bytes here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10092
10093
/** Opcode 0x84.
 * TEST r/m8,r8 - ANDs the operands and sets EFLAGS without writing the
 * destination.  Reuses the common byte binary-operator decoder with the
 * TEST worker table. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
10102
10103
/** Opcode 0x85.
 * TEST r/m16/32/64,r16/32/64 - word/dword/qword variant of 0x84, dispatched
 * through the common operand-sized binary-operator decoder. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    /* AF is architecturally undefined after TEST; exclude it from verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
10112
10113
/** Opcode 0x86.
 * XCHG r/m8,r8.  Register-to-register form swaps via two fetches and two
 * crosswise stores; the memory form maps the byte read-write and calls the
 * assembly xchg worker. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both registers first, then store them crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Map the memory byte read-write and hand both pointers to the
           assembly worker, which performs the actual exchange. */
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10161
10162
/** Opcode 0x87.
 * XCHG r/m16/32/64,r16/32/64.  One case per effective operand size; the
 * memory forms map the destination read-write and call the assembly xchg
 * workers. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker wrote the register via a 32-bit pointer, so the
                   upper half of the 64-bit register is cleared explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10284
10285
/** Opcode 0x88.
 * MOV r/m8,r8 - stores a byte register into a register or memory operand. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch source (reg field), store destination (r/m field). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
10324
10325
/** Opcode 0x89.
 * MOV r/m16/32/64,r16/32/64 - stores a general register into a register or
 * memory operand, one case per effective operand size. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10412
10413
/** Opcode 0x8a.
 * MOV r8,r/m8 - loads a byte register from a register or memory operand
 * (direction reversed relative to 0x88). */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: source is the r/m field, destination the reg field. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10450
10451
/** Opcode 0x8b.
 * MOV r16/32/64,r/m16/32/64 - loads a general register from a register or
 * memory operand, one case per effective operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
10538
10539
/** Opcode 0x63.
 * Mode dependent dispatcher: outside 64-bit mode this is ARPL Ew,Gw; in
 * 64-bit mode it is MOVSXD Gv,Ev when the effective operand size is 64-bit,
 * and plain MOV Gv,Ev otherwise. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
10549
10550
/** Opcode 0x8c.
 * MOV r/m,Sreg - stores a segment selector into a general register or a
 * word-sized memory location.  Selectors above GS raise \#UD. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extending fetch; upper bits of the destination are cleared. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10623
10624
10625
10626
/** Opcode 0x8d.
 * LEA r16/32/64,m - stores the effective address of the memory operand into
 * the destination register, truncated to the effective operand size.  The
 * register form (mod=3) has no defined meaning and raises \#UD. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            /* Truncate the effective address to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* No truncation needed for the full 64-bit address. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
10671
10672
/** Opcode 0x8e.
 * MOV Sreg,r/m16 - loads a segment register from a general register or a
 * word-sized memory operand.  CS as destination and selectors above GS
 * raise \#UD; the actual load goes through iemCImpl_load_SReg. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        /* Segment loading has side effects (descriptor checks etc.), so it is
           done in a C implementation rather than inline micro-code. */
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10726
10727
/** Opcode 0x8f /0.
 * POP r/m - pops a value off the stack into a register or memory operand.
 * The memory form is implemented interpreter-style below because RSP must be
 * updated before the effective address is calculated. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
     * now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: decode the R/M+SIB bytes so the opcode pointer can be
       rewound and the calculation redone with the adjusted RSP below. */
    uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily advance RSP by the operand size, recalculate
       the effective address, then restore RSP (committed only on success). */
    PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary, then store to the destination; RSP is only
       committed after the store succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10829
10830
/** Opcode 0x8f.
 * Group 1A: /0 is POP r/m; /1 thru /7 are reserved on Intel and defined by
 * AMD as the XOP prefix, which is not decoded yet and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
    /** @todo XOP decoding. */
    IEMOP_MNEMONIC("3-byte-xop");
    return IEMOP_RAISE_INVALID_OPCODE();
}
10843
10844
10845/**
10846 * Common 'xchg reg,rAX' helper.
10847 */
10848FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10849{
10850 IEMOP_HLP_NO_LOCK_PREFIX();
10851
10852 iReg |= pIemCpu->uRexB;
10853 switch (pIemCpu->enmEffOpSize)
10854 {
10855 case IEMMODE_16BIT:
10856 IEM_MC_BEGIN(0, 2);
10857 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10858 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10859 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10860 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10861 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10862 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10863 IEM_MC_ADVANCE_RIP();
10864 IEM_MC_END();
10865 return VINF_SUCCESS;
10866
10867 case IEMMODE_32BIT:
10868 IEM_MC_BEGIN(0, 2);
10869 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10870 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10871 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10872 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10873 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10874 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10875 IEM_MC_ADVANCE_RIP();
10876 IEM_MC_END();
10877 return VINF_SUCCESS;
10878
10879 case IEMMODE_64BIT:
10880 IEM_MC_BEGIN(0, 2);
10881 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10882 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10883 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10884 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10885 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10886 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10887 IEM_MC_ADVANCE_RIP();
10888 IEM_MC_END();
10889 return VINF_SUCCESS;
10890
10891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10892 }
10893}
10894
10895
10896/** Opcode 0x90. */
10897FNIEMOP_DEF(iemOp_nop)
10898{
10899 /* R8/R8D and RAX/EAX can be exchanged. */
10900 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10901 {
10902 IEMOP_MNEMONIC("xchg r8,rAX");
10903 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10904 }
10905
10906 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10907 IEMOP_MNEMONIC("pause");
10908 else
10909 IEMOP_MNEMONIC("nop");
10910 IEM_MC_BEGIN(0, 0);
10911 IEM_MC_ADVANCE_RIP();
10912 IEM_MC_END();
10913 return VINF_SUCCESS;
10914}
10915
10916
/** Opcode 0x91.
 * XCHG rCX,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10923
10924
/** Opcode 0x92.
 * XCHG rDX,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10931
10932
/** Opcode 0x93.
 * XCHG rBX,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10939
10940
10941/** Opcode 0x94. */
10942FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10943{
10944 IEMOP_MNEMONIC("xchg rSX,rAX");
10945 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10946}
10947
10948
/** Opcode 0x95.
 * XCHG rBP,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10955
10956
/** Opcode 0x96.
 * XCHG rSI,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10963
10964
/** Opcode 0x97.
 * XCHG rDI,rAX - delegates to the common worker (REX.B is applied there). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10971
10972
/** Opcode 0x98.
 * CBW/CWDE/CDQE - sign-extend the lower half of rAX into the full operand:
 * AL->AX (cbw), AX->EAX (cwde), EAX->RAX (cdqe).  Implemented by testing the
 * sign bit and OR-ing in or AND-ing away the upper-half mask. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            /* Bit 7 is the sign bit of AL. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            /* Bit 15 is the sign bit of AX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Bit 31 is the sign bit of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11018
11019
/** Opcode 0x99.
 * CWD/CDQ/CQO - replicates the sign bit of rAX into rDX: DX:=sign(AX) (cwd),
 * EDX:=sign(EAX) (cdq), RDX:=sign(RAX) (cqo). */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            /* DX becomes all ones or all zeroes depending on AX's sign bit. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11065
11066
/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT(); /* Far CALL with direct operand is invalid in 64-bit mode. */

    /* Decode the far pointer address and pass it on to the far call C implementation.
       The offset is 16 or 32 bits depending on the effective operand size and is
       followed by the 16-bit selector. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
11083
11084
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* WAIT/FWAIT only checks for pending FPU conditions; it performs no operation itself. */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE(); /* may raise device-not-available */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();             /* may raise a pending FPU exception */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11098
11099
/** Opcode 0x9c. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode */
    /* All the real work (flag masking, IOPL/VM checks) is in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
11107
11108
/** Opcode 0x9d. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* operand size defaults to 64-bit in long mode */
    /* Defers to the C implementation, mirroring iemOp_pushf_Fv above. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
11116
11117
/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode SAHF is only available when the CPU reports the LAHF/SAHF feature. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only SF, ZF, AF, PF and CF may be loaded from AH; bit 1 is forced to 1. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* clear the low flag byte */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11140
11141
/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode LAHF is only available when the CPU reports the LAHF/SAHF feature. */
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);            /* low byte of EFLAGS */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11158
11159
11160/**
11161 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11162 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
11163 * prefixes. Will return on failures.
11164 * @param a_GCPtrMemOff The variable to store the offset in.
11165 */
11166#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11167 do \
11168 { \
11169 switch (pIemCpu->enmEffAddrMode) \
11170 { \
11171 case IEMMODE_16BIT: \
11172 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11173 break; \
11174 case IEMMODE_32BIT: \
11175 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11176 break; \
11177 case IEMMODE_64BIT: \
11178 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11179 break; \
11180 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11181 } \
11182 IEMOP_HLP_NO_LOCK_PREFIX(); \
11183 } while (0)
11184
11185/** Opcode 0xa0. */
11186FNIEMOP_DEF(iemOp_mov_Al_Ob)
11187{
11188 /*
11189 * Get the offset and fend of lock prefixes.
11190 */
11191 RTGCPTR GCPtrMemOff;
11192 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11193
11194 /*
11195 * Fetch AL.
11196 */
11197 IEM_MC_BEGIN(0,1);
11198 IEM_MC_LOCAL(uint8_t, u8Tmp);
11199 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
11200 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11201 IEM_MC_ADVANCE_RIP();
11202 IEM_MC_END();
11203 return VINF_SUCCESS;
11204}
11205
11206
/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX; the effective operand size selects the load width.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11252
11253
11254/** Opcode 0xa2. */
11255FNIEMOP_DEF(iemOp_mov_Ob_AL)
11256{
11257 /*
11258 * Get the offset and fend of lock prefixes.
11259 */
11260 RTGCPTR GCPtrMemOff;
11261 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11262
11263 /*
11264 * Store AL.
11265 */
11266 IEM_MC_BEGIN(0,1);
11267 IEM_MC_LOCAL(uint8_t, u8Tmp);
11268 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11269 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
11270 IEM_MC_ADVANCE_RIP();
11271 IEM_MC_END();
11272 return VINF_SUCCESS;
11273}
11274
11275
11276/** Opcode 0xa3. */
11277FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11278{
11279 /*
11280 * Get the offset and fend of lock prefixes.
11281 */
11282 RTGCPTR GCPtrMemOff;
11283 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11284
11285 /*
11286 * Store rAX.
11287 */
11288 switch (pIemCpu->enmEffOpSize)
11289 {
11290 case IEMMODE_16BIT:
11291 IEM_MC_BEGIN(0,1);
11292 IEM_MC_LOCAL(uint16_t, u16Tmp);
11293 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11294 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
11295 IEM_MC_ADVANCE_RIP();
11296 IEM_MC_END();
11297 return VINF_SUCCESS;
11298
11299 case IEMMODE_32BIT:
11300 IEM_MC_BEGIN(0,1);
11301 IEM_MC_LOCAL(uint32_t, u32Tmp);
11302 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11303 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
11304 IEM_MC_ADVANCE_RIP();
11305 IEM_MC_END();
11306 return VINF_SUCCESS;
11307
11308 case IEMMODE_64BIT:
11309 IEM_MC_BEGIN(0,1);
11310 IEM_MC_LOCAL(uint64_t, u64Tmp);
11311 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11312 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
11313 IEM_MC_ADVANCE_RIP();
11314 IEM_MC_END();
11315 return VINF_SUCCESS;
11316
11317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11318 }
11319}
11320
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads ValBits bits from [effSeg:xSI], stores them at [ES:xDI], then
 * advances (DF=0) or retreats (DF=1) both index registers by ValBits/8. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11339
/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated the same for MOVS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11373
11374
11375/** Opcode 0xa5. */
11376FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11377{
11378 IEMOP_HLP_NO_LOCK_PREFIX();
11379
11380 /*
11381 * Use the C implementation if a repeat prefix is encountered.
11382 */
11383 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11384 {
11385 IEMOP_MNEMONIC("rep movs Xv,Yv");
11386 switch (pIemCpu->enmEffOpSize)
11387 {
11388 case IEMMODE_16BIT:
11389 switch (pIemCpu->enmEffAddrMode)
11390 {
11391 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
11392 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
11393 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
11394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11395 }
11396 break;
11397 case IEMMODE_32BIT:
11398 switch (pIemCpu->enmEffAddrMode)
11399 {
11400 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
11401 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
11402 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
11403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11404 }
11405 case IEMMODE_64BIT:
11406 switch (pIemCpu->enmEffAddrMode)
11407 {
11408 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11409 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
11410 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
11411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11412 }
11413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11414 }
11415 }
11416 IEMOP_MNEMONIC("movs Xv,Yv");
11417
11418 /*
11419 * Annoying double switch here.
11420 * Using ugly macro for implementing the cases, sharing it with movsb.
11421 */
11422 switch (pIemCpu->enmEffOpSize)
11423 {
11424 case IEMMODE_16BIT:
11425 switch (pIemCpu->enmEffAddrMode)
11426 {
11427 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11428 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11429 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11431 }
11432 break;
11433
11434 case IEMMODE_32BIT:
11435 switch (pIemCpu->enmEffAddrMode)
11436 {
11437 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11438 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11439 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11441 }
11442 break;
11443
11444 case IEMMODE_64BIT:
11445 switch (pIemCpu->enmEffAddrMode)
11446 {
11447 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11448 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11449 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11451 }
11452 break;
11453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11454 }
11455 return VINF_SUCCESS;
11456}
11457
11458#undef IEM_MOVS_CASE
11459
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares ValBits bits at [effSeg:xSI] against [ES:xDI] via iemAImpl_cmp,
 * updating EFLAGS, then advances/retreats both index registers per DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

11487/** Opcode 0xa6. */
11488FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11489{
11490 IEMOP_HLP_NO_LOCK_PREFIX();
11491
11492 /*
11493 * Use the C implementation if a repeat prefix is encountered.
11494 */
11495 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11496 {
11497 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11498 switch (pIemCpu->enmEffAddrMode)
11499 {
11500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
11501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
11502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
11503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11504 }
11505 }
11506 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11507 {
11508 IEMOP_MNEMONIC("repe cmps Xb,Yb");
11509 switch (pIemCpu->enmEffAddrMode)
11510 {
11511 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
11512 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
11513 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
11514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11515 }
11516 }
11517 IEMOP_MNEMONIC("cmps Xb,Yb");
11518
11519 /*
11520 * Sharing case implementation with cmps[wdq] below.
11521 */
11522 switch (pIemCpu->enmEffAddrMode)
11523 {
11524 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11525 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11526 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11528 }
11529 return VINF_SUCCESS;
11530
11531}
11532
11533
11534/** Opcode 0xa7. */
11535FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11536{
11537 IEMOP_HLP_NO_LOCK_PREFIX();
11538
11539 /*
11540 * Use the C implementation if a repeat prefix is encountered.
11541 */
11542 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11543 {
11544 IEMOP_MNEMONIC("repe cmps Xv,Yv");
11545 switch (pIemCpu->enmEffOpSize)
11546 {
11547 case IEMMODE_16BIT:
11548 switch (pIemCpu->enmEffAddrMode)
11549 {
11550 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
11551 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
11552 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
11553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11554 }
11555 break;
11556 case IEMMODE_32BIT:
11557 switch (pIemCpu->enmEffAddrMode)
11558 {
11559 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
11560 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
11561 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
11562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11563 }
11564 case IEMMODE_64BIT:
11565 switch (pIemCpu->enmEffAddrMode)
11566 {
11567 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11568 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
11569 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
11570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11571 }
11572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11573 }
11574 }
11575
11576 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11577 {
11578 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11579 switch (pIemCpu->enmEffOpSize)
11580 {
11581 case IEMMODE_16BIT:
11582 switch (pIemCpu->enmEffAddrMode)
11583 {
11584 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
11585 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
11586 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
11587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11588 }
11589 break;
11590 case IEMMODE_32BIT:
11591 switch (pIemCpu->enmEffAddrMode)
11592 {
11593 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
11594 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
11595 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
11596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11597 }
11598 case IEMMODE_64BIT:
11599 switch (pIemCpu->enmEffAddrMode)
11600 {
11601 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11602 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
11603 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
11604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11605 }
11606 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11607 }
11608 }
11609
11610 IEMOP_MNEMONIC("cmps Xv,Yv");
11611
11612 /*
11613 * Annoying double switch here.
11614 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11615 */
11616 switch (pIemCpu->enmEffOpSize)
11617 {
11618 case IEMMODE_16BIT:
11619 switch (pIemCpu->enmEffAddrMode)
11620 {
11621 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11622 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11623 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11625 }
11626 break;
11627
11628 case IEMMODE_32BIT:
11629 switch (pIemCpu->enmEffAddrMode)
11630 {
11631 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11632 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11633 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11635 }
11636 break;
11637
11638 case IEMMODE_64BIT:
11639 switch (pIemCpu->enmEffAddrMode)
11640 {
11641 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11642 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11643 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11645 }
11646 break;
11647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11648 }
11649 return VINF_SUCCESS;
11650
11651}
11652
11653#undef IEM_CMPS_CASE
11654
/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    /* Shares the AL,imm8 decode/dispatch helper with the other 0x04-style ops. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11662
11663
/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    /* Shares the rAX,immZ decode/dispatch helper with the other 0x05-style ops. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11671
11672
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Stores the low ValBits bits of rAX at [ES:xDI] and then advances/retreats
 * xDI by ValBits/8 according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated the same for STOS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11722
11723
11724/** Opcode 0xab. */
11725FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11726{
11727 IEMOP_HLP_NO_LOCK_PREFIX();
11728
11729 /*
11730 * Use the C implementation if a repeat prefix is encountered.
11731 */
11732 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11733 {
11734 IEMOP_MNEMONIC("rep stos Yv,rAX");
11735 switch (pIemCpu->enmEffOpSize)
11736 {
11737 case IEMMODE_16BIT:
11738 switch (pIemCpu->enmEffAddrMode)
11739 {
11740 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11741 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11742 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11744 }
11745 break;
11746 case IEMMODE_32BIT:
11747 switch (pIemCpu->enmEffAddrMode)
11748 {
11749 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11750 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11751 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11752 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11753 }
11754 case IEMMODE_64BIT:
11755 switch (pIemCpu->enmEffAddrMode)
11756 {
11757 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11758 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11759 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11761 }
11762 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11763 }
11764 }
11765 IEMOP_MNEMONIC("stos Yv,rAX");
11766
11767 /*
11768 * Annoying double switch here.
11769 * Using ugly macro for implementing the cases, sharing it with stosb.
11770 */
11771 switch (pIemCpu->enmEffOpSize)
11772 {
11773 case IEMMODE_16BIT:
11774 switch (pIemCpu->enmEffAddrMode)
11775 {
11776 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11777 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11778 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11780 }
11781 break;
11782
11783 case IEMMODE_32BIT:
11784 switch (pIemCpu->enmEffAddrMode)
11785 {
11786 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11787 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11788 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11790 }
11791 break;
11792
11793 case IEMMODE_64BIT:
11794 switch (pIemCpu->enmEffAddrMode)
11795 {
11796 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11797 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11798 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11800 }
11801 break;
11802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11803 }
11804 return VINF_SUCCESS;
11805}
11806
11807#undef IEM_STOS_CASE
11808
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Loads ValBits bits from [effSeg:xSI] into the low part of rAX and then
 * advances/retreats xSI by ValBits/8 according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11824
/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated the same for LODS.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11858
11859
11860/** Opcode 0xad. */
11861FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11862{
11863 IEMOP_HLP_NO_LOCK_PREFIX();
11864
11865 /*
11866 * Use the C implementation if a repeat prefix is encountered.
11867 */
11868 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11869 {
11870 IEMOP_MNEMONIC("rep lods rAX,Xv");
11871 switch (pIemCpu->enmEffOpSize)
11872 {
11873 case IEMMODE_16BIT:
11874 switch (pIemCpu->enmEffAddrMode)
11875 {
11876 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11877 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11878 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11880 }
11881 break;
11882 case IEMMODE_32BIT:
11883 switch (pIemCpu->enmEffAddrMode)
11884 {
11885 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11886 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11887 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11889 }
11890 case IEMMODE_64BIT:
11891 switch (pIemCpu->enmEffAddrMode)
11892 {
11893 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11894 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11895 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11897 }
11898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11899 }
11900 }
11901 IEMOP_MNEMONIC("lods rAX,Xv");
11902
11903 /*
11904 * Annoying double switch here.
11905 * Using ugly macro for implementing the cases, sharing it with lodsb.
11906 */
11907 switch (pIemCpu->enmEffOpSize)
11908 {
11909 case IEMMODE_16BIT:
11910 switch (pIemCpu->enmEffAddrMode)
11911 {
11912 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11913 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11914 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11916 }
11917 break;
11918
11919 case IEMMODE_32BIT:
11920 switch (pIemCpu->enmEffAddrMode)
11921 {
11922 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11923 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11924 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11925 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11926 }
11927 break;
11928
11929 case IEMMODE_64BIT:
11930 switch (pIemCpu->enmEffAddrMode)
11931 {
11932 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11933 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11934 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11936 }
11937 break;
11938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11939 }
11940 return VINF_SUCCESS;
11941}
11942
11943#undef IEM_LODS_CASE
11944
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Compares the low ValBits bits of rAX against [ES:xDI] via iemAImpl_cmp,
 * updating EFLAGS, then advances/retreats xDI by ValBits/8 per DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11966
/** Opcode 0xae. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REPE and REPNE differ in the termination condition and therefore
     * have separate C implementations.)
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12011
12012
12013/** Opcode 0xaf. */
12014FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12015{
12016 IEMOP_HLP_NO_LOCK_PREFIX();
12017
12018 /*
12019 * Use the C implementation if a repeat prefix is encountered.
12020 */
12021 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
12022 {
12023 IEMOP_MNEMONIC("repe scas rAX,Xv");
12024 switch (pIemCpu->enmEffOpSize)
12025 {
12026 case IEMMODE_16BIT:
12027 switch (pIemCpu->enmEffAddrMode)
12028 {
12029 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12030 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12031 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12033 }
12034 break;
12035 case IEMMODE_32BIT:
12036 switch (pIemCpu->enmEffAddrMode)
12037 {
12038 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12039 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12040 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12042 }
12043 case IEMMODE_64BIT:
12044 switch (pIemCpu->enmEffAddrMode)
12045 {
12046 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
12047 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12048 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12050 }
12051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12052 }
12053 }
12054 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
12055 {
12056 IEMOP_MNEMONIC("repne scas rAX,Xv");
12057 switch (pIemCpu->enmEffOpSize)
12058 {
12059 case IEMMODE_16BIT:
12060 switch (pIemCpu->enmEffAddrMode)
12061 {
12062 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12063 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12064 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12066 }
12067 break;
12068 case IEMMODE_32BIT:
12069 switch (pIemCpu->enmEffAddrMode)
12070 {
12071 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12072 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12073 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12075 }
12076 case IEMMODE_64BIT:
12077 switch (pIemCpu->enmEffAddrMode)
12078 {
12079 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12080 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12081 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12083 }
12084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12085 }
12086 }
12087 IEMOP_MNEMONIC("scas rAX,Xv");
12088
12089 /*
12090 * Annoying double switch here.
12091 * Using ugly macro for implementing the cases, sharing it with scasb.
12092 */
12093 switch (pIemCpu->enmEffOpSize)
12094 {
12095 case IEMMODE_16BIT:
12096 switch (pIemCpu->enmEffAddrMode)
12097 {
12098 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12099 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12100 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12101 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12102 }
12103 break;
12104
12105 case IEMMODE_32BIT:
12106 switch (pIemCpu->enmEffAddrMode)
12107 {
12108 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12109 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12110 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12112 }
12113 break;
12114
12115 case IEMMODE_64BIT:
12116 switch (pIemCpu->enmEffAddrMode)
12117 {
12118 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12119 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12120 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12122 }
12123 break;
12124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12125 }
12126 return VINF_SUCCESS;
12127}
12128
12129#undef IEM_SCAS_CASE
12130
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the 8-bit register selected
 * by @a iReg (the caller has already folded in REX.B).
 *
 * @param   iReg    The 8-bit register encoding to store into.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
12147
12148
/** Opcode 0xb0 - mov AL,Ib (REX.B selects R8L instead of AL). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
12155
12156
/** Opcode 0xb1 - mov CL,Ib (REX.B selects R9L instead of CL). */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
12163
12164
/** Opcode 0xb2 - mov DL,Ib (REX.B selects R10L instead of DL). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
12171
12172
/** Opcode 0xb3 - mov BL,Ib (REX.B selects R11L instead of BL). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
12179
12180
/** Opcode 0xb4 - mov AH,Ib.  Register encoding 4: AH without a REX prefix,
 *  SPL/R12L with one (NOTE(review): presumably resolved inside the 8-bit
 *  GREG store — confirm against iemGRegRefU8/IEM_MC_STORE_GREG_U8). */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
12187
12188
/** Opcode 0xb5 - mov CH,Ib.  Register encoding 5: CH without a REX prefix,
 *  BPL/R13L with one (resolved by the 8-bit GREG access, see 0xb4). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
12195
12196
/** Opcode 0xb6 - mov DH,Ib.  Register encoding 6: DH without a REX prefix,
 *  SIL/R14L with one (resolved by the 8-bit GREG access, see 0xb4). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
12203
12204
/** Opcode 0xb7 - mov BH,Ib.  Register encoding 7: BH without a REX prefix,
 *  DIL/R15L with one (resolved by the 8-bit GREG access, see 0xb4). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
12211
12212
12213/**
12214 * Common 'mov regX,immX' helper.
12215 */
12216FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12217{
12218 switch (pIemCpu->enmEffOpSize)
12219 {
12220 case IEMMODE_16BIT:
12221 {
12222 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12223 IEMOP_HLP_NO_LOCK_PREFIX();
12224
12225 IEM_MC_BEGIN(0, 1);
12226 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12227 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12228 IEM_MC_ADVANCE_RIP();
12229 IEM_MC_END();
12230 break;
12231 }
12232
12233 case IEMMODE_32BIT:
12234 {
12235 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12236 IEMOP_HLP_NO_LOCK_PREFIX();
12237
12238 IEM_MC_BEGIN(0, 1);
12239 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12240 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12241 IEM_MC_ADVANCE_RIP();
12242 IEM_MC_END();
12243 break;
12244 }
12245 case IEMMODE_64BIT:
12246 {
12247 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12248 IEMOP_HLP_NO_LOCK_PREFIX();
12249
12250 IEM_MC_BEGIN(0, 1);
12251 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12252 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12253 IEM_MC_ADVANCE_RIP();
12254 IEM_MC_END();
12255 break;
12256 }
12257 }
12258
12259 return VINF_SUCCESS;
12260}
12261
12262
/** Opcode 0xb8 - mov rAX,Iv (REX.B selects r8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
}
12269
12270
/** Opcode 0xb9 - mov rCX,Iv (REX.B selects r9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
}
12277
12278
/** Opcode 0xba - mov rDX,Iv (REX.B selects r10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
}
12285
12286
/** Opcode 0xbb - mov rBX,Iv (REX.B selects r11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
}
12293
12294
/** Opcode 0xbc - mov rSP,Iv (REX.B selects r12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
}
12301
12302
/** Opcode 0xbd - mov rBP,Iv (REX.B selects r13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
}
12309
12310
/** Opcode 0xbe - mov rSI,Iv (REX.B selects r14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
}
12317
12318
/** Opcode 0xbf - mov rDI,Iv (REX.B selects r15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
}
12325
12326
/**
 * Opcode 0xc0 - Group 2: rotate/shift Eb by imm8.
 *
 * The ModRM reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar;
 * /6 is undefined and raises \#UD).  186+ only.  OF and AF are left
 * undefined for verification purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: the imm8 follows the ModRM bytes, hence cbImm=1 passed to
           the effective address calculation and the fetch done afterwards. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12386
12387
/**
 * Opcode 0xc1 - Group 2: rotate/shift Ev by imm8.
 *
 * Same operation selection as 0xc0 (ModRM reg field; /6 raises \#UD), but
 * on a 16/32/64-bit operand chosen by the effective operand size.  186+
 * only.  OF and AF are left undefined for verification purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: the imm8 follows the ModRM bytes, hence cbImm=1 passed to
           the effective address calculation and the fetch done afterwards. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12525
12526
/** Opcode 0xc2 - near return, popping Iw extra bytes off the stack.
 *  Operand size defaults to 64-bit in long mode; deferred to the C
 *  implementation. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
12536
12537
/** Opcode 0xc3 - near return (same as 0xc2 with zero extra stack bytes). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
12546
12547
/** Opcode 0xc4 - LES Gv,Mp, doubling as the 2-byte VEX prefix escape. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();   /* VEX decoding not implemented yet. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12568
12569
/** Opcode 0xc5 - LDS Gv,Mp, doubling as the 3-byte VEX prefix escape. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    /* Consume the two VEX payload bytes and the real opcode byte; decoding
       of them is not implemented yet (see @todo below). */
    uint8_t bVex1; IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12607
12608
/** Opcode 0xc6 - Group 11: mov Eb,Ib (only /0 is valid; others raise \#UD). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access; cbImm=1 accounts for the imm8 trailing the ModRM bytes. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12640
12641
/** Opcode 0xc7 - Group 11: mov Ev,Iz (only /0 is valid; others raise \#UD).
 *  In 64-bit mode the immediate is 32 bits, sign-extended to 64. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access; cbImm (2 or 4) accounts for the immediate trailing
           the ModRM bytes when computing the effective address. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12722
12723
12724
12725
/** Opcode 0xc8 - ENTER Iw,Ib: set up a stack frame of Iw bytes with nesting
 *  level Ib.  186+; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12737
12738
12739/** Opcode 0xc9. */
12740FNIEMOP_DEF(iemOp_leave)
12741{
12742 IEMOP_MNEMONIC("retn");
12743 IEMOP_HLP_MIN_186();
12744 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12745 IEMOP_HLP_NO_LOCK_PREFIX();
12746 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12747}
12748
12749
/** Opcode 0xca - far return, popping Iw extra bytes off the stack.
 *  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12759
12760
/** Opcode 0xcb - far return (same as 0xca with zero extra stack bytes). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
12769
12770
/** Opcode 0xcc - INT 3 (breakpoint): raises \#BP, flagged as the dedicated
 *  breakpoint instruction (fIsBpInstr=true). */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12777
12778
/** Opcode 0xcd - INT Ib: software interrupt with the given vector. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12786
12787
/** Opcode 0xce - INTO: raise \#OF if EFLAGS.OF is set (the conditional
 *  check is done by the iemCImpl_int implementation).  Invalid in 64-bit
 *  mode. */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12801
12802
/** Opcode 0xcf - IRET/IRETD/IRETQ per effective operand size; deferred to
 *  the C implementation. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12810
12811
/**
 * Opcode 0xd0 - Group 2: rotate/shift Eb by 1.
 *
 * Same operation selection as 0xc0 (ModRM reg field; /6 raises \#UD) with
 * a fixed shift count of 1.  OF and AF are left undefined for verification
 * purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory (no trailing immediate, hence cbImm=0) */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12867
12868
12869
/**
 * Opcode 0xd1 - Group 2: rotate/shift Ev by 1.
 *
 * Same operation selection as 0xc1 (ModRM reg field; /6 raises \#UD) with
 * a fixed shift count of 1, on a 16/32/64-bit operand chosen by the
 * effective operand size.  OF and AF are left undefined for verification
 * purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory (no trailing immediate, hence cbImm=0) */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12999
13000
/** Opcode 0xd2. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /*
     * Group 2: rotate/shift the byte r/m operand by the count in CL.
     * The ModR/M reg field selects the operation; /6 raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for this group. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Tell the verifier that OF and AF may end up undefined after these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: shift count fetched from CL. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory operand: map it read-write, shift in place, then commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13058
13059
/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /*
     * Group 2: rotate/shift the word/dword/qword r/m operand by the count in
     * CL.  The ModR/M reg field selects the operation; /6 raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined for this group. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verifier that OF and AF may end up undefined after these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register operand: one case per effective operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory operand: map read-write, shift in place, commit on success. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13195
/** Opcode 0xd4. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    /* ASCII adjust AX after multiply; the immediate byte is the divisor
       (0x0a for the plain 'aam' encoding). */
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* A zero divisor raises #DE. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
13207
13208
/** Opcode 0xd5. */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    /* ASCII adjust AX before division; the immediate is the multiplier
       (0x0a for the plain 'aad' encoding).  Unlike AAM there is no division
       involved, hence no #DE check here. */
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
13218
13219
13220/** Opcode 0xd6. */
13221FNIEMOP_DEF(iemOp_salc)
13222{
13223 IEMOP_MNEMONIC("salc");
13224 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
13225 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13227 IEMOP_HLP_NO_64BIT();
13228
13229 IEM_MC_BEGIN(0, 0);
13230 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13231 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13232 } IEM_MC_ELSE() {
13233 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13234 } IEM_MC_ENDIF();
13235 IEM_MC_ADVANCE_RIP();
13236 IEM_MC_END();
13237 return VINF_SUCCESS;
13238}
13239
13240
/** Opcode 0xd7. */
FNIEMOP_DEF(iemOp_xlat)
{
    /*
     * Table lookup: AL = [seg:(E/R)BX + zero-extended AL], with the address
     * arithmetic done at the current effective address size.
     */
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13287
13288
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * @param   bRm         The ModR/M byte; the R/M bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only perform the operation if both ST0 and STn hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* underflow response targets ST0 */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13319
13320
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * @param   bRm         The ModR/M byte; the R/M bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only run the comparison when both registers hold values; the
       implementation returns the new FSW rather than a register result. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13351
13352
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * @param   bRm         The ModR/M byte; the R/M bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped afterwards
       (both on success and on underflow). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13383
13384
/** Opcode 0xd8 11/0.
 * FADD ST(0),ST(i): adds ST(i) to ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.
 * FMUL ST(0),ST(i): multiplies ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.
 * FCOM ST(0),ST(i): compares ST(0) with ST(i), only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.
 * FCOMP ST(0),ST(i): same comparison as FCOM (reusing its assembly worker),
 * but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.
 * FSUB ST(0),ST(i): subtracts ST(i) from ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.
 * FSUBR ST(0),ST(i): reverse subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.
 * FDIV ST(0),ST(i): divides ST(0) by ST(i), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.
 * FDIVR ST(0),ST(i): reverse divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
13447
13448
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Decode the memory operand before declaring decoding done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operate only when ST0 holds a value; otherwise signal stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13484
13485
/** Opcode 0xd8 !11/0.
 * FADD ST(0),m32real: adds the 32-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.
 * FMUL ST(0),m32real: multiplies ST(0) by the 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
13500
13501
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real: compares ST(0) with a 32-bit real memory operand,
 * updating only the FPU status word. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare only when ST0 holds a value; record the data pointer with the
       FSW update / underflow so FDP is correct. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13534
13535
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real: like FCOM m32real but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as iemOp_fcom_m32r, except the stack is popped after the FSW
       update (and after the underflow response). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13568
13569
/** Opcode 0xd8 !11/4.
 * FSUB ST(0),m32real: subtracts the 32-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * FSUBR ST(0),m32real: reverse subtract with a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * FDIV ST(0),m32real: divides ST(0) by the 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * FDIVR ST(0),m32real: reverse divide with a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13600
13601
/** Opcode 0xd8. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /*
     * First FPU escape byte.  Register forms (mod=3) operate on ST0 and STn;
     * memory forms operate on ST0 and a 32-bit real operand.  Dispatch on
     * the ModR/M reg field.
     */
    /* Remember the offset of the escape opcode byte for the FPU opcode state. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13639
13640
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: converts the 32-bit real memory operand to 80-bit and pushes
 * it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13673
13674
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST(0) to the 32-bit real memory destination. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if invalid-operation exceptions are masked (FCW.IM),
           store the default negative QNaN, then report the underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13709
13710
/** Opcode 0xd9 !11/3
 * FSTP m32real: stores ST(0) to the 32-bit real memory destination and pops
 * the FPU stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: if invalid-operation exceptions are masked (FCW.IM),
           store the default negative QNaN, then report underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13745
13746
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment; the layout depends on the
 * effective operand size, so that is passed along to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13764
13765
13766/** Opcode 0xd9 !11/5 */
13767FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13768{
13769 IEMOP_MNEMONIC("fldcw m2byte");
13770 IEM_MC_BEGIN(1, 1);
13771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13772 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13775 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13777 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13778 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13779 IEM_MC_END();
13780 return VINF_SUCCESS;
13781}
13782
13783
13784/** Opcode 0xd9 !11/6 */
13785FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13786{
13787 IEMOP_MNEMONIC("fstenv m14/m28byte");
13788 IEM_MC_BEGIN(3, 0);
13789 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13790 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13791 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13794 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13795 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
13796 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
13797 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13798 IEM_MC_END();
13799 return VINF_SUCCESS;
13800}
13801
13802
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13820
13821
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no operation, but still subject to FPU availability/exception checks
 * and updating the FPU opcode/instruction pointer state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13839
13840
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must hold a value; its content is pushed as the result. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13868
13869
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    /* FXCH ST(i): exchange ST(0) and ST(i). When both registers hold a
       value, ST(i)'s content (with C1 set in the FSW) is written to ST(0)
       and ST(0)'s old content to ST(i); an empty register defers to the
       CIMPL underflow handler. */
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13900
13901
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    /* FSTP ST(i): copy ST(0) to ST(i) and pop the register stack. */
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no data movement, just pop (or
           report underflow if ST(0) is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13948
13949
13950/**
13951 * Common worker for FPU instructions working on ST0 and replaces it with the
13952 * result, i.e. unary operators.
13953 *
13954 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13955 */
13956FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13957{
13958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13959
13960 IEM_MC_BEGIN(2, 1);
13961 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13962 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13963 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13964
13965 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13966 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13967 IEM_MC_PREPARE_FPU_USAGE();
13968 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13969 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13970 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13971 IEM_MC_ELSE()
13972 IEM_MC_FPU_STACK_UNDERFLOW(0);
13973 IEM_MC_ENDIF();
13974 IEM_MC_ADVANCE_RIP();
13975
13976 IEM_MC_END();
13977 return VINF_SUCCESS;
13978}
13979
13980
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    /* FCHS: change the sign of ST(0), via the common unary-op worker. */
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13987
13988
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    /* FABS: absolute value of ST(0), via the common unary-op worker. */
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13995
13996
13997/**
13998 * Common worker for FPU instructions working on ST0 and only returns FSW.
13999 *
14000 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14001 */
14002FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14003{
14004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14005
14006 IEM_MC_BEGIN(2, 1);
14007 IEM_MC_LOCAL(uint16_t, u16Fsw);
14008 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14009 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14010
14011 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14012 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14013 IEM_MC_PREPARE_FPU_USAGE();
14014 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14015 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14016 IEM_MC_UPDATE_FSW(u16Fsw);
14017 IEM_MC_ELSE()
14018 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14019 IEM_MC_ENDIF();
14020 IEM_MC_ADVANCE_RIP();
14021
14022 IEM_MC_END();
14023 return VINF_SUCCESS;
14024}
14025
14026
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    /* FTST: compare ST(0) with 0.0, setting only the FSW condition codes. */
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
14033
14034
/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    /* FXAM: classify ST(0) into the FSW condition codes.
       NOTE(review): architecturally FXAM also classifies an *empty* ST(0)
       (C3,C0 = empty), whereas this worker takes the underflow path when
       ST(0) is empty — verify against hardware/testcases. */
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14041
14042
14043/**
14044 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14045 *
14046 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14047 */
14048FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14049{
14050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14051
14052 IEM_MC_BEGIN(1, 1);
14053 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14054 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14055
14056 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14057 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14058 IEM_MC_PREPARE_FPU_USAGE();
14059 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14060 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14061 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14062 IEM_MC_ELSE()
14063 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14064 IEM_MC_ENDIF();
14065 IEM_MC_ADVANCE_RIP();
14066
14067 IEM_MC_END();
14068 return VINF_SUCCESS;
14069}
14070
14071
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    /* FLD1: push the constant +1.0. */
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
14078
14079
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    /* FLDL2T: push the constant log2(10). */
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
14086
14087
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    /* FLDL2E: push the constant log2(e). */
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
14094
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    /* FLDPI: push the constant pi. */
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
14101
14102
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    /* FLDLG2: push the constant log10(2). */
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
14109
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    /* FLDLN2: push the constant ln(2). */
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
14116
14117
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    /* FLDZ: push the constant +0.0. */
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
14124
14125
/** Opcode 0xd9 0xf0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    /* F2XM1: replace ST(0) with 2^ST(0) - 1, via the common unary-op worker. */
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
14132
14133
14134/** Opcode 0xd9 0xf1. */
14135FNIEMOP_DEF(iemOp_fylx2)
14136{
14137 IEMOP_MNEMONIC("fylx2 st0");
14138 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
14139}
14140
14141
14142/**
14143 * Common worker for FPU instructions working on ST0 and having two outputs, one
14144 * replacing ST0 and one pushed onto the stack.
14145 *
14146 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14147 */
14148FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14149{
14150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14151
14152 IEM_MC_BEGIN(2, 1);
14153 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14154 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14155 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14156
14157 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14158 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14159 IEM_MC_PREPARE_FPU_USAGE();
14160 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14161 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14162 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14163 IEM_MC_ELSE()
14164 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14165 IEM_MC_ENDIF();
14166 IEM_MC_ADVANCE_RIP();
14167
14168 IEM_MC_END();
14169 return VINF_SUCCESS;
14170}
14171
14172
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* FPTAN: replace ST(0) with its partial tangent and push the second
       result, via the replace-and-push worker. */
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
14179
14180
14181/**
14182 * Common worker for FPU instructions working on STn and ST0, storing the result
14183 * in STn, and popping the stack unless IE, DE or ZE was raised.
14184 *
14185 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14186 */
14187FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14188{
14189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14190
14191 IEM_MC_BEGIN(3, 1);
14192 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14193 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14194 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14195 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14196
14197 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14198 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14199
14200 IEM_MC_PREPARE_FPU_USAGE();
14201 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14202 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14203 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14204 IEM_MC_ELSE()
14205 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14206 IEM_MC_ENDIF();
14207 IEM_MC_ADVANCE_RIP();
14208
14209 IEM_MC_END();
14210 return VINF_SUCCESS;
14211}
14212
14213
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* FPATAN: st1 = partial arctangent of st1/st0, then pop. */
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
14220
14221
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* FXTRACT: split ST(0) into exponent and significand (replace + push). */
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
14228
14229
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* FPREM1: IEEE partial remainder of ST(0) / ST(1), stored in ST(0). */
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
14236
14237
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* FDECSTP: rotate the FPU stack by decrementing TOP; no tags or data
       registers are changed. */
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14260
14261
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* FINCSTP: rotate the FPU stack by incrementing TOP; no tags or data
       registers are changed. */
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14284
14285
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* FPREM: truncating partial remainder of ST(0) / ST(1), stored in ST(0). */
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
14292
14293
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* FYL2XP1: st1 = st1 * log2(st0 + 1), then pop. */
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
14300
14301
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* FSQRT: square root of ST(0), via the common unary-op worker. */
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
14308
14309
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* FSINCOS: replace ST(0) with sin and push cos (replace + push worker). */
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
14316
14317
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    /* FRNDINT: round ST(0) to integer per FCW.RC, via the unary-op worker. */
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
14324
14325
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    /* FSCALE: scale ST(0) by a power of two taken from ST(1). */
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
14332
14333
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    /* FSIN: sine of ST(0), via the common unary-op worker. */
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
14340
14341
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    /* FCOS: cosine of ST(0), via the common unary-op worker. */
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
14348
14349
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=3 and opcode bytes 0xe0 thru 0xff;
 * indexed by (opcode byte - 0xe0).  ("fylx2" keeps its historical spelling
 * of the FYL2X handler symbol.) */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
14386
14387
/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* FPU escape 0xd9: dispatch on the ModR/M byte.  Register forms (mod=3)
       go by the reg field (and the table above for reg >= 4); memory forms
       go by the reg field only. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* record FOP offset before fetching ModR/M */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14429
14430
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copy ST(i) to ST(0) if CF is set; underflow if either
       register is empty. */
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14457
14458
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copy ST(i) to ST(0) if ZF is set; underflow if either
       register is empty. */
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14485
14486
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set; underflow if either
       register is empty. */
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14513
14514
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copy ST(i) to ST(0) if PF is set (unordered); underflow if
       either register is empty. */
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14541
14542
14543/**
14544 * Common worker for FPU instructions working on ST0 and STn, only affecting
14545 * flags, and popping twice when done.
14546 *
14547 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14548 */
14549FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14550{
14551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14552
14553 IEM_MC_BEGIN(3, 1);
14554 IEM_MC_LOCAL(uint16_t, u16Fsw);
14555 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14556 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14557 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14558
14559 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14560 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14561
14562 IEM_MC_PREPARE_FPU_USAGE();
14563 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14564 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14565 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14566 IEM_MC_ELSE()
14567 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14568 IEM_MC_ENDIF();
14569 IEM_MC_ADVANCE_RIP();
14570
14571 IEM_MC_END();
14572 return VINF_SUCCESS;
14573}
14574
14575
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST(0) with ST(1), then pop twice. */
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14582
14583
14584/**
14585 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14586 * the result in ST0.
14587 *
14588 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14589 */
14590FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14591{
14592 IEM_MC_BEGIN(3, 3);
14593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14594 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14595 IEM_MC_LOCAL(int32_t, i32Val2);
14596 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14597 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14598 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14599
14600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14602
14603 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14604 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14605 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14606
14607 IEM_MC_PREPARE_FPU_USAGE();
14608 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14609 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14610 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14611 IEM_MC_ELSE()
14612 IEM_MC_FPU_STACK_UNDERFLOW(0);
14613 IEM_MC_ENDIF();
14614 IEM_MC_ADVANCE_RIP();
14615
14616 IEM_MC_END();
14617 return VINF_SUCCESS;
14618}
14619
14620
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: st0 += m32i, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14627
14628
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: st0 *= m32i, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14635
14636
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32int: compare ST(0) with a 32-bit signed integer from memory,
       updating only the FSW condition codes. */
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14669
14670
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32int: same as FICOM m32int but pops ST(0) afterwards. */
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14703
14704
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: st0 -= m32i, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14711
14712
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: st0 = m32i - st0, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14719
14720
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: st0 /= m32i, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14727
14728
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: st0 = m32i / st0, via the common st0/m32i worker. */
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14735
14736
/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* FPU escape 0xda: register forms (mod=3) are the FCMOVxx family plus
       FUCOMPP (0xe9); memory forms are the m32 integer arithmetic/compare
       instructions. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1; /* record FOP offset before fetching ModR/M */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14776
14777
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32int: convert a 32-bit signed integer from memory to R80 and
       push it; stack overflow is reported if the push target is occupied. */
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14809
14810
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32int (SSE3): store ST(0) to memory as int32 with truncation,
       then pop.  On an empty ST(0) the integer-indefinite value is written
       when FCW.IM is masked, and underflow is reported. */
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14845
14846
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32int: store ST(0) to memory as int32 (rounded per FCW.RC)
       without popping.  On an empty ST(0) the integer-indefinite value is
       written when FCW.IM is masked, and underflow is reported. */
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14881
14882
14883/** Opcode 0xdb !11/3. */
14884FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14885{
14886 IEMOP_MNEMONIC("fisttp m32i");
14887 IEM_MC_BEGIN(3, 2);
14888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14889 IEM_MC_LOCAL(uint16_t, u16Fsw);
14890 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14891 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14892 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14893
14894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14896 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14897 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14898
14899 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14900 IEM_MC_PREPARE_FPU_USAGE();
14901 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14902 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14903 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14904 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14905 IEM_MC_ELSE()
14906 IEM_MC_IF_FCW_IM()
14907 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14908 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14909 IEM_MC_ENDIF();
14910 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14911 IEM_MC_ENDIF();
14912 IEM_MC_ADVANCE_RIP();
14913
14914 IEM_MC_END();
14915 return VINF_SUCCESS;
14916}
14917
14918
/** Opcode 0xdb !11/5.
 * FLD m80fp: push an 80-bit extended real from memory onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14950
14951
/** Opcode 0xdb !11/7.
 * FSTP m80fp: store ST(0) to memory as an 80-bit extended real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store negative QNaN (real indefinite) if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14986
14987
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; the reference is to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15014
15015
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; the reference is to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15042
15043
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST(0) if both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; the reference is to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15070
15071
/** Opcode 0xdb 11/3.
 * Copy ST(i) to ST(0) if PF is clear (not unordered).
 * NOTE(review): the Intel manual spells this mnemonic FCMOVNU; the extra 'n'
 * in the local name/mnemonic string appears to be historical. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; the reference is to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15098
15099
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op (ignored) on later FPUs,
 * so only the device-not-available check and RIP advance are emulated. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15111
15112
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op (ignored) on later FPUs,
 * so only the device-not-available check and RIP advance are emulated. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15124
15125
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for
 * pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15140
15141
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions;
 * deferred to the C implementation with fCheckXcpts=false. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
15149
15150
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode"; ignored (no-op) here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15162
15163
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode"; raises \#UD here since newer CPUs
 * treat the encoding as invalid (the no-op variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
15179
15180
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
15187
15188
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
15195
15196
/** Opcode 0xdb.
 * Escape group 3 dispatcher: decodes ModRM and routes to the register-form
 * (mod == 3) or memory-form handler selected by the reg field. */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember the opcode position for the FPU instruction pointer (FOP/FIP). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg == 4 encodes individual no-operand instructions. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15246
15247
15248/**
15249 * Common worker for FPU instructions working on STn and ST0, and storing the
15250 * result in STn unless IE, DE or ZE was raised.
15251 *
15252 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15253 */
15254FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15255{
15256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15257
15258 IEM_MC_BEGIN(3, 1);
15259 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15260 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15261 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15262 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15263
15264 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15265 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15266
15267 IEM_MC_PREPARE_FPU_USAGE();
15268 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15269 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15270 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15271 IEM_MC_ELSE()
15272 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15273 IEM_MC_ENDIF();
15274 IEM_MC_ADVANCE_RIP();
15275
15276 IEM_MC_END();
15277 return VINF_SUCCESS;
15278}
15279
15280
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) = ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
15287
15288
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
15295
15296
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): reversed subtract stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
15303
15304
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): subtract stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
15311
15312
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): reversed divide stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
15319
15320
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): divide stored into ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
15327
15328
15329/**
15330 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15331 * memory operand, and storing the result in ST0.
15332 *
15333 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15334 */
15335FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
15336{
15337 IEM_MC_BEGIN(3, 3);
15338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15339 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15340 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15341 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15342 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15343 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15344
15345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15347 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15348 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15349
15350 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
15351 IEM_MC_PREPARE_FPU_USAGE();
15352 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15353 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
15354 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
15355 IEM_MC_ELSE()
15356 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
15357 IEM_MC_ENDIF();
15358 IEM_MC_ADVANCE_RIP();
15359
15360 IEM_MC_END();
15361 return VINF_SUCCESS;
15362}
15363
15364
/** Opcode 0xdc !11/0.
 * FADD m64fp: ST(0) = ST(0) + m64. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
15371
15372
/** Opcode 0xdc !11/1.
 * FMUL m64fp: ST(0) = ST(0) * m64. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
15379
15380
/** Opcode 0xdc !11/2.
 * FCOM m64fp: compare ST(0) with a 64-bit real from memory; updates FSW
 * condition codes only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15413
15414
/** Opcode 0xdc !11/3.
 * FCOMP m64fp: compare ST(0) with a 64-bit real from memory, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15447
15448
/** Opcode 0xdc !11/4.
 * FSUB m64fp: ST(0) = ST(0) - m64. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
15455
15456
/** Opcode 0xdc !11/5.
 * FSUBR m64fp: ST(0) = m64 - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
15463
15464
/** Opcode 0xdc !11/6.
 * FDIV m64fp: ST(0) = ST(0) / m64. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
15471
15472
/** Opcode 0xdc !11/7.
 * FDIVR m64fp: ST(0) = m64 / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
15479
15480
/** Opcode 0xdc.
 * Escape group 4 dispatcher: register forms operate on ST(i) with ST(0) as
 * source; memory forms take a 64-bit real operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the opcode position for the FPU instruction pointer (FOP/FIP). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15517
15518
/** Opcode 0xdd !11/0.
 * FLD m64fp: convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15550
15551
/** Opcode 0xdd !11/1.
 * FISTTP m64i: store ST(0) as a 64-bit integer using truncation (chop)
 * regardless of FCW.RC, then pop.  (Header previously mislabeled !11/0;
 * the 0xdd dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store the integer-indefinite value if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15586
15587
/** Opcode 0xdd !11/2.
 * FST m64fp: store ST(0) to memory as a 64-bit real; no pop.
 * (Header previously mislabeled !11/0; the 0xdd dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store negative QNaN (real indefinite) if #IA is masked. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15622
15623
15624
15625
15626/** Opcode 0xdd !11/0. */
15627FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15628{
15629 IEMOP_MNEMONIC("fstp m64r");
15630 IEM_MC_BEGIN(3, 2);
15631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15632 IEM_MC_LOCAL(uint16_t, u16Fsw);
15633 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15634 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15635 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15636
15637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15639 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15640 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15641
15642 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15643 IEM_MC_PREPARE_FPU_USAGE();
15644 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15645 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15646 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15647 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15648 IEM_MC_ELSE()
15649 IEM_MC_IF_FCW_IM()
15650 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15651 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15652 IEM_MC_ENDIF();
15653 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15654 IEM_MC_ENDIF();
15655 IEM_MC_ADVANCE_RIP();
15656
15657 IEM_MC_END();
15658 return VINF_SUCCESS;
15659}
15660
15661
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the complete FPU state from memory; deferred
 * to the C implementation.  (Header previously mislabeled !11/0; the 0xdd
 * dispatcher routes reg=4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15679
15680
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the complete FPU state to memory without checking
 * for pending exceptions; deferred to the C implementation.  (Header
 * previously mislabeled !11/0; the 0xdd dispatcher routes reg=6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15699
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory without checking for
 * pending exceptions.  (Header previously mislabeled !11/0; the 0xdd
 * dispatcher routes reg=7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15724
15725
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15747
15748
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i).  (Header previously said 11/1; the 0xdd
 * register-form dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap ST(0) in a result with a zero FSW so the store helper applies it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15773
15774
15775/** Opcode 0xdd 11/3. */
15776FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15777{
15778 IEMOP_MNEMONIC("fcom st0,stN");
15779 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15780}
15781
15782
15783/** Opcode 0xdd 11/4. */
15784FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15785{
15786 IEMOP_MNEMONIC("fcomp st0,stN");
15787 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15788}
15789
15790
/** Opcode 0xdd.
 * Escape group 5 dispatcher: register forms (FFREE/FST/FSTP/FUCOM/FUCOMP),
 * memory forms (FLD/FISTTP/FST/FSTP m64, FRSTOR, FNSAVE, FNSTSW). */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the opcode position for the FPU instruction pointer (FOP/FIP). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15827
15828
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    /* ST(i) += ST(0), then pop; shared pop-variant worker does the stack checks. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15835
15836
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    /* ST(i) *= ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15843
15844
15845/** Opcode 0xde 0xd9. */
15846FNIEMOP_DEF(iemOp_fcompp)
15847{
15848 IEMOP_MNEMONIC("fucompp st0,stN");
15849 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15850}
15851
15852
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    /* ST(i) = ST(0) - ST(i) (reverse subtract), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15859
15860
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    /* ST(i) -= ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15867
15868
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    /* ST(i) = ST(0) / ST(i) (reverse divide), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15875
15876
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    /* ST(i) /= ST(0), then pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15883
15884
15885/**
15886 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15887 * the result in ST0.
15888 *
15889 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15890 */
15891FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15892{
15893 IEM_MC_BEGIN(3, 3);
15894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15895 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15896 IEM_MC_LOCAL(int16_t, i16Val2);
15897 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15898 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15899 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15900
15901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15903
15904 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15905 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15906 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15907
15908 IEM_MC_PREPARE_FPU_USAGE();
15909 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15910 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15911 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15912 IEM_MC_ELSE()
15913 IEM_MC_FPU_STACK_UNDERFLOW(0);
15914 IEM_MC_ENDIF();
15915 IEM_MC_ADVANCE_RIP();
15916
15917 IEM_MC_END();
15918 return VINF_SUCCESS;
15919}
15920
15921
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    /* ST(0) += (signed 16-bit integer at m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15928
15929
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    /* ST(0) *= (signed 16-bit integer at m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15936
15937
/** Opcode 0xde !11/2.
 * FICOM: compare ST(0) with a signed 16-bit integer in memory; only the FSW
 * condition codes are updated, no stack pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Only the status word changes; record the memory operand for FDP/FDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15970
15971
/** Opcode 0xde !11/3.
 * FICOMP: like FICOM m16i, but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* _THEN_POP variants: update FSW and pop the stack in one go. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16004
16005
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    /* ST(0) -= (signed 16-bit integer at m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
16012
16013
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    /* ST(0) = m16i - ST(0) (reverse subtract). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
16020
16021
16022/** Opcode 0xde !11/6. */
16023FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16024{
16025 IEMOP_MNEMONIC("fiadd m16i");
16026 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16027}
16028
16029
16030/** Opcode 0xde !11/7. */
16031FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16032{
16033 IEMOP_MNEMONIC("fiadd m16i");
16034 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16035}
16036
16037
/** Opcode 0xde.
 * Escape byte 0xde: register form (mod=3) selects the pop-variant arithmetic
 * ops (FADDP..FDIVP) plus FCOMPP (only DE D9 is valid in reg=3); memory form
 * selects the 16-bit-integer arithmetic/compare ops. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the escape opcode offset for later FOP register updates. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only DE D9 (FCOMPP) is defined in this row. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16076
16077
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Mark ST(i) empty in the tag word, then bump TOP (the "pop" part). */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16099
16100
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16117
16118
16119/** Opcode 0xdf 11/5. */
16120FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16121{
16122 IEMOP_MNEMONIC("fcomip st0,stN");
16123 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16124}
16125
16126
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    /* Ordered compare ST(0) with ST(i) into EFLAGS, then pop. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
16133
16134
/** Opcode 0xdf !11/0.
 * FILD m16i: convert a signed 16-bit integer to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current TOP; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16166
16167
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): store ST(0) as a truncated signed 16-bit integer and
 * pop the register stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16202
16203
16204/** Opcode 0xdf !11/2. */
16205FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16206{
16207 IEMOP_MNEMONIC("fistp m16i");
16208 IEM_MC_BEGIN(3, 2);
16209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16210 IEM_MC_LOCAL(uint16_t, u16Fsw);
16211 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16212 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16213 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16214
16215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16217 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16218 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16219
16220 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
16221 IEM_MC_PREPARE_FPU_USAGE();
16222 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16223 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16224 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16225 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
16226 IEM_MC_ELSE()
16227 IEM_MC_IF_FCW_IM()
16228 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16229 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16230 IEM_MC_ENDIF();
16231 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
16232 IEM_MC_ENDIF();
16233 IEM_MC_ADVANCE_RIP();
16234
16235 IEM_MC_END();
16236 return VINF_SUCCESS;
16237}
16238
16239
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a signed 16-bit integer and pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16274
16275
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: load packed BCD — not implemented yet (stub raises). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16278
16279
/** Opcode 0xdf !11/5.
 * FILD m64i: convert a signed 64-bit integer to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) relative to the current TOP; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16311
16312
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store packed BCD and pop — not implemented yet (stub raises). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16315
16316
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) as a signed 64-bit integer and pop the stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with #IA masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16351
16352
16353/** Opcode 0xdf. */
16354FNIEMOP_DEF(iemOp_EscF7)
16355{
16356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16357 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16358 {
16359 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16360 {
16361 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16362 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
16363 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16364 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
16365 case 4: if (bRm == 0xe0)
16366 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16367 return IEMOP_RAISE_INVALID_OPCODE();
16368 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16369 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16370 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16372 }
16373 }
16374 else
16375 {
16376 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16377 {
16378 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16379 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16380 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16381 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16382 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16383 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16384 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16385 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16387 }
16388 }
16389}
16390
16391
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb: decrement *CX (per address size) and jump if the result
 * is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16438
16439
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb: decrement *CX (per address size) and jump if the result
 * is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX/ECX/RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16486
16487
/** Opcode 0xe2.
 * LOOP Jb: decrement *CX (per address size) and jump if the result is
 * non-zero. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override. How can that be restarted? See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* i8Imm == -(instruction length) means the branch targets the LOOP
               instruction itself, i.e. a pure count-down busy loop; short-circuit
               it by zeroing the counter and falling through. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            /* Same self-branch short-circuit as the 16-bit case. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            /* Same self-branch short-circuit as the 16-bit case. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16561
16562
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: jump if the address-size counter register is zero
 * (the counter is NOT modified). */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Note the inverted branch shape: non-zero falls through, zero jumps. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16606
16607
16608/** Opcode 0xe4 */
16609FNIEMOP_DEF(iemOp_in_AL_Ib)
16610{
16611 IEMOP_MNEMONIC("in eAX,Ib");
16612 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16613 IEMOP_HLP_NO_LOCK_PREFIX();
16614 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16615}
16616
16617
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects AX (2 bytes) or EAX (4 bytes). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16626
16627
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Byte-sized port write (cbReg=1). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16636
16637
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects AX (2 bytes) or EAX (4 bytes). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16646
16647
/** Opcode 0xe8.
 * CALL rel16/rel32: near relative call; the relative displacement width
 * follows the effective operand size (sign-extended to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still encodes a 32-bit displacement, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16676
16677
/** Opcode 0xe9.
 * JMP rel16/rel32: near relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        /* 64-bit mode uses the same 32-bit displacement encoding. */
        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16707
16708
/** Opcode 0xea.
 * JMP ptr16:16/ptr16:32: direct far jump; invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16725
16726
/** Opcode 0xeb.
 * JMP rel8: short relative jump. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16740
16741
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Byte-sized port read from the port in DX (cbReg=1). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16749
16750
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX) /* NOTE(review): name is missing the "in_" prefix cf.
                             its siblings; kept as-is since the opcode table
                             references it by this name. */
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects AX (2 bytes) or EAX (4 bytes). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16758
16759
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Byte-sized port write to the port in DX (cbReg=1). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16767
16768
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Operand size selects AX (2 bytes) or EAX (4 bytes). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16776
16777
/** Opcode 0xf0.
 * LOCK prefix: records the prefix and restarts decoding with the next byte. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    /* Fetch and dispatch the instruction byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16787
16788
/** Opcode 0xf1.
 * INT1/ICEBP: raises #DB, but is not treated as the INT3 breakpoint
 * instruction (fIsBpInstr=false). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16797
16798
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: records the prefix and restarts decoding. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    /* Fetch and dispatch the instruction byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16810
16811
/** Opcode 0xf3.
 * REP/REPE/REPZ prefix: records the prefix and restarts decoding. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    /* Fetch and dispatch the instruction byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16823
16824
/** Opcode 0xf4.
 * HLT: privileged halt; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
16831
16832
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags affected. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16844
16845
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Dispatches on the ModRM mode: register operands are modified in place via a
 * register reference; memory operands are mapped read-write, updated, then
 * committed.  The LOCK prefix selects the locked assembly worker for memory
 * operands.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Third argument 0: no immediate bytes follow the ModRM encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix picks the atomic worker variant. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16889
16890
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared iemOpCommonUnaryGReg worker;
 * memory operands are handled here per effective operand size (16/32/64 bit),
 * with a read-write mapping and locked/unlocked worker selection.
 *
 * @param bRm The RM byte.
 * @param pImpl The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16969
16970
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib: AND the operands, update flags, discard the result —
       hence the read-only mapping in the memory path below. */
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = one immediate byte still follows the ModRM encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17018
17019
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /* TEST Ev,Iv: like test Eb,Ib but per effective operand size; the 64-bit
       immediate is a sign-extended 32-bit value (standard x86 encoding). */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = size of the trailing immediate (for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4: the 64-bit form also encodes a 32-bit immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17156
17157
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized mul/imul/div/idiv forms: the 8-bit operand
 * comes from r/m8, the implicit destination is AX, and a non-zero return from
 * the assembly worker raises \#DE (divide error).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /* NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX is invoked again in each branch
       below, making this first call redundant (harmless). */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17212
17213
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword mul/imul/div/idiv forms.  The
 * implicit destination register pair is DX:AX / EDX:EAX / RDX:RAX; a non-zero
 * return from the assembly worker raises \#DE.  In the 32-bit case the high
 * dwords of RAX/RDX are explicitly cleared on success, matching the
 * architectural 32-bit register write behavior in long mode.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero the upper halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17397
/** Opcode 0xf6.
 *
 * Group 3 byte-operand dispatcher: routes on ModRM.reg to test/not/neg/
 * mul/imul/div/idiv; /1 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17434
17435
/** Opcode 0xf7.
 *
 * Group 3 word/dword/qword dispatcher; mirrors iemOp_Grp3_Eb but passes the
 * multi-size implementation tables to the Ev workers.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17472
17473
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC: clear the carry flag. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17485
17486
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC: set the carry flag. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17498
17499
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI: IOPL/VME checks and IF clearing are done by the C implementation. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
17507
17508
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI: IOPL checks and interrupt-shadow handling are done by the C impl. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
17515
17516
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD: clear the direction flag (string ops count upwards). */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17528
17529
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD: set the direction flag (string ops count downwards). */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17541
17542
17543/** Opcode 0xfe. */
17544FNIEMOP_DEF(iemOp_Grp4)
17545{
17546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17547 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17548 {
17549 case 0:
17550 IEMOP_MNEMONIC("inc Ev");
17551 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17552 case 1:
17553 IEMOP_MNEMONIC("dec Ev");
17554 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17555 default:
17556 IEMOP_MNEMONIC("grp4-ud");
17557 return IEMOP_RAISE_INVALID_OPCODE();
17558 }
17559}
17560
17561
/**
 * Opcode 0xff /2.
 *
 * Near indirect call: the target RIP is read from a register or memory
 * operand of the effective operand size; the push/branch itself is done by
 * the iemCImpl_call_* workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory operand (copy-paste comment fixed). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17643
17644typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17645
17646FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17647{
17648 /* Registers? How?? */
17649 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17650 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17651
17652 /* Far pointer loaded from memory. */
17653 switch (pIemCpu->enmEffOpSize)
17654 {
17655 case IEMMODE_16BIT:
17656 IEM_MC_BEGIN(3, 1);
17657 IEM_MC_ARG(uint16_t, u16Sel, 0);
17658 IEM_MC_ARG(uint16_t, offSeg, 1);
17659 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17663 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17664 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17665 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17666 IEM_MC_END();
17667 return VINF_SUCCESS;
17668
17669 case IEMMODE_64BIT:
17670 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17671 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17672 * and call far qword [rsp] encodings. */
17673 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17674 {
17675 IEM_MC_BEGIN(3, 1);
17676 IEM_MC_ARG(uint16_t, u16Sel, 0);
17677 IEM_MC_ARG(uint64_t, offSeg, 1);
17678 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17682 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17683 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17684 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17685 IEM_MC_END();
17686 return VINF_SUCCESS;
17687 }
17688 /* AMD falls thru. */
17689
17690 case IEMMODE_32BIT:
17691 IEM_MC_BEGIN(3, 1);
17692 IEM_MC_ARG(uint16_t, u16Sel, 0);
17693 IEM_MC_ARG(uint32_t, offSeg, 1);
17694 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17698 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17699 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17700 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17701 IEM_MC_END();
17702 return VINF_SUCCESS;
17703
17704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17705 }
17706}
17707
17708
/**
 * Opcode 0xff /3.
 *
 * Far indirect call through a memory far pointer; shares the loading logic
 * with jmpf Ep via iemOpHlp_Grp5_far_Ep.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17718
17719
/**
 * Opcode 0xff /4.
 *
 * Near indirect jump: the target RIP is read from a register or memory
 * operand and installed directly via IEM_MC_SET_RIP_* (no stack activity,
 * unlike calln).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17801
17802
17803/**
17804 * Opcode 0xff /5.
17805 * @param bRm The RM byte.
17806 */
17807FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
17808{
17809 IEMOP_MNEMONIC("jmpf Ep");
17810 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
17811}
17812
17813
17814/**
17815 * Opcode 0xff /6.
17816 * @param bRm The RM byte.
17817 */
17818FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
17819{
17820 IEMOP_MNEMONIC("push Ev");
17821 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
17822
17823 /* Registers are handled by a common worker. */
17824 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17825 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
17826
17827 /* Memory we do here. */
17828 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17829 switch (pIemCpu->enmEffOpSize)
17830 {
17831 case IEMMODE_16BIT:
17832 IEM_MC_BEGIN(0, 2);
17833 IEM_MC_LOCAL(uint16_t, u16Src);
17834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17836 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17837 IEM_MC_PUSH_U16(u16Src);
17838 IEM_MC_ADVANCE_RIP();
17839 IEM_MC_END();
17840 return VINF_SUCCESS;
17841
17842 case IEMMODE_32BIT:
17843 IEM_MC_BEGIN(0, 2);
17844 IEM_MC_LOCAL(uint32_t, u32Src);
17845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17847 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17848 IEM_MC_PUSH_U32(u32Src);
17849 IEM_MC_ADVANCE_RIP();
17850 IEM_MC_END();
17851 return VINF_SUCCESS;
17852
17853 case IEMMODE_64BIT:
17854 IEM_MC_BEGIN(0, 2);
17855 IEM_MC_LOCAL(uint64_t, u64Src);
17856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17858 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
17859 IEM_MC_PUSH_U64(u64Src);
17860 IEM_MC_ADVANCE_RIP();
17861 IEM_MC_END();
17862 return VINF_SUCCESS;
17863
17864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17865 }
17866}
17867
17868
/**
 * Opcode 0xff - Group 5.
 *
 * Fetches the ModR/M byte and dispatches on its reg field (/0../7):
 *   /0 INC Ev, /1 DEC Ev, /2 CALL Ev (near indirect), /3 CALLF Ep,
 *   /4 JMP Ev (near indirect), /5 JMPF Ep, /6 PUSH Ev, /7 invalid (#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            /* /7 is undefined for group 5. */
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* Unreachable: the 3-bit reg field is fully covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
17897
17898
17899
/**
 * The one-byte opcode decoder map.
 *
 * Indexed directly by the first opcode byte (0x00..0xff); each entry is the
 * decoder function for that opcode (or prefix / escape / group handler).
 * Declared extern at the top of the file so it can be forward referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17967
17968
17969/** @} */
17970
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette