VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 36828

Last change on this file since 36828 was 36828, checked in by vboxsync, 14 years ago

IEM: implemented XLAT, extended the output when hitting a stub.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 289.9 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 36828 2011-04-23 23:17:21Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/**
20 * Common worker for instructions like ADD, AND, OR, ++ with a byte
21 * memory/register as the destination.
22 *
23 * @param pImpl Pointer to the instruction implementation (assembly).
24 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModRM byte, fetches the 8-bit source from the reg field
 * (REX.R extended) and applies pImpl's 8-bit worker to the r/m destination,
 * which may be a register or a memory operand.  EFLAGS are updated by the
 * worker.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only tolerated on the memory form below. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is NULL only for CMP/TEST, which never write the
           destination, so it doubles as a read-only/read-write selector. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t,   u8Src,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
80
81
82/**
83 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
84 * memory/register as the destination.
85 *
86 * @param pImpl Pointer to the instruction implementation (assembly).
87 */
88FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
89{
90 uint8_t bRm;
91 IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
92
93 /*
94 * If rm is denoting a register, no more instruction bytes.
95 */
96 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
97 {
98 IEMOP_HLP_NO_LOCK_PREFIX();
99
100 switch (pIemCpu->enmEffOpSize)
101 {
102 case IEMMODE_16BIT:
103 IEM_MC_BEGIN(3, 0);
104 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
105 IEM_MC_ARG(uint16_t, u16Src, 1);
106 IEM_MC_ARG(uint32_t *, pEFlags, 2);
107
108 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
109 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
110 IEM_MC_REF_EFLAGS(pEFlags);
111 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
112
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 break;
116
117 case IEMMODE_32BIT:
118 IEM_MC_BEGIN(3, 0);
119 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
120 IEM_MC_ARG(uint32_t, u32Src, 1);
121 IEM_MC_ARG(uint32_t *, pEFlags, 2);
122
123 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
124 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
125 IEM_MC_REF_EFLAGS(pEFlags);
126 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
127
128 IEM_MC_ADVANCE_RIP();
129 IEM_MC_END();
130 break;
131
132 case IEMMODE_64BIT:
133 IEM_MC_BEGIN(3, 0);
134 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
135 IEM_MC_ARG(uint64_t, u64Src, 1);
136 IEM_MC_ARG(uint32_t *, pEFlags, 2);
137
138 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
139 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
140 IEM_MC_REF_EFLAGS(pEFlags);
141 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
142
143 IEM_MC_ADVANCE_RIP();
144 IEM_MC_END();
145 break;
146 }
147 }
148 else
149 {
150 /*
151 * We're accessing memory.
152 * Note! We're putting the eflags on the stack here so we can commit them
153 * after the memory.
154 */
155 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
156 switch (pIemCpu->enmEffOpSize)
157 {
158 case IEMMODE_16BIT:
159 IEM_MC_BEGIN(3, 2);
160 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
161 IEM_MC_ARG(uint16_t, u16Src, 1);
162 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
164
165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
166 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
167 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
168 IEM_MC_FETCH_EFLAGS(EFlags);
169 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
171 else
172 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
173
174 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
175 IEM_MC_COMMIT_EFLAGS(EFlags);
176 IEM_MC_ADVANCE_RIP();
177 IEM_MC_END();
178 break;
179
180 case IEMMODE_32BIT:
181 IEM_MC_BEGIN(3, 2);
182 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
183 IEM_MC_ARG(uint32_t, u32Src, 1);
184 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
186
187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
188 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
189 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
190 IEM_MC_FETCH_EFLAGS(EFlags);
191 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
192 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
193 else
194 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
195
196 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
197 IEM_MC_COMMIT_EFLAGS(EFlags);
198 IEM_MC_ADVANCE_RIP();
199 IEM_MC_END();
200 break;
201
202 case IEMMODE_64BIT:
203 IEM_MC_BEGIN(3, 2);
204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
205 IEM_MC_ARG(uint64_t, u64Src, 1);
206 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
208
209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
210 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
211 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
212 IEM_MC_FETCH_EFLAGS(EFlags);
213 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
214 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
215 else
216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
217
218 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
219 IEM_MC_COMMIT_EFLAGS(EFlags);
220 IEM_MC_ADVANCE_RIP();
221 IEM_MC_END();
222 break;
223 }
224 }
225 return VINF_SUCCESS;
226}
227
228
229/**
230 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
231 * the destination.
232 *
233 * @param pImpl Pointer to the instruction implementation (assembly).
234 */
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Decodes the ModRM byte, fetches the 8-bit source from r/m (register or
 * memory) and applies pImpl's 8-bit worker to the reg-field register
 * destination (REX.R extended).  EFLAGS are updated by the worker.  No
 * locked variant exists since the destination is always a register.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Memory is only read here, so no mapping/commit dance is needed. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
281
282
283/**
284 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
285 * register as the destination.
286 *
287 * @param pImpl Pointer to the instruction implementation (assembly).
288 */
289FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
290{
291 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
292 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
293
294 /*
295 * If rm is denoting a register, no more instruction bytes.
296 */
297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
298 {
299 switch (pIemCpu->enmEffOpSize)
300 {
301 case IEMMODE_16BIT:
302 IEM_MC_BEGIN(3, 0);
303 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
304 IEM_MC_ARG(uint16_t, u16Src, 1);
305 IEM_MC_ARG(uint32_t *, pEFlags, 2);
306
307 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
308 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
309 IEM_MC_REF_EFLAGS(pEFlags);
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
311
312 IEM_MC_ADVANCE_RIP();
313 IEM_MC_END();
314 break;
315
316 case IEMMODE_32BIT:
317 IEM_MC_BEGIN(3, 0);
318 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
319 IEM_MC_ARG(uint32_t, u32Src, 1);
320 IEM_MC_ARG(uint32_t *, pEFlags, 2);
321
322 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
323 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
324 IEM_MC_REF_EFLAGS(pEFlags);
325 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
326
327 IEM_MC_ADVANCE_RIP();
328 IEM_MC_END();
329 break;
330
331 case IEMMODE_64BIT:
332 IEM_MC_BEGIN(3, 0);
333 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
334 IEM_MC_ARG(uint64_t, u64Src, 1);
335 IEM_MC_ARG(uint32_t *, pEFlags, 2);
336
337 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
338 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
339 IEM_MC_REF_EFLAGS(pEFlags);
340 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
341
342 IEM_MC_ADVANCE_RIP();
343 IEM_MC_END();
344 break;
345 }
346 }
347 else
348 {
349 /*
350 * We're accessing memory.
351 */
352 switch (pIemCpu->enmEffOpSize)
353 {
354 case IEMMODE_16BIT:
355 IEM_MC_BEGIN(3, 1);
356 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
357 IEM_MC_ARG(uint16_t, u16Src, 1);
358 IEM_MC_ARG(uint32_t *, pEFlags, 2);
359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
360
361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
362 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
363 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
364 IEM_MC_REF_EFLAGS(pEFlags);
365 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
366
367 IEM_MC_ADVANCE_RIP();
368 IEM_MC_END();
369 break;
370
371 case IEMMODE_32BIT:
372 IEM_MC_BEGIN(3, 1);
373 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
374 IEM_MC_ARG(uint32_t, u32Src, 1);
375 IEM_MC_ARG(uint32_t *, pEFlags, 2);
376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
377
378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
379 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
380 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
381 IEM_MC_REF_EFLAGS(pEFlags);
382 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
383
384 IEM_MC_ADVANCE_RIP();
385 IEM_MC_END();
386 break;
387
388 case IEMMODE_64BIT:
389 IEM_MC_BEGIN(3, 1);
390 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
391 IEM_MC_ARG(uint64_t, u64Src, 1);
392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
394
395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
396 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
397 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
398 IEM_MC_REF_EFLAGS(pEFlags);
399 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
400
401 IEM_MC_ADVANCE_RIP();
402 IEM_MC_END();
403 break;
404 }
405 }
406 return VINF_SUCCESS;
407}
408
409
410/**
411 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
412 * a byte immediate.
413 *
414 * @param pImpl Pointer to the instruction implementation (assembly).
415 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the immediate byte and applies pImpl's 8-bit worker to AL,
 * updating EFLAGS via the worker.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,     pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,    pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); /* AL = low byte of xAX. */
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
434
435
436/**
437 * Common worker for instructions like ADD, AND, OR, ++ with working on
438 * AX/EAX/RAX with a word/dword immediate.
439 *
440 * @param pImpl Pointer to the instruction implementation (assembly).
441 */
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The immediate is Iz: 16-bit for 16-bit operand size, otherwise 32-bit; in
 * 64-bit mode the 32-bit immediate is sign-extended to 64 bits (see the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz is imm32 sign-extended to 64 bits in 64-bit mode. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
506
507
/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    /* Shared handler for opcodes IEM treats as invalid: raise \#UD. */
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
514
515
516
517/** @name ..... opcodes.
518 *
519 * @{
520 */
521
522/** @} */
523
524
525/** @name Two byte opcodes (first byte 0x0f).
526 *
527 * @{
528 */
529
/** Opcode 0x0f 0x00 /0.  SLDT. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
535
536
/** Opcode 0x0f 0x00 /1.  STR. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
542
543
/** Opcode 0x0f 0x00 /2.  LLDT. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
549
550
/** Opcode 0x0f 0x00 /3.  LTR. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
556
557
/** Opcode 0x0f 0x00 /4.  VERR. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
563
564
/** Opcode 0x0f 0x00 /5.  VERW. */
FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
570
571
/** Opcode 0x0f 0x00.  Group 6: dispatch on the ModRM reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr,  bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
        /* /6 and /7 are undefined in group 6. */
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        case 7: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

}
590
591
/** Opcode 0x0f 0x01 /0 (memory form).  SGDT. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
597
598
/** Opcode 0x0f 0x01 /0 (mod=3, r/m=1 per the Grp7 dispatcher).  VMCALL. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
605
606
/** Opcode 0x0f 0x01 /0 (mod=3, r/m=2 per the Grp7 dispatcher).  VMLAUNCH. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
613
614
/** Opcode 0x0f 0x01 /0 (mod=3, r/m=3 per the Grp7 dispatcher).  VMRESUME. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
621
622
/** Opcode 0x0f 0x01 /0 (mod=3, r/m=4 per the Grp7 dispatcher).  VMXOFF. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
629
630
/** Opcode 0x0f 0x01 /1 (memory form).  SIDT. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
636
637
/** Opcode 0x0f 0x01 /1 (mod=3, r/m=0 per the Grp7 dispatcher).  MONITOR. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
643
644
/** Opcode 0x0f 0x01 /1 (mod=3, r/m=1 per the Grp7 dispatcher).  MWAIT. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
650
651
/** Opcode 0x0f 0x01 /2 (memory form).  LGDT. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* In 64-bit mode the effective operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg, /*=*/pIemCpu->iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    /* Defer the actual descriptor-table load to the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
669
670
/** Opcode 0x0f 0x01 /2 (mod=3, r/m=0 per the Grp7 dispatcher).  XGETBV. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
677
678
/** Opcode 0x0f 0x01 /2 (mod=3, r/m=1 per the Grp7 dispatcher).  XSETBV. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    /* Not implemented: assert in strict builds, then raise \#UD. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
685
686
/** Opcode 0x0f 0x01 /3 (memory form).  LIDT. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /* In 64-bit mode the effective operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pIemCpu->enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg, /*=*/pIemCpu->iEffSeg,     0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                        1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,  2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    /* Defer the actual descriptor-table load to the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
704
705
/** Opcode 0x0f 0x01 /4.  SMSW. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
711
712
/** Opcode 0x0f 0x01 /6.  LMSW. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
718
719
/** Opcode 0x0f 0x01 /7 (memory form).  INVLPG. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
725
726
/** Opcode 0x0f 0x01 /7 (mod=3, r/m=0 per the Grp7 dispatcher).  SWAPGS. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
732
733
/** Opcode 0x0f 0x01 /7 (mod=3, r/m=1 per the Grp7 dispatcher).  RDTSCP. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet: assert and bail. */
    AssertFailedReturn(VERR_NOT_IMPLEMENTED);
}
739
740
/** Opcode 0x0f 0x01.  Group 7: dispatch on the ModRM reg field; several
 *  encodings further distinguish the register form (mod=3) by r/m. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory form: SGDT.  Register form: VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory form: SIDT.  Register form: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory form: LGDT.  Register form: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* LIDT is memory only. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            return IEMOP_RAISE_INVALID_OPCODE();

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory form: INVLPG.  Register form: SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
806
807
/*
 * Stubbed two-byte opcode handlers, 0x0f 0x02 .. 0x0f 0x18.
 * NOTE(review): per the Intel opcode map SYSCALL/CLTS/SYSRET are 0F 05/06/07;
 * the previous doxygen comments said 0x04/0x05/0x06 — corrected below, verify
 * against the dispatch table (not visible in this chunk).
 */
/** Opcode 0x0f 0x02. */
FNIEMOP_STUB(iemOp_lar_Gv_Ew);
/** Opcode 0x0f 0x03. */
FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
/** Opcode 0x0f 0x05 (SYSCALL). */
FNIEMOP_STUB(iemOp_syscall);
/** Opcode 0x0f 0x06 (CLTS). */
FNIEMOP_STUB(iemOp_clts);
/** Opcode 0x0f 0x07 (SYSRET). */
FNIEMOP_STUB(iemOp_sysret);
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);
/** Opcode 0x0f 0x09. */
FNIEMOP_STUB(iemOp_wbinvd);
/** Opcode 0x0f 0x0b. */
FNIEMOP_STUB(iemOp_ud2);
/** Opcode 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_nop_Ev_prefetch);
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);
/** Opcode 0x0f 0x0f. */
FNIEMOP_STUB(iemOp_3Dnow);
/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
/** Opcode 0x0f 0x18. */
FNIEMOP_STUB(iemOp_prefetch_Grp16);
848
849
/** Opcode 0x0f 0x20.  MOV Rd,Cd — read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    /* Operand size is fixed: 64-bit in long mode, otherwise 32-bit. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    /** @todo Verify that the invalid lock sequence exception (\#UD) is raised
     *        before the privilege level violation (\#GP). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs
           (AMD CR8 legacy / AltMovCr8, advertised via CPUID). */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is \#UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
881
882
/** Opcode 0x0f 0x21.  MOV Rd,Dd — read a debug register; not implemented. */
FNIEMOP_STUB(iemOp_mov_Rd_Dd);
885
886
/** Opcode 0x0f 0x22.  MOV Cd,Rd — write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    /* Operand size is fixed: 64-bit in long mode, otherwise 32-bit. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    /** @todo Verify that the invalid lock sequence exception (\#UD) is raised
     *        before the privilege level violation (\#GP). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs
           (AMD CR8 legacy / AltMovCr8, advertised via CPUID). */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is \#UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
918
919
/*
 * Stubbed two-byte opcode handlers, 0x0f 0x23 .. 0x0f 0x74.
 * NOTE(review): per the Intel opcode map RDMSR is 0F 32 and RDPMC is 0F 33;
 * the previous doxygen comments said 0x33/0x34 — corrected below, verify
 * against the dispatch table (not visible in this chunk).
 */
/** Opcode 0x0f 0x23. */
FNIEMOP_STUB(iemOp_mov_Dd_Rd);
/** Opcode 0x0f 0x24. */
FNIEMOP_STUB(iemOp_mov_Rd_Td);
/** Opcode 0x0f 0x26. */
FNIEMOP_STUB(iemOp_mov_Td_Rd);
/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
/** Opcode 0x0f 0x30. */
FNIEMOP_STUB(iemOp_wrmsr);
/** Opcode 0x0f 0x31. */
FNIEMOP_STUB(iemOp_rdtsc);
/** Opcode 0x0f 0x32 (RDMSR). */
FNIEMOP_STUB(iemOp_rdmsr);
/** Opcode 0x0f 0x33 (RDPMC). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_STUB(iemOp_3byte_Esc_A4);
/** Opcode 0x0f 0x39. */
FNIEMOP_STUB(iemOp_3byte_Esc_A5);
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
/** Opcode 0x0f 0x40. */
FNIEMOP_STUB(iemOp_cmovo_Gv_Ev);
/** Opcode 0x0f 0x41. */
FNIEMOP_STUB(iemOp_cmovno_Gv_Ev);
/** Opcode 0x0f 0x42. */
FNIEMOP_STUB(iemOp_cmovc_Gv_Ev);
/** Opcode 0x0f 0x43. */
FNIEMOP_STUB(iemOp_cmovnc_Gv_Ev);
/** Opcode 0x0f 0x44. */
FNIEMOP_STUB(iemOp_cmove_Gv_Ev);
/** Opcode 0x0f 0x45. */
FNIEMOP_STUB(iemOp_cmovne_Gv_Ev);
/** Opcode 0x0f 0x46. */
FNIEMOP_STUB(iemOp_cmovbe_Gv_Ev);
/** Opcode 0x0f 0x47. */
FNIEMOP_STUB(iemOp_cmovnbe_Gv_Ev);
/** Opcode 0x0f 0x48. */
FNIEMOP_STUB(iemOp_cmovs_Gv_Ev);
/** Opcode 0x0f 0x49. */
FNIEMOP_STUB(iemOp_cmovns_Gv_Ev);
/** Opcode 0x0f 0x4a. */
FNIEMOP_STUB(iemOp_cmovp_Gv_Ev);
/** Opcode 0x0f 0x4b. */
FNIEMOP_STUB(iemOp_cmovnp_Gv_Ev);
/** Opcode 0x0f 0x4c. */
FNIEMOP_STUB(iemOp_cmovl_Gv_Ev);
/** Opcode 0x0f 0x4d. */
FNIEMOP_STUB(iemOp_cmovnl_Gv_Ev);
/** Opcode 0x0f 0x4e. */
FNIEMOP_STUB(iemOp_cmovle_Gv_Ev);
/** Opcode 0x0f 0x4f. */
FNIEMOP_STUB(iemOp_cmovnle_Gv_Ev);
/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
/** Opcode 0x0f 0x60. */
FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
/** Opcode 0x0f 0x61. */
FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
/** Opcode 0x0f 0x62. */
FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
/** Opcode 0x0f 0x68. */
FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
/** Opcode 0x0f 0x69. */
FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
/** Opcode 0x0f 0x6a. */
FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
/** Opcode 0x0f 0x6c. */
FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6d. */
FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
/** Opcode 0x0f 0x6e. */
FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
/** Opcode 0x0f 0x6f. */
FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
/** Opcode 0x0f 0x70. */
FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
/** Opcode 0x0f 0x71. */
FNIEMOP_STUB(iemOp_Grp12);
/** Opcode 0x0f 0x72. */
FNIEMOP_STUB(iemOp_Grp13);
/** Opcode 0x0f 0x73. */
FNIEMOP_STUB(iemOp_Grp14);
/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
1068/** Opcode 0x0f 0x75. */
1069FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
1070/** Opcode 0x0f 0x76. */
1071FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
1072/** Opcode 0x0f 0x77. */
1073FNIEMOP_STUB(iemOp_emms);
1074/** Opcode 0x0f 0x78. */
1075FNIEMOP_STUB(iemOp_vmread);
1076/** Opcode 0x0f 0x79. */
1077FNIEMOP_STUB(iemOp_vmwrite);
1078/** Opcode 0x0f 0x7c. */
1079FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
1080/** Opcode 0x0f 0x7d. */
1081FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
1082/** Opcode 0x0f 0x7e. */
1083FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
1084/** Opcode 0x0f 0x7f. */
1085FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
1086
1087
/** Opcode 0x0f 0x80.  jo Jv: jump near, relative, if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    /* Per the macro: in 64-bit mode the operand size defaults to 64-bit,
       so the 16-bit path below is only taken with an explicit 0x66 prefix. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when OF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1121
1122
/** Opcode 0x0f 0x81.  jno Jv: jump near, relative, if no overflow (OF=0) —
 *  the taken jump sits on the ELSE path of the OF test. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1156
1157
/** Opcode 0x0f 0x82.  jc/jb/jnae Jv: jump near, relative, if carry (CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when CF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1191
1192
/** Opcode 0x0f 0x83.  jnc/jnb/jae Jv: jump near, relative, if no carry (CF=0) —
 *  taken jump is on the ELSE path of the CF test. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1226
1227
/** Opcode 0x0f 0x84.  je/jz Jv: jump near, relative, if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when ZF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1261
1262
/** Opcode 0x0f 0x85.  jne/jnz Jv: jump near, relative, if not equal/zero (ZF=0) —
 *  taken jump is on the ELSE path of the ZF test. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1296
1297
/** Opcode 0x0f 0x86.  jbe/jna Jv: jump near, relative, if below or equal
 *  (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when CF or ZF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1331
1332
/** Opcode 0x0f 0x87.  jnbe/ja Jv: jump near, relative, if above
 *  (CF=0 and ZF=0) — taken jump is on the ELSE path of the CF|ZF test. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1366
1367
/** Opcode 0x0f 0x88.  js Jv: jump near, relative, if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when SF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1401
1402
/** Opcode 0x0f 0x89.  jns Jv: jump near, relative, if no sign (SF=0) —
 *  taken jump is on the ELSE path of the SF test. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1436
1437
/** Opcode 0x0f 0x8a.  jp Jv: jump near, relative, if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when PF is set. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1471
1472
1473/** Opcode 0x0f 0x8b. */
1474FNIEMOP_DEF(iemOp_jnp_Jv)
1475{
1476 IEMOP_MNEMONIC("jo Jv");
1477 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1478 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
1479 {
1480 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
1481 IEMOP_HLP_NO_LOCK_PREFIX();
1482
1483 IEM_MC_BEGIN(0, 0);
1484 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
1485 IEM_MC_ADVANCE_RIP();
1486 } IEM_MC_ELSE() {
1487 IEM_MC_REL_JMP_S16((int16_t)u16Imm);
1488 } IEM_MC_ENDIF();
1489 IEM_MC_END();
1490 }
1491 else
1492 {
1493 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
1494 IEMOP_HLP_NO_LOCK_PREFIX();
1495
1496 IEM_MC_BEGIN(0, 0);
1497 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
1498 IEM_MC_ADVANCE_RIP();
1499 } IEM_MC_ELSE() {
1500 IEM_MC_REL_JMP_S32((int32_t)u32Imm);
1501 } IEM_MC_ENDIF();
1502 IEM_MC_END();
1503 }
1504 return VINF_SUCCESS;
1505}
1506
1507
/** Opcode 0x0f 0x8c.  jl/jnge Jv: jump near, relative, if less (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when SF differs from OF. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1541
1542
/** Opcode 0x0f 0x8d.  jnl/jge Jv: jump near, relative, if not less (SF == OF) —
 *  taken jump is on the ELSE path of the SF != OF test. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1576
1577
/** Opcode 0x0f 0x8e.  jle/jng Jv: jump near, relative, if less or equal
 *  (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement; jump taken when ZF set or SF != OF. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1611
1612
/** Opcode 0x0f 0x8f.  jnle/jg Jv: jump near, relative, if greater
 *  (ZF=0 and SF == OF) — taken jump is on the ELSE path of the test. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit (or 64-bit default) operand size: 32-bit signed displacement. */
        uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1646
1647
/* Unimplemented SETcc Eb instructions, opcodes 0x0f 0x90 thru 0x0f 0x9f.
   NOTE(review): the stub names say "_Jv" but SETcc takes an Eb operand —
   presumably a naming slip to fix when implementing. */
/** Opcode 0x0f 0x90. */
FNIEMOP_STUB(iemOp_seto_Jv);
/** Opcode 0x0f 0x91. */
FNIEMOP_STUB(iemOp_setno_Jv);
/** Opcode 0x0f 0x92. */
FNIEMOP_STUB(iemOp_setc_Jv);
/** Opcode 0x0f 0x93. */
FNIEMOP_STUB(iemOp_setnc_Jv);
/** Opcode 0x0f 0x94. */
FNIEMOP_STUB(iemOp_sete_Jv);
/** Opcode 0x0f 0x95. */
FNIEMOP_STUB(iemOp_setne_Jv);
/** Opcode 0x0f 0x96. */
FNIEMOP_STUB(iemOp_setbe_Jv);
/** Opcode 0x0f 0x97. */
FNIEMOP_STUB(iemOp_setnbe_Jv);
/** Opcode 0x0f 0x98. */
FNIEMOP_STUB(iemOp_sets_Jv);
/** Opcode 0x0f 0x99. */
FNIEMOP_STUB(iemOp_setns_Jv);
/** Opcode 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_setp_Jv);
/** Opcode 0x0f 0x9b. */
FNIEMOP_STUB(iemOp_setnp_Jv);
/** Opcode 0x0f 0x9c. */
FNIEMOP_STUB(iemOp_setl_Jv);
/** Opcode 0x0f 0x9d. */
FNIEMOP_STUB(iemOp_setnl_Jv);
/** Opcode 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_setle_Jv);
/** Opcode 0x0f 0x9f. */
FNIEMOP_STUB(iemOp_setnle_Jv);
1680
1681
1682/**
1683 * Common 'push segment-register' helper.
1684 */
1685FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
1686{
1687 IEMOP_HLP_NO_LOCK_PREFIX();
1688 if (iReg < X86_SREG_FS)
1689 IEMOP_HLP_NO_64BIT();
1690 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1691
1692 switch (pIemCpu->enmEffOpSize)
1693 {
1694 case IEMMODE_16BIT:
1695 IEM_MC_BEGIN(0, 1);
1696 IEM_MC_LOCAL(uint16_t, u16Value);
1697 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
1698 IEM_MC_PUSH_U16(u16Value);
1699 IEM_MC_ADVANCE_RIP();
1700 IEM_MC_END();
1701 break;
1702
1703 case IEMMODE_32BIT:
1704 IEM_MC_BEGIN(0, 1);
1705 IEM_MC_LOCAL(uint32_t, u32Value);
1706 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
1707 IEM_MC_PUSH_U32(u32Value);
1708 IEM_MC_ADVANCE_RIP();
1709 IEM_MC_END();
1710 break;
1711
1712 case IEMMODE_64BIT:
1713 IEM_MC_BEGIN(0, 1);
1714 IEM_MC_LOCAL(uint64_t, u64Value);
1715 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
1716 IEM_MC_PUSH_U64(u64Value);
1717 IEM_MC_ADVANCE_RIP();
1718 IEM_MC_END();
1719 break;
1720 }
1721
1722 return VINF_SUCCESS;
1723}
1724
1725
/** Opcode 0x0f 0xa0.  push fs. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* Lock-prefix check is repeated inside iemOpCommonPushSReg; harmless. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
1732
1733
/** Opcode 0x0f 0xa1.  pop fs — deferred to the C implementation, which gets
 *  the register index and current effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemOpCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
1740
1741
/* Unimplemented opcodes 0x0f 0xa2 thru 0x0f 0xa7 (cpuid, bt, shld). */
/** Opcode 0x0f 0xa2. */
FNIEMOP_STUB(iemOp_cpuid);
/** Opcode 0x0f 0xa3. */
FNIEMOP_STUB(iemOp_bt_Ev_Gv);
/** Opcode 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_shld_Ev_Gv_Ib);
/** Opcode 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_shld_Ev_Gv_CL);
1750
1751
/** Opcode 0x0f 0xa8.  push gs. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* Lock-prefix check is repeated inside iemOpCommonPushSReg; harmless. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
1758
1759
/** Opcode 0x0f 0xa9.  pop gs — deferred to the C implementation, which gets
 *  the register index and current effective operand size. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemOpCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
1766
1767
/* Unimplemented opcodes 0x0f 0xaa thru 0x0f 0xae (rsm, bts, shrd, group 15). */
/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
/** Opcode 0x0f 0xab. */
FNIEMOP_STUB(iemOp_bts_Ev_Gv);
/** Opcode 0x0f 0xac. */
FNIEMOP_STUB(iemOp_shrd_Ev_Gv_Ib);
/** Opcode 0x0f 0xad. */
FNIEMOP_STUB(iemOp_shrd_Ev_Gv_CL);
/** Opcode 0x0f 0xae. */
FNIEMOP_STUB(iemOp_Grp15);
1778
1779
/** Opcode 0x0f 0xaf.  imul Gv,Ev (two-operand form) — delegated to the
 *  common register/memory binary-operator decoder. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
#ifdef IEM_VERIFICATION_MODE
    /* Verification-mode workaround for mul/div result comparison; see the
       fMulDivHack consumer for details. */
    pIemCpu->fMulDivHack = true;
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
1789
1790
/* Unimplemented cmpxchg instructions, opcodes 0x0f 0xb0 and 0x0f 0xb1. */
/** Opcode 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_cmpxchg_Eb_Gb);
/** Opcode 0x0f 0xb1. */
FNIEMOP_STUB(iemOp_cmpxchg_Ev_Gv);
1795
1796
1797FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
1798{
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
1800 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
1801
1802 /* The source cannot be a register. */
1803 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1804 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
1805 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & bRm & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
1806
1807 switch (pIemCpu->enmEffOpSize)
1808 {
1809 case IEMMODE_16BIT:
1810 IEM_MC_BEGIN(5, 1);
1811 IEM_MC_ARG(uint16_t, uSel, 0);
1812 IEM_MC_ARG(uint16_t, offSeg, 1);
1813 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
1814 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
1815 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
1816 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
1817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
1818 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
1819 IEM_MC_FETCH_MEM_U16(uSel, pIemCpu->iEffSeg, GCPtrEff + 2);
1820 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
1821 IEM_MC_END();
1822 return VINF_SUCCESS;
1823
1824 case IEMMODE_32BIT:
1825 IEM_MC_BEGIN(5, 1);
1826 IEM_MC_ARG(uint16_t, uSel, 0);
1827 IEM_MC_ARG(uint32_t, offSeg, 1);
1828 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
1829 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
1830 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
1831 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
1832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
1833 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
1834 IEM_MC_FETCH_MEM_U16(uSel, pIemCpu->iEffSeg, GCPtrEff + 4);
1835 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
1836 IEM_MC_END();
1837 return VINF_SUCCESS;
1838
1839 case IEMMODE_64BIT:
1840 IEM_MC_BEGIN(5, 1);
1841 IEM_MC_ARG(uint16_t, uSel, 0);
1842 IEM_MC_ARG(uint64_t, offSeg, 1);
1843 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
1844 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
1845 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
1846 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
1847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
1848 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
1849 IEM_MC_FETCH_MEM_U16(uSel, pIemCpu->iEffSeg, GCPtrEff + 8);
1850 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
1851 IEM_MC_END();
1852 return VINF_SUCCESS;
1853
1854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1855 }
1856}
1857
1858
/** Opcode 0x0f 0xb2.  lss Gv,Mp: load far pointer into SS and Gv. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
1865
1866
/** Opcode 0x0f 0xb3.  btr Ev,Gv — not implemented yet. */
FNIEMOP_STUB(iemOp_btr_Ev_Gv);
1869
1870
/** Opcode 0x0f 0xb4.  lfs Gv,Mp: load far pointer into FS and Gv. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
1877
1878
/** Opcode 0x0f 0xb5.  lgs Gv,Mp: load far pointer into GS and Gv. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
1885
1886
/** Opcode 0x0f 0xb6.  movzx Gv,Eb: zero-extend a byte (register or memory)
 *  into a 16/32/64-bit general register. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: zero-extend Eb into the destination register per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1976
1977
/** Opcode 0x0f 0xb7.  movzx Gv,Ew: zero-extend a word (register or memory)
 *  into a 32/64-bit general register.  A 16-bit effective operand size is
 *  treated the same as 32-bit here (see the @todo below). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
2043
2044
/* Unimplemented opcodes 0x0f 0xb8 thru 0x0f 0xf0 (popcnt, bit ops, movsx,
   xadd, bswap, MMX/SSE2 integer and shuffle ops, groups 9-11). */
/** Opcode 0x0f 0xb8. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
/** Opcode 0x0f 0xb9. */
FNIEMOP_STUB(iemOp_Grp10);
/** Opcode 0x0f 0xba. */
FNIEMOP_STUB(iemOp_Grp11);
/** Opcode 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_btc_Ev_Gv);
/** Opcode 0x0f 0xbc. */
FNIEMOP_STUB(iemOp_bsf_Gv_Ev);
/** Opcode 0x0f 0xbd. */
FNIEMOP_STUB(iemOp_bsr_Gv_Ev);
/** Opcode 0x0f 0xbe. */
FNIEMOP_STUB(iemOp_movsx_Gv_Eb);
/** Opcode 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_movsx_Gv_Ew);
/** Opcode 0x0f 0xc0. */
FNIEMOP_STUB(iemOp_xadd_Eb_Gb);
/** Opcode 0x0f 0xc1. */
FNIEMOP_STUB(iemOp_xadd_Ev_Gv);
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);
/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
/** Opcode 0x0f 0xc7. */
FNIEMOP_STUB(iemOp_Grp9);
/** Opcode 0x0f 0xc8. */
FNIEMOP_STUB(iemOp_bswap_rAX_r8);
/** Opcode 0x0f 0xc9. */
FNIEMOP_STUB(iemOp_bswap_rCX_r9);
/** Opcode 0x0f 0xca. */
FNIEMOP_STUB(iemOp_bswap_rDX_r10);
/** Opcode 0x0f 0xcb. */
FNIEMOP_STUB(iemOp_bswap_rBX_r11);
/** Opcode 0x0f 0xcc. */
FNIEMOP_STUB(iemOp_bswap_rSP_r12);
/** Opcode 0x0f 0xcd. */
FNIEMOP_STUB(iemOp_bswap_rBP_r13);
/** Opcode 0x0f 0xce. */
FNIEMOP_STUB(iemOp_bswap_rSI_r14);
/** Opcode 0x0f 0xcf. */
FNIEMOP_STUB(iemOp_bswap_rDI_r15);
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
2159/** Opcode 0x0f 0xf1. */
2160FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
2161/** Opcode 0x0f 0xf2. */
2162FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
2163/** Opcode 0x0f 0xf3. */
2164FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
2165/** Opcode 0x0f 0xf4. */
2166FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
2167/** Opcode 0x0f 0xf5. */
2168FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
2169/** Opcode 0x0f 0xf6. */
2170FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
2171/** Opcode 0x0f 0xf7. */
2172FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
2173/** Opcode 0x0f 0xf8. */
2174FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
2175/** Opcode 0x0f 0xf9. */
2176FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
2177/** Opcode 0x0f 0xfa. */
2178FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
2179/** Opcode 0x0f 0xfb. */
2180FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
2181/** Opcode 0x0f 0xfc. */
2182FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
2183/** Opcode 0x0f 0xfd. */
2184FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
2185/** Opcode 0x0f 0xfe. */
2186FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
2187
2188
/**
 * Dispatch table for the two-byte (0x0f-prefixed) opcodes, indexed by the
 * second opcode byte (fetched by iemOp_2byteEscape).  One entry per opcode;
 * entries whose handler decodes differently depending on operand-size /
 * 0x66 / 0xf2 / 0xf3 prefixes carry the combined name of all forms and
 * resolve the prefix internally.  Many entries are still FNIEMOP_STUBs.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */  iemOp_Grp6,             iemOp_Grp7,             iemOp_lar_Gv_Ew,        iemOp_lsl_Gv_Ew,
    /* 0x04 */  iemOp_Invalid,          iemOp_syscall,          iemOp_clts,             iemOp_sysret,
    /* 0x08 */  iemOp_invd,             iemOp_wbinvd,           iemOp_Invalid,          iemOp_ud2,
    /* 0x0c */  iemOp_Invalid,          iemOp_nop_Ev_prefetch,  iemOp_femms,            iemOp_3Dnow,
    /* 0x10 */  iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */  iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */  iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */  iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */  iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */  iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */  iemOp_prefetch_Grp16,   iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x1c */  iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x20 */  iemOp_mov_Rd_Cd,        iemOp_mov_Rd_Dd,        iemOp_mov_Cd_Rd,        iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,        iemOp_Invalid,          iemOp_mov_Td_Rd,        iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */  iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */  iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */  iemOp_wrmsr,            iemOp_rdtsc,            iemOp_rdmsr,            iemOp_rdpmc,
    /* 0x34 */  iemOp_sysenter,         iemOp_sysexit,          iemOp_Invalid,          iemOp_getsec,
    /* 0x38 */  iemOp_3byte_Esc_A4,     iemOp_Invalid,          iemOp_3byte_Esc_A5,     iemOp_Invalid,
    /* 0x3c */  iemOp_movnti_Gv_Ev/*?*/,iemOp_Invalid,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x40 */  iemOp_cmovo_Gv_Ev,      iemOp_cmovno_Gv_Ev,     iemOp_cmovc_Gv_Ev,      iemOp_cmovnc_Gv_Ev,
    /* 0x44 */  iemOp_cmove_Gv_Ev,      iemOp_cmovne_Gv_Ev,     iemOp_cmovbe_Gv_Ev,     iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */  iemOp_cmovs_Gv_Ev,      iemOp_cmovns_Gv_Ev,     iemOp_cmovp_Gv_Ev,      iemOp_cmovnp_Gv_Ev,
    /* 0x4c */  iemOp_cmovl_Gv_Ev,      iemOp_cmovnl_Gv_Ev,     iemOp_cmovle_Gv_Ev,     iemOp_cmovnle_Gv_Ev,
    /* 0x50 */  iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */  iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */  iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */  iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */  iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */  iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */  iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */  iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */  iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */  iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */  iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */  iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */  iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */  iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */  iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */  iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */  iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */  iemOp_Grp12,
    /* 0x72 */  iemOp_Grp13,
    /* 0x73 */  iemOp_Grp14,
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */  iemOp_emms,
    /* 0x78 */  iemOp_vmread,           iemOp_vmwrite,          iemOp_Invalid,          iemOp_Invalid,
    /* 0x7c */  iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */  iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */  iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */  iemOp_jo_Jv,            iemOp_jno_Jv,           iemOp_jc_Jv,            iemOp_jnc_Jv,
    /* 0x84 */  iemOp_je_Jv,            iemOp_jne_Jv,           iemOp_jbe_Jv,           iemOp_jnbe_Jv,
    /* 0x88 */  iemOp_js_Jv,            iemOp_jns_Jv,           iemOp_jp_Jv,            iemOp_jnp_Jv,
    /* 0x8c */  iemOp_jl_Jv,            iemOp_jnl_Jv,           iemOp_jle_Jv,           iemOp_jnle_Jv,
    /* 0x90 */  iemOp_seto_Jv,          iemOp_setno_Jv,         iemOp_setc_Jv,          iemOp_setnc_Jv,
    /* 0x94 */  iemOp_sete_Jv,          iemOp_setne_Jv,         iemOp_setbe_Jv,         iemOp_setnbe_Jv,
    /* 0x98 */  iemOp_sets_Jv,          iemOp_setns_Jv,         iemOp_setp_Jv,          iemOp_setnp_Jv,
    /* 0x9c */  iemOp_setl_Jv,          iemOp_setnl_Jv,         iemOp_setle_Jv,         iemOp_setnle_Jv,
    /* 0xa0 */  iemOp_push_fs,          iemOp_pop_fs,           iemOp_cpuid,            iemOp_bt_Ev_Gv,
    /* 0xa4 */  iemOp_shld_Ev_Gv_Ib,    iemOp_shld_Ev_Gv_CL,    iemOp_Invalid,          iemOp_Invalid,
    /* 0xa8 */  iemOp_push_gs,          iemOp_pop_gs,           iemOp_rsm,              iemOp_bts_Ev_Gv,
    /* 0xac */  iemOp_shrd_Ev_Gv_Ib,    iemOp_shrd_Ev_Gv_CL,    iemOp_Grp15,            iemOp_imul_Gv_Ev,
    /* 0xb0 */  iemOp_cmpxchg_Eb_Gb,    iemOp_cmpxchg_Ev_Gv,    iemOp_lss_Gv_Mp,        iemOp_btr_Ev_Gv,
    /* 0xb4 */  iemOp_lfs_Gv_Mp,        iemOp_lgs_Gv_Mp,        iemOp_movzx_Gv_Eb,      iemOp_movzx_Gv_Ew,
    /* 0xb8 */  iemOp_popcnt_Gv_Ev_jmpe,iemOp_Grp10,            iemOp_Grp11,            iemOp_btc_Ev_Gv,
    /* 0xbc */  iemOp_bsf_Gv_Ev,        iemOp_bsr_Gv_Ev,        iemOp_movsx_Gv_Eb,      iemOp_movsx_Gv_Ew,
    /* 0xc0 */  iemOp_xadd_Eb_Gb,
    /* 0xc1 */  iemOp_xadd_Ev_Gv,
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,
    /* 0xc4 */  iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */  iemOp_Grp9,
    /* 0xc8 */  iemOp_bswap_rAX_r8,     iemOp_bswap_rCX_r9,     iemOp_bswap_rDX_r10,    iemOp_bswap_rBX_r11,
    /* 0xcc */  iemOp_bswap_rSP_r12,    iemOp_bswap_rBP_r13,    iemOp_bswap_rSI_r14,    iemOp_bswap_rDI_r15,
    /* 0xd0 */  iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */  iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */  iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */  iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */  iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */  iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */  iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */  iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */  iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */  iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */  iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */  iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */  iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */  iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */  iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */  iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */  iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */  iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */  iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */  iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */  iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */  iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */  iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */  iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */  iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */  iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */  iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */  iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */  iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */  iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */  iemOp_Invalid
};
2343
2344/** @} */
2345
2346
2347/** @name One byte opcodes.
2348 *
2349 * @{
2350 */
2351
/*
 * Opcodes 0x00-0x05: the ADD family.  Each handler just routes to the
 * shared binary-operator helper for its addressing form, passing the ADD
 * implementation table (g_iemAImpl_add); the helper performs all ModRM
 * decoding, operand fetching and EFLAGS handling (see
 * iemOpHlpBinaryOperator_rm_r8 at the top of this file for the pattern).
 */

/** Opcode 0x00 - ADD Eb,Gb: byte reg/mem destination, byte reg source. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - ADD Ev,Gv: word/dword/qword reg/mem destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - ADD Gb,Eb: byte register destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - ADD Gv,Ev: word/dword/qword register destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - ADD AL,Ib: immediate byte into AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - ADD rAX,Iz: immediate word/dword into rAX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06 - PUSH ES (invalid in 64-bit mode; handled by the common
 *  segment-register push worker). */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
2406
2407
2408/** Opcode 0x07. */
2409FNIEMOP_DEF(iemOp_pop_ES)
2410{
2411 IEMOP_MNEMONIC("pop es");
2412 IEMOP_HLP_NO_64BIT();
2413 IEMOP_HLP_NO_LOCK_PREFIX();
2414 return IEM_MC_DEFER_TO_CIMPL_2(iemOpCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
2415}
2416
2417
/** Opcode 0x08 - OR Eb,Gb: byte reg/mem destination; dispatches to the
 *  shared binary-operator helper with the OR implementation table. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
2424
2425
2426/** Opcode 0x09. */
2427FNIEMOP_DEF(iemOp_or_Ev_Gv)
2428{
2429 IEMOP_MNEMONIC("or Ev,Gv ");
2430 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
2431}
2432
2433
/** Opcode 0x0a - OR Gb,Eb: byte register destination. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - OR Gv,Ev: word/dword/qword register destination. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - OR AL,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - OR rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - PUSH CS (invalid in 64-bit mode; common worker checks). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - two-byte opcode escape: fetches the second opcode byte and
 *  dispatches through g_apfnTwoByteMap (defined above). */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
2480
/*
 * Opcodes 0x10-0x15: the ADC (add-with-carry) family; same routing pattern
 * as the ADD handlers above, using the ADC implementation table.
 */

/** Opcode 0x10 - ADC Eb,Gb. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - ADC Ev,Gv. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - ADC Gb,Eb. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - ADC Gv,Ev. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - ADC AL,Ib. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - ADC rAX,Iz. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16 - PUSH SS. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - POP SS: rejects LOCK prefix and 64-bit mode (both \#UD),
 *  then defers to the common segment-register pop worker. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemOpCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
2545
2546
/*
 * Opcodes 0x18-0x1d: the SBB (subtract-with-borrow) family; same routing
 * pattern as ADD/ADC above, using the SBB implementation table.
 */

/** Opcode 0x18 - SBB Eb,Gb. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - SBB Ev,Gv. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - SBB Gb,Eb. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - SBB Gv,Ev. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - SBB AL,Ib. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - SBB rAX,Iz. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e - PUSH DS. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - POP DS: rejects LOCK prefix and 64-bit mode (both \#UD),
 *  then defers to the common segment-register pop worker. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemOpCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
2611
2612
/*
 * Opcodes 0x20-0x25: the AND family; same routing pattern, AND table.
 */

/** Opcode 0x20 - AND Eb,Gb. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - AND Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - AND Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - AND Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - AND AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - AND rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/** Opcode 0x26 - ES segment-override prefix: records the prefix flag, sets
 *  the effective segment, then fetches the next opcode byte and restarts
 *  decoding through the one-byte map. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - DAA (not yet implemented). */
FNIEMOP_STUB(iemOp_daa);
2674
2675
/*
 * Opcodes 0x28-0x2d: the SUB family; same routing pattern, SUB table.
 */

/** Opcode 0x28 - SUB Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/** Opcode 0x2e - CS segment-override prefix: records the prefix flag, sets
 *  the effective segment, then restarts decode at the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg   = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - DAS (not yet implemented). */
FNIEMOP_STUB(iemOp_das);
2737
2738
/*
 * Opcodes 0x30-0x35: the XOR family; same routing pattern, XOR table.
 */

/** Opcode 0x30 - XOR Eb,Gb. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/** Opcode 0x36 - SS segment-override prefix: records the prefix flag, sets
 *  the effective segment, then restarts decode at the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg   = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - AAA (not yet implemented). */
FNIEMOP_STUB(iemOp_aaa);


/*
 * Opcodes 0x38-0x3d: the CMP family (same helpers with the CMP table; the
 * destination is only read, EFLAGS is the sole output).
 *
 * NOTE(review): only the 0x38/0x39 handlers reject the LOCK prefix up
 * front; CMP never allows LOCK in any form, so 0x3a-0x3d presumably rely
 * on a check elsewhere — confirm and make the four handlers consistent.
 */

/** Opcode 0x38 - CMP Eb,Gb. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/** Opcode 0x3e - DS segment-override prefix: records the prefix flag, sets
 *  the effective segment, then restarts decode at the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg   = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - AAS (not yet implemented). */
FNIEMOP_STUB(iemOp_aas);
2865
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits the MC block for a unary operation on a general-purpose register,
 * switching on the effective operand size (16/32/64-bit) and calling the
 * matching worker from the supplied implementation table.
 *
 * @param   pImpl   Pointer to the unary-operation implementation table
 *                  (pfnNormalU16/U32/U64 workers).
 * @param   iReg    The general register index (X86_GREG_xXX), REX adjusted
 *                  by the caller where applicable.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* All IEMMODE values are handled above; defensive fallback only. */
    return VINF_SUCCESS;
}
2909
2910
/*
 * Opcodes 0x40-0x47: INC <reg16/32> in 16/32-bit mode; in 64-bit mode these
 * bytes are REX prefixes instead.  Each handler first checks for 64-bit
 * mode, records the REX bits encoded in the low opcode nibble (bit 0 = B,
 * bit 1 = X, bit 2 = R), sets the corresponding uRexB/uRexIndex/uRexReg
 * adjustment (1 << 3, i.e. the high-register bank), and restarts decoding
 * at the next opcode byte.  Otherwise it runs INC via the common unary
 * helper above.
 */

/** Opcode 0x40 - INC eAX, or bare REX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX, or REX.B in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX, or REX.X in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX, or REX.BX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP, or REX.R in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP, or REX.RB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI, or REX.RX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI, or REX.RXB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
3074
3075
3076/** Opcode 0x48. */
3077FNIEMOP_DEF(iemOp_dec_eAX)
3078{
3079 /*
3080 * This is a REX prefix in 64-bit mode.
3081 */
3082 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
3083 {
3084 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
3085 iemRecalEffOpSize(pIemCpu);
3086
3087 uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
3088 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3089 }
3090
3091 IEMOP_MNEMONIC("dec eAX");
3092 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
3093}
3094
3095
3096/** Opcode 0x49. */
3097FNIEMOP_DEF(iemOp_dec_eCX)
3098{
3099 /*
3100 * This is a REX prefix in 64-bit mode.
3101 */
3102 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
3103 {
3104 pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
3105 pIemCpu->uRexB = 1 << 3;
3106 iemRecalEffOpSize(pIemCpu);
3107
3108 uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
3109 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
3110 }
3111
3112 IEMOP_MNEMONIC("dec eCX");
3113 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
3114}
3115
3116
/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4a = REX.WX: 64-bit operand size plus SIB index-register extension. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eDX. */
    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
3136
3137
/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4b = REX.WXB: 64-bit operand size plus base and index extensions. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eBX. */
    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
3158
3159
/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4c = REX.WR: 64-bit operand size plus ModRM reg-field extension. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eSP. */
    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
3179
3180
/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4d = REX.WRB: 64-bit operand size plus reg-field and base extensions. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eBP. */
    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
3201
3202
/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4e = REX.WRX: 64-bit operand size plus reg-field and index extensions. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eSI. */
    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
3223
3224
/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x4f = REX.WRXB: 64-bit operand size plus all three register extensions. */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eDI. */
    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
3246
3247
3248/**
3249 * Common 'push register' helper.
3250 */
3251FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
3252{
3253 IEMOP_HLP_NO_LOCK_PREFIX();
3254 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
3255 {
3256 iReg |= pIemCpu->uRexB;
3257 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
3258 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
3259 }
3260
3261 switch (pIemCpu->enmEffOpSize)
3262 {
3263 case IEMMODE_16BIT:
3264 IEM_MC_BEGIN(0, 1);
3265 IEM_MC_LOCAL(uint16_t, u16Value);
3266 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
3267 IEM_MC_PUSH_U16(u16Value);
3268 IEM_MC_ADVANCE_RIP();
3269 IEM_MC_END();
3270 break;
3271
3272 case IEMMODE_32BIT:
3273 IEM_MC_BEGIN(0, 1);
3274 IEM_MC_LOCAL(uint32_t, u32Value);
3275 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
3276 IEM_MC_PUSH_U32(u32Value);
3277 IEM_MC_ADVANCE_RIP();
3278 IEM_MC_END();
3279 break;
3280
3281 case IEMMODE_64BIT:
3282 IEM_MC_BEGIN(0, 1);
3283 IEM_MC_LOCAL(uint64_t, u64Value);
3284 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
3285 IEM_MC_PUSH_U64(u64Value);
3286 IEM_MC_ADVANCE_RIP();
3287 IEM_MC_END();
3288 break;
3289 }
3290
3291 return VINF_SUCCESS;
3292}
3293
3294
/** Opcode 0x50 - push rAX (r8 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
3301
3302
/** Opcode 0x51 - push rCX (r9 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
3309
3310
/** Opcode 0x52 - push rDX (r10 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
3317
3318
/** Opcode 0x53 - push rBX (r11 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
3325
3326
/** Opcode 0x54 - push rSP; the common worker fetches the value before the
 *  stack pointer is decremented (286+ behavior). */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
3333
3334
/** Opcode 0x55 - push rBP (r13 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
3341
3342
/** Opcode 0x56 - push rSI (r14 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
3349
3350
/** Opcode 0x57 - push rDI (r15 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
3357
3358
3359/**
3360 * Common 'pop register' helper.
3361 */
3362FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
3363{
3364 IEMOP_HLP_NO_LOCK_PREFIX();
3365 if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
3366 {
3367 iReg |= pIemCpu->uRexB;
3368 pIemCpu->enmDefOpSize = IEMMODE_64BIT;
3369 pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
3370 }
3371
3372 switch (pIemCpu->enmEffOpSize)
3373 {
3374 case IEMMODE_16BIT:
3375 IEM_MC_BEGIN(0, 1);
3376 IEM_MC_LOCAL(uint16_t, *pu16Dst);
3377 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
3378 IEM_MC_POP_U16(pu16Dst);
3379 IEM_MC_ADVANCE_RIP();
3380 IEM_MC_END();
3381 break;
3382
3383 case IEMMODE_32BIT:
3384 IEM_MC_BEGIN(0, 1);
3385 IEM_MC_LOCAL(uint32_t, *pu32Dst);
3386 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
3387 IEM_MC_POP_U32(pu32Dst);
3388 IEM_MC_ADVANCE_RIP();
3389 IEM_MC_END();
3390 break;
3391
3392 case IEMMODE_64BIT:
3393 IEM_MC_BEGIN(0, 1);
3394 IEM_MC_LOCAL(uint64_t, *pu64Dst);
3395 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
3396 IEM_MC_POP_U64(pu64Dst);
3397 IEM_MC_ADVANCE_RIP();
3398 IEM_MC_END();
3399 break;
3400 }
3401
3402 return VINF_SUCCESS;
3403}
3404
3405
/** Opcode 0x58 - pop rAX (r8 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
3412
3413
/** Opcode 0x59 - pop rCX (r9 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
3420
3421
/** Opcode 0x5a - pop rDX (r10 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
3428
3429
/** Opcode 0x5b - pop rBX (r11 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
3436
3437
/** Opcode 0x5c - pop rSP; see the NOTE(review) on iemOpCommonPopGReg about
 *  the special stack-pointer-destination ordering. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
3444
3445
/** Opcode 0x5d - pop rBP (r13 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
3452
3453
/** Opcode 0x5e - pop rSI (r14 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
3460
3461
/** Opcode 0x5f - pop rDI (r15 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
3468
3469
/** Opcode 0x60 - PUSHA/PUSHAD.
 *  Not decoded in 64-bit mode (IEMOP_HLP_NO_64BIT); the work is deferred to a
 *  C implementation selected by the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
3480
3481
/** Opcode 0x61 - POPA/POPAD.
 *  Not decoded in 64-bit mode (IEMOP_HLP_NO_64BIT); the work is deferred to a
 *  C implementation selected by the effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* 64-bit excluded above */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
3492
3493
/** Opcode 0x62 - bound Gv,Ma. Not implemented yet (FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma);
/** Opcode 0x63 - arpl Ew,Gw. Not implemented yet (FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_arpl_Ew_Gw);
3498
3499
/** Opcode 0x64 - FS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* Record the prefix, make FS the effective segment, then decode the next
       byte as the actual opcode. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3509
3510
/** Opcode 0x65 - GS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* Record the prefix, make GS the effective segment, then decode the next
       byte as the actual opcode. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3520
3521
/** Opcode 0x66 - operand size override prefix. */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Record the prefix, recalculate the effective operand size, then decode
       the next byte as the actual opcode. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3531
3532
/** Opcode 0x67 - address size override prefix. */
FNIEMOP_DEF(iemOp_addr_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Switch to the alternative address size: 16 <-> 32 in legacy modes,
       64 -> 32 in long mode (16-bit addressing is not selectable there). */
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
3548
3549
/** Opcode 0x68 - push Iz (word/dword immediate sized by the effective
 *  operand size; defaults to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate stays 32 bits on the wire; sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default: for enum completeness */
    }
}
3593
3594
/** Opcode 0x69 - three-operand signed multiply: Gv = Ev * Iz.
 *  The destination is the ModRM reg operand; the product is computed into a
 *  local via the two-operand imul helper (which also updates EFLAGS) and then
 *  written back to the destination register. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Immediate is 32 bits on the wire, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* all IEMMODE values handled above */
}
3740
3741
/** Opcode 0x6a - push Ib (sign-extended byte immediate, pushed at the
 *  effective operand size; defaults to 64-bit in long mode). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    /* i8Imm is signed, so passing it to the wider push performs the
       architectural sign extension. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3767
3768
/** Opcode 0x6b - three-operand signed multiply with byte immediate:
 *  Gv = Ev * Ib (the imm8 is sign-extended to the operand size).
 *  Same structure as opcode 0x69, only the immediate fetch differs. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib; (was mislabelled Iz) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* all IEMMODE values handled above */
}
3904
3905
/** Opcode 0x6c - INSB.
 *  Deferred to C implementations selected by the effective address size.
 *  Both F2 (REPNZ) and F3 (REPZ) are treated as plain REP here. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3933
3934
/** Opcode 0x6d - INSW/INSD.
 *  Deferred to C implementations selected by operand and address size.
 *  64-bit operand size falls through to the 32-bit variants (no 64-bit I/O
 *  port accesses); both F2 and F3 prefixes count as REP. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT: /* fall thru - same as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT: /* fall thru - same as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3994
3995
/** Opcode 0x6e - OUTSB.
 *  Deferred to C implementations selected by the effective address size; the
 *  effective segment (overridable) is passed along since the source is a
 *  memory operand.  Both F2 and F3 prefixes count as REP. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4023
4024
/** Opcode 0x6f - OUTSW/OUTSD.
 *  Deferred to C implementations selected by operand and address size; the
 *  effective segment is passed along.  64-bit operand size falls through to
 *  the 32-bit variants; both F2 and F3 prefixes count as REP. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT: /* fall thru - same as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case IEMMODE_64BIT: /* fall thru - same as 32-bit */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4084
4085
/** Opcode 0x70 - jo Jb: short jump taken when OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4103
4104
/** Opcode 0x71 - jno Jb: short jump taken when OF is clear.
 *  Implemented by testing the set condition and jumping in the ELSE arm. */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4122
/** Opcode 0x72 - jc/jb/jnae Jb: short jump taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4140
4141
/** Opcode 0x73 - jnc/jnb/jae Jb: short jump taken when CF is clear.
 *  Implemented by testing the set condition and jumping in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4159
4160
/** Opcode 0x74 - je/jz Jb: short jump taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4178
4179
/** Opcode 0x75 - jne/jnz Jb: short jump taken when ZF is clear.
 *  Implemented by testing the set condition and jumping in the ELSE arm. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4197
4198
/** Opcode 0x76 - jbe/jna Jb: short jump taken when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4216
4217
/** Opcode 0x77 - jnbe/ja Jb: short jump taken when both CF and ZF are clear.
 *  Implemented by testing the set condition and jumping in the ELSE arm. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4235
4236
/** Opcode 0x78 - js Jb: jump short if SF is set. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4254
4255
/** Opcode 0x79 - jns Jb: jump short if SF is clear. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: SF set means fall through. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4273
4274
/** Opcode 0x7a - jp/jpe Jb: jump short if PF is set. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4292
4293
/** Opcode 0x7b - jnp/jpo Jb: jump short if PF is clear. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: PF set means fall through. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4311
4312
/** Opcode 0x7c - jl/jnge Jb: jump short if SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4330
4331
/** Opcode 0x7d - jnl/jge Jb: jump short if SF == OF (signed greater-or-equal). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: SF != OF means fall through. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4349
4350
/** Opcode 0x7e - jle/jng Jb: jump short if ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4368
4369
/** Opcode 0x7f - jnle/jg Jb: jump short if ZF is clear and SF == OF (signed greater). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Jcc defaults to 64-bit operand size in long mode. */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: ZF set or SF != OF means fall through. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4387
4388
/**
 * Opcode 0x80 - group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 * The ModR/M reg field selects which of the eight operations to perform.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    /* The mnemonic string is a table of 4-byte entries ("add\0", "or\0\0", ...) indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - no locked variant, destination is only read, LOCK prefix is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate follows the ModR/M + displacement bytes, so it must be
           fetched after the effective address calculation has consumed those. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4447
4448
/**
 * Opcode 0x81 - group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 * The ModR/M reg field selects the operation; the immediate is operand sized
 * (16/32-bit), with the 64-bit form using a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    /* The mnemonic string is a table of 4-byte entries ("add\0", "or\0\0", ...) indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, read-only access, LOCK prefix is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the ModR/M + displacement bytes; fetch it after
                   the effective address calculation has consumed those. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, read-only access, LOCK prefix is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* No 64-bit immediate form; a 32-bit immediate is sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - no locked variant, read-only access, LOCK prefix is invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
4622
4623
/** Opcode 0x82 - alias of opcode 0x80 (group 1 Eb,Ib); invalid in 64-bit mode. */
 FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4630
4631
/**
 * Opcode 0x83 - group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 * The ModR/M reg field selects the operation; the byte immediate is
 * sign-extended to the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    /* The mnemonic string is a table of 4-byte entries ("add\0", "or\0\0", ...) indexed by the reg field. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend the byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend the byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend the byte immediate */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - no locked variant, read-only access, LOCK prefix is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the ModR/M + displacement bytes; fetch it after
                   the effective address calculation has consumed those. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
4791
4792
/** Opcode 0x84 - test Eb,Gb; shares the generic byte binary-operator decoder. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
4800
4801
/** Opcode 0x85 - test Ev,Gv; shares the generic operand-sized binary-operator decoder. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
4809
4810
/** Opcode 0x86 - xchg Eb,Gb. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: swap via two fetches and two stores. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the memory operand read-write and let the assembly helper do the swap. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4857
4858
/** Opcode 0x87 - xchg Ev,Gv. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: swap via two fetches and two stores, per operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all operand sizes handled above */
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the memory operand read-write and let the assembly helper do the swap. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* all operand sizes handled above */
        }
    }
}
4978
4979
/** Opcode 0x88 - mov Eb,Gb. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch the reg operand, store into the r/m register. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
5018
5019
/** Opcode 0x89 - mov Ev,Gv. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register, per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
5107
5108
/** Opcode 0x8a - mov Gb,Eb. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: fetch the r/m register, store into the reg operand. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5146
5147
/** Opcode 0x8b - mov Gv,Ev. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register, per effective operand size. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
5235
5236
/** Opcode 0x8c - MOV Ev,Sw: store a segment register into a GPR or memory. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     * reg values above GS (5) do not name a segment register and yield #UD.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extend the selector into the 32-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extend the selector into the 64-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5310
5311
5312
5313
5314/** Opcode 0x8d. */
5315FNIEMOP_DEF(iemOp_lea_Gv_M)
5316{
5317 IEMOP_MNEMONIC("lea Gv,M");
5318 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
5319 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5321 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */
5322
5323 switch (pIemCpu->enmEffOpSize)
5324 {
5325 case IEMMODE_16BIT:
5326 IEM_MC_BEGIN(0, 1);
5327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
5329 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
5330 IEM_MC_ADVANCE_RIP();
5331 IEM_MC_END();
5332 return VINF_SUCCESS;
5333
5334 case IEMMODE_32BIT:
5335 IEM_MC_BEGIN(0, 1);
5336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
5338 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
5339 IEM_MC_ADVANCE_RIP();
5340 IEM_MC_END();
5341 return VINF_SUCCESS;
5342
5343 case IEMMODE_64BIT:
5344 IEM_MC_BEGIN(0, 1);
5345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
5347 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
5348 IEM_MC_ADVANCE_RIP();
5349 IEM_MC_END();
5350 return VINF_SUCCESS;
5351 }
5352 AssertFailedReturn(VERR_INTERNAL_ERROR_5);
5353}
5354
5355
/** Opcode 0x8e - MOV Sw,Ev: load a segment register from a GPR or memory. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  CS cannot be loaded with
     * MOV, and reg values above GS do not name a segment register.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * The segment load itself (descriptor checks, SS special handling, RIP
     * advance) is delegated to iemCImpl_LoadSReg.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_LoadSReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_LoadSReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5409
5410
5411/** Opcode 0x8f. */
5412FNIEMOP_STUB(iemOp_pop_Ev);
5413
5414
5415/**
5416 * Common 'xchg reg,rAX' helper.
5417 */
5418FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5419{
5420 IEMOP_HLP_NO_LOCK_PREFIX();
5421
5422 iReg |= pIemCpu->uRexB;
5423 switch (pIemCpu->enmEffOpSize)
5424 {
5425 case IEMMODE_16BIT:
5426 IEM_MC_BEGIN(0, 2);
5427 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5428 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5429 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5430 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5431 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5432 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5433 IEM_MC_ADVANCE_RIP();
5434 IEM_MC_END();
5435 return VINF_SUCCESS;
5436
5437 case IEMMODE_32BIT:
5438 IEM_MC_BEGIN(0, 2);
5439 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5440 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5441 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5442 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5443 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5444 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5445 IEM_MC_ADVANCE_RIP();
5446 IEM_MC_END();
5447 return VINF_SUCCESS;
5448
5449 case IEMMODE_64BIT:
5450 IEM_MC_BEGIN(0, 2);
5451 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5452 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5453 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5454 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5455 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5456 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5457 IEM_MC_ADVANCE_RIP();
5458 IEM_MC_END();
5459 return VINF_SUCCESS;
5460
5461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5462 }
5463}
5464
5465
5466/** Opcode 0x90. */
5467FNIEMOP_DEF(iemOp_nop)
5468{
5469 /* R8/R8D and RAX/EAX can be exchanged. */
5470 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
5471 {
5472 IEMOP_MNEMONIC("xchg r8,rAX");
5473 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5474 }
5475
5476 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
5477 IEMOP_MNEMONIC("pause");
5478 else
5479 IEMOP_MNEMONIC("nop");
5480 IEM_MC_BEGIN(0, 0);
5481 IEM_MC_ADVANCE_RIP();
5482 IEM_MC_END();
5483 return VINF_SUCCESS;
5484}
5485
5486
/** Opcode 0x91 - XCHG rCX,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
5493
5494
/** Opcode 0x92 - XCHG rDX,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
5501
5502
/** Opcode 0x93 - XCHG rBX,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
5509
5510
5511/** Opcode 0x94. */
5512FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5513{
5514 IEMOP_MNEMONIC("xchg rSX,rAX");
5515 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5516}
5517
5518
/** Opcode 0x95 - XCHG rBP,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
5525
5526
/** Opcode 0x96 - XCHG rSI,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
5533
5534
/** Opcode 0x97 - XCHG rDI,rAX (thin wrapper around the common helper). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
5541
5542
5543/** Opcode 0x98. */
5544FNIEMOP_STUB(iemOp_cbw);
5545
5546
5547/** Opcode 0x99. */
5548FNIEMOP_DEF(iemOp_cwd)
5549{
5550 IEMOP_HLP_NO_LOCK_PREFIX();
5551 switch (pIemCpu->enmEffOpSize)
5552 {
5553 case IEMMODE_16BIT:
5554 IEMOP_MNEMONIC("cwd");
5555 IEM_MC_BEGIN(0, 1);
5556 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5557 IEM_MC_STORE_GREG_U16(X86_GREG_xDX, UINT16_C(0xffff));
5558 } IEM_MC_ELSE() {
5559 IEM_MC_STORE_GREG_U16(X86_GREG_xDX, 0);
5560 } IEM_MC_ENDIF();
5561 IEM_MC_ADVANCE_RIP();
5562 IEM_MC_END();
5563 return VINF_SUCCESS;
5564
5565 case IEMMODE_32BIT:
5566 IEMOP_MNEMONIC("cwq");
5567 IEM_MC_BEGIN(0, 1);
5568 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5569 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, UINT32_C(0xffffffff));
5570 } IEM_MC_ELSE() {
5571 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, 0);
5572 } IEM_MC_ENDIF();
5573 IEM_MC_ADVANCE_RIP();
5574 IEM_MC_END();
5575 return VINF_SUCCESS;
5576
5577 case IEMMODE_64BIT:
5578 IEMOP_MNEMONIC("cqo");
5579 IEM_MC_BEGIN(0, 1);
5580 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5581 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5582 } IEM_MC_ELSE() {
5583 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, 0);
5584 } IEM_MC_ENDIF();
5585 IEM_MC_ADVANCE_RIP();
5586 IEM_MC_END();
5587 return VINF_SUCCESS;
5588
5589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5590 }
5591}
5592
5593
5594/** Opcode 0x9a. */
5595FNIEMOP_STUB(iemOp_call_Ap);
5596/** Opcode 0x9b. */
5597FNIEMOP_STUB(iemOp_wait);
5598
5599
/** Opcode 0x9c - PUSHF/PUSHFD/PUSHFQ: push EFLAGS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode the default operand size is 64-bit (no 32-bit form). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
5607
5608
/** Opcode 0x9d - POPF/POPFD/POPFQ: pop EFLAGS; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* In 64-bit mode the default operand size is 64-bit (no 32-bit form). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
5616
5617
5618/** Opcode 0x9e. */
5619FNIEMOP_STUB(iemOp_sahf);
5620/** Opcode 0x9f. */
5621FNIEMOP_STUB(iemOp_lahf);
5622
5623/**
5624 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5625 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
5626 * prefixes. Will return on failures.
5627 * @param a_GCPtrMemOff The variable to store the offset in.
5628 */
5629#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5630 do \
5631 { \
5632 switch (pIemCpu->enmEffAddrMode) \
5633 { \
5634 case IEMMODE_16BIT: \
5635 { \
5636 uint16_t u16Off; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Off); \
5637 (a_GCPtrMemOff) = u16Off; \
5638 break; \
5639 } \
5640 case IEMMODE_32BIT: \
5641 { \
5642 uint32_t u32Off; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Off); \
5643 (a_GCPtrMemOff) = u32Off; \
5644 break; \
5645 } \
5646 case IEMMODE_64BIT: \
5647 IEM_OPCODE_GET_NEXT_U64(pIemCpu, &(a_GCPtrMemOff)); \
5648 break; \
5649 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5650 } \
5651 IEMOP_HLP_NO_LOCK_PREFIX(); \
5652 } while (0)
5653
/** Opcode 0xa0 - MOV AL,Ob: load AL from a direct memory offset (moffs8). */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5674
5675
/** Opcode 0xa1 - MOV rAX,Ov: load AX/EAX/RAX from a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX at the current effective operand size.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5720
5721
/** Opcode 0xa2 - MOV Ob,AL: store AL to a direct memory offset (moffs8). */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5742
5743
/** Opcode 0xa3 - MOV Ov,rAX: store AX/EAX/RAX to a direct memory offset. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX at the current effective operand size.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5788
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: load ValBits from [iEffSeg:xSI], store to
 * [ES:xDI], then advance or retreat both index registers by ValBits/8
 * depending on EFLAGS.DF.  AddrBits selects the index-register width.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(uint##AddrBits##_t,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

5808
/** Opcode 0xa4 - MOVSB: move byte from [DS:xSI] to [ES:xDI]. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5842
5843
/** Opcode 0xa5 - MOVSW/MOVSD/MOVSQ: move word/dword/qword from [DS:xSI] to [ES:xDI]. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
5926
5927#undef IEM_MOVS_CASE
5928
5929/** Opcode 0xa6. */
5930FNIEMOP_STUB(iemOp_cmpsb_Xb_Yb);
5931/** Opcode 0xa7. */
5932FNIEMOP_STUB(iemOp_cmpswd_Xv_Yv);
5933
5934
/** Opcode 0xa8 - TEST AL,Ib: AND AL with an immediate, updating flags only. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5941
5942
/** Opcode 0xa9 - TEST rAX,Iz: AND rAX with an immediate, updating flags only. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5949
5950
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS step: store the low ValBits of rAX to
 * [ES:xDI], then advance or retreat xDI by ValBits/8 depending on EFLAGS.DF.
 * AddrBits selects the index-register width.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(uint##AddrBits##_t,  uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/** Opcode 0xaa - STOSB: store AL to [ES:xDI]. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
6000
6001
/** Opcode 0xab - STOSW/STOSD/STOSQ: store AX/EAX/RAX to [ES:xDI]. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
6084
6085#undef IEM_STOS_CASE
6086
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS step: load ValBits from [iEffSeg:xSI] into the
 * low ValBits of rAX, then advance or retreat xSI by ValBits/8 depending on
 * EFLAGS.DF.  AddrBits selects the index-register width.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(uint##AddrBits##_t,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
6102
/** Opcode 0xac - LODSB: load AL from [DS:xSI]. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
6136
6137
/** Opcode 0xad - LODSW/LODSD/LODSQ: load AX/EAX/RAX from [DS:xSI]. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
6220
6221#undef IEM_LODS_CASE
6222
6223/** Opcode 0xae. */
6224FNIEMOP_STUB(iemOp_scasb_AL_Xb);
6225/** Opcode 0xaf. */
6226FNIEMOP_STUB(iemOp_scaswd_eAX_Xv);
6227
6228/**
6229 * Common 'mov r8, imm8' helper.
6230 */
6231FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
6232{
6233 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
6234 IEMOP_HLP_NO_LOCK_PREFIX();
6235
6236 IEM_MC_BEGIN(0, 1);
6237 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6238 IEM_MC_STORE_GREG_U8(iReg, u8Value);
6239 IEM_MC_ADVANCE_RIP();
6240 IEM_MC_END();
6241
6242 return VINF_SUCCESS;
6243}
6244
6245
/** Opcode 0xb0 - MOV AL,Ib (SPL with REX). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX);
}
6252
6253
/** Opcode 0xb1 - MOV CL,Ib. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX);
}
6260
6261
/** Opcode 0xb2 - MOV DL,Ib. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX);
}
6268
6269
/** Opcode 0xb3 - MOV BL,Ib. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX);
}
6276
6277
/** Opcode 0xb4 - MOV AH,Ib (SPL with REX); register index 4 encodes AH without REX. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP);
}
6284
6285
/** Opcode 0xb5 - MOV CH,Ib (BPL with REX); register index 5 encodes CH without REX. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP);
}
6292
6293
/** Opcode 0xb6 - MOV DH,Ib (SIL with REX); register index 6 encodes DH without REX. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI);
}
6300
6301
/** Opcode 0xb7 - MOV BH,Ib (DIL with REX); register index 7 encodes BH without REX. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI);
}
6308
6309
6310/**
6311 * Common 'mov regX,immX' helper.
6312 */
6313FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
6314{
6315 switch (pIemCpu->enmEffOpSize)
6316 {
6317 case IEMMODE_16BIT:
6318 {
6319 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
6320 IEMOP_HLP_NO_LOCK_PREFIX();
6321
6322 IEM_MC_BEGIN(0, 1);
6323 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6324 IEM_MC_STORE_GREG_U16(iReg, u16Value);
6325 IEM_MC_ADVANCE_RIP();
6326 IEM_MC_END();
6327 break;
6328 }
6329
6330 case IEMMODE_32BIT:
6331 {
6332 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
6333 IEMOP_HLP_NO_LOCK_PREFIX();
6334
6335 IEM_MC_BEGIN(0, 1);
6336 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6337 IEM_MC_STORE_GREG_U32(iReg, u32Value);
6338 IEM_MC_ADVANCE_RIP();
6339 IEM_MC_END();
6340 break;
6341 }
6342 case IEMMODE_64BIT:
6343 {
6344 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(pIemCpu, &u64Imm);
6345 IEMOP_HLP_NO_LOCK_PREFIX();
6346
6347 IEM_MC_BEGIN(0, 1);
6348 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6349 IEM_MC_STORE_GREG_U64(iReg, u64Value);
6350 IEM_MC_ADVANCE_RIP();
6351 IEM_MC_END();
6352 break;
6353 }
6354 }
6355
6356 return VINF_SUCCESS;
6357}
6358
6359
/** Opcode 0xb8.  MOV rAX,Iv. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC("mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX);
}
6366
6367
/** Opcode 0xb9.  MOV rCX,Iv. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC("mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX);
}
6374
6375
/** Opcode 0xba.  MOV rDX,Iv. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC("mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX);
}
6382
6383
/** Opcode 0xbb.  MOV rBX,Iv. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC("mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX);
}
6390
6391
/** Opcode 0xbc.  MOV rSP,Iv. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC("mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP);
}
6398
6399
/** Opcode 0xbd.  MOV rBP,Iv. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC("mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP);
}
6406
6407
/** Opcode 0xbe.  MOV rSI,Iv. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC("mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI);
}
6414
6415
/** Opcode 0xbf.  MOV rDI,Iv. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC("mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI);
}
6422
6423
/** Opcode 0xc0.  Group 2: rotate/shift Eb by imm8.
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar).
 * NOTE(review): /6 returns IEMOP_RAISE_INVALID_LOCK_PREFIX - on real CPUs /6
 * is the SAL alias of SHL; confirm whether raising here is intentional and
 * whether #UD would be the right exception if it is meant to be invalid. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
#ifdef IEM_VERIFICATION_MODE
        /* Multi-bit shifts leave OF undefined; tell the verifier to ignore it. */
        if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The imm8 shift count follows the ModR/M displacement bytes. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
#ifdef IEM_VERIFICATION_MODE
        if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6487
6488
/** Opcode 0xc1.  Group 2: rotate/shift Ev by imm8.
 * Same operation selection via ModR/M reg field as 0xc0, but with
 * 16/32/64-bit operand sizes.
 * NOTE(review): /6 handling - see the remark on iemOp_Grp2_Eb_Ib. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
#ifdef IEM_VERIFICATION_MODE
        /* Multi-bit shifts leave OF undefined; tell the verifier to ignore it. */
        if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The imm8 shift count follows the displacement bytes. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
#ifdef IEM_VERIFICATION_MODE
                if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
#ifdef IEM_VERIFICATION_MODE
                if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &cShift);
#ifdef IEM_VERIFICATION_MODE
                if (cShift > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6635
6636
/** Opcode 0xc2.  RETN Iw - near return, popping Iw extra bytes. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
6646
6647
/** Opcode 0xc3.  RETN - near return (no extra pop; 0 passed as cbPop). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
6656
6657
/** Opcode 0xc4.  LES Gv,Mp - load far pointer into ES:Gv. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}
6664
6665
/** Opcode 0xc5.  LDS Gv,Mp - load far pointer into DS:Gv. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
6672
6673
/** Opcode 0xc6.  Group 11: MOV Eb,Ib (only /0 is defined).
 * NOTE(review): undefined encodings /1../7 return
 * IEMOP_RAISE_INVALID_LOCK_PREFIX; #UD (invalid opcode) would seem the
 * architecturally correct exception - confirm. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_LOCK_PREFIX();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The imm8 follows the displacement bytes, so fetch it after
           calculating the effective address. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6705
6706
6707/** Opcode 0xc7. */
6708FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6709{
6710 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
6711 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6712 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6713 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
6714 IEMOP_MNEMONIC("mov Ev,Iz");
6715
6716 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6717 {
6718 /* register access */
6719 switch (pIemCpu->enmEffOpSize)
6720 {
6721 case IEMMODE_16BIT:
6722 IEM_MC_BEGIN(0, 0);
6723 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
6724 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
6725 IEM_MC_ADVANCE_RIP();
6726 IEM_MC_END();
6727 return VINF_SUCCESS;
6728
6729 case IEMMODE_32BIT:
6730 IEM_MC_BEGIN(0, 0);
6731 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
6732 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
6733 IEM_MC_ADVANCE_RIP();
6734 IEM_MC_END();
6735 return VINF_SUCCESS;
6736
6737 case IEMMODE_64BIT:
6738 IEM_MC_BEGIN(0, 0);
6739 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(pIemCpu, &u64Imm);
6740 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
6741 IEM_MC_ADVANCE_RIP();
6742 IEM_MC_END();
6743 return VINF_SUCCESS;
6744
6745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6746 }
6747 }
6748 else
6749 {
6750 /* memory access. */
6751 switch (pIemCpu->enmEffOpSize)
6752 {
6753 case IEMMODE_16BIT:
6754 IEM_MC_BEGIN(0, 1);
6755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6757 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
6758 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
6759 IEM_MC_ADVANCE_RIP();
6760 IEM_MC_END();
6761 return VINF_SUCCESS;
6762
6763 case IEMMODE_32BIT:
6764 IEM_MC_BEGIN(0, 1);
6765 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6767 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
6768 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
6769 IEM_MC_ADVANCE_RIP();
6770 IEM_MC_END();
6771 return VINF_SUCCESS;
6772
6773 case IEMMODE_64BIT:
6774 IEM_MC_BEGIN(0, 1);
6775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6777 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(pIemCpu, &u64Imm);
6778 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
6779 IEM_MC_ADVANCE_RIP();
6780 IEM_MC_END();
6781 return VINF_SUCCESS;
6782
6783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6784 }
6785 }
6786}
6787
6788
6789
6790
/** Opcode 0xc8.  ENTER Iw,Ib - not yet implemented. */
FNIEMOP_STUB(iemOp_enter_Iw_Ib);
/** Opcode 0xc9.  LEAVE - not yet implemented. */
FNIEMOP_STUB(iemOp_leave);
6795
6796
/** Opcode 0xca.  RETF Iw - far return, popping Iw extra bytes. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
6806
6807
/** Opcode 0xcb.  RETF - far return (no extra pop; 0 passed as cbPop). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
6816
6817
/** Opcode 0xcc.  INT3 - breakpoint; raised as #BP with fIsBpInstr set so the
 * implementation can apply the special single-byte-INT3 privilege rules.
 * NOTE(review): unlike its neighbours this handler has no IEMOP_MNEMONIC or
 * IEMOP_HLP_NO_LOCK_PREFIX call - confirm whether that is intentional. */
FNIEMOP_DEF(iemOp_int_3)
{
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
6823
6824
/** Opcode 0xcd.  INT Ib - software interrupt with vector from imm8. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Int);
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
6831
6832
/** Opcode 0xce.  INTO - raise #OF (vector 4) interrupt.
 * NOTE(review): the conditional part (only trap when EFLAGS.OF is set) and
 * the fact that INTO is #UD in 64-bit mode are not visible here - presumably
 * iemCImpl_int handles the OF check; confirm both. */
FNIEMOP_DEF(iemOp_into)
{
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6843
6844
/** Opcode 0xcf.  IRET - interrupt return; deferred to the C implementation
 * with the current effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
6852
6853
/** Opcode 0xd0.  Group 2: rotate/shift Eb by 1 (constant count).
 * NOTE(review): /6 handling - see the remark on iemOp_Grp2_Eb_Ib. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6908
6909
6910
6911/** Opcode 0xd1. */
6912FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6913{
6914 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
6915 PCIEMOPSHIFTSIZES pImpl;
6916 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6917 {
6918 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
6919 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
6920 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
6921 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
6922 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
6923 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
6924 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
6925 case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
6926 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6927 }
6928
6929 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6930 {
6931 /* register */
6932 IEMOP_HLP_NO_LOCK_PREFIX();
6933 switch (pIemCpu->enmEffOpSize)
6934 {
6935 case IEMMODE_16BIT:
6936 IEM_MC_BEGIN(3, 0);
6937 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6938 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6939 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6940 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6941 IEM_MC_REF_EFLAGS(pEFlags);
6942 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6943 IEM_MC_ADVANCE_RIP();
6944 IEM_MC_END();
6945 return VINF_SUCCESS;
6946
6947 case IEMMODE_32BIT:
6948 IEM_MC_BEGIN(3, 0);
6949 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6950 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6951 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6952 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6953 IEM_MC_REF_EFLAGS(pEFlags);
6954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6955 IEM_MC_ADVANCE_RIP();
6956 IEM_MC_END();
6957 return VINF_SUCCESS;
6958
6959 case IEMMODE_64BIT:
6960 IEM_MC_BEGIN(3, 0);
6961 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6962 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6963 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6964 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6965 IEM_MC_REF_EFLAGS(pEFlags);
6966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6967 IEM_MC_ADVANCE_RIP();
6968 IEM_MC_END();
6969 return VINF_SUCCESS;
6970
6971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6972 }
6973 }
6974 else
6975 {
6976 /* memory */
6977 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
6978 switch (pIemCpu->enmEffOpSize)
6979 {
6980 case IEMMODE_16BIT:
6981 IEM_MC_BEGIN(3, 2);
6982 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6983 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6984 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6986
6987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
6988 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6989 IEM_MC_FETCH_EFLAGS(EFlags);
6990 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6991
6992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6993 IEM_MC_COMMIT_EFLAGS(EFlags);
6994 IEM_MC_ADVANCE_RIP();
6995 IEM_MC_END();
6996 return VINF_SUCCESS;
6997
6998 case IEMMODE_32BIT:
6999 IEM_MC_BEGIN(3, 2);
7000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7001 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7002 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7004
7005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7006 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7007 IEM_MC_FETCH_EFLAGS(EFlags);
7008 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7009
7010 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7011 IEM_MC_COMMIT_EFLAGS(EFlags);
7012 IEM_MC_ADVANCE_RIP();
7013 IEM_MC_END();
7014 return VINF_SUCCESS;
7015
7016 case IEMMODE_64BIT:
7017 IEM_MC_BEGIN(3, 2);
7018 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7019 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7020 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7022
7023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7024 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7025 IEM_MC_FETCH_EFLAGS(EFlags);
7026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7027
7028 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7029 IEM_MC_COMMIT_EFLAGS(EFlags);
7030 IEM_MC_ADVANCE_RIP();
7031 IEM_MC_END();
7032 return VINF_SUCCESS;
7033
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037}
7038
7039
/** Opcode 0xd2.  Group 2: rotate/shift Eb by CL.
 * NOTE(review): /6 handling - see the remark on iemOp_Grp2_Eb_Ib.  The
 * IEM_VERIFICATION_MODE check reads cShiftArg directly in the decoder, which
 * presumably works because IEM_MC_ARG declares a real local in this build
 * configuration - confirm. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
#ifdef IEM_VERIFICATION_MODE
        if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
#ifdef IEM_VERIFICATION_MODE
        if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7102
7103
/** Opcode 0xd3.  Group 2: rotate/shift Ev by CL.
 * NOTE(review): /6 handling - see the remark on iemOp_Grp2_Eb_Ib. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef IEM_VERIFICATION_MODE
                /* Multi-bit shifts leave OF undefined; ignore it in verification. */
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef IEM_VERIFICATION_MODE
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef IEM_VERIFICATION_MODE
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
#ifdef IEM_VERIFICATION_MODE
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
#ifdef IEM_VERIFICATION_MODE
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
#ifdef IEM_VERIFICATION_MODE
                if (cShiftArg > 1) pIemCpu->fShiftOfHack = true;
#endif
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7255
7256/** Opcode 0xd4. */
7257FNIEMOP_STUB(iemOp_aam_Ib);
7258/** Opcode 0xd5. */
7259FNIEMOP_STUB(iemOp_aad_Ib);
7260
7261
/** Opcode 0xd7. */
FNIEMOP_DEF(iemOp_xlat)
{
    /* XLAT/XLATB: AL = [seg: xBX + zero-extended AL]; width of the address
       register pair follows the effective address size.  Default segment is
       DS, overridable via iEffSeg. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            /* NOTE(review): IEM_MC_BEGIN(2, 0) vs two IEM_MC_LOCALs and zero
               IEM_MC_ARGs looks like the (cArgs, cLocals) pair is swapped in
               all three cases here — harmless if the counts are unchecked;
               confirm against the IEM_MC_BEGIN definition. */
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);   /* AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);  /* + BX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);            /* -> AL */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7307
7308
7309/** Opcode 0xd8. */
7310FNIEMOP_STUB(iemOp_EscF0);
7311/** Opcode 0xd9. */
7312FNIEMOP_STUB(iemOp_EscF1);
7313/** Opcode 0xda. */
7314FNIEMOP_STUB(iemOp_EscF2);
7315/** Opcode 0xdb. */
7316FNIEMOP_STUB(iemOp_EscF3);
7317/** Opcode 0xdc. */
7318FNIEMOP_STUB(iemOp_EscF4);
7319/** Opcode 0xdd. */
7320FNIEMOP_STUB(iemOp_EscF5);
7321/** Opcode 0xde. */
7322FNIEMOP_STUB(iemOp_EscF6);
7323/** Opcode 0xdf. */
7324FNIEMOP_STUB(iemOp_EscF7);
7325
7326
/** Opcode 0xe0. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* LOOPNE/LOOPNZ: decrement xCX (width = effective address size, without
       touching EFLAGS) and take the short jump when the counter is non-zero
       AND ZF is clear; otherwise fall through. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7373
7374
/** Opcode 0xe1. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* LOOPE/LOOPZ: like LOOPNE above, but the branch requires ZF to be SET
       in addition to a non-zero counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7421
7422
/** Opcode 0xe2. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* LOOP: decrement xCX (EFLAGS untouched) and take the short jump while
       the counter remains non-zero; no flag condition. */
    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7472
7473
/** Opcode 0xe3. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(pIemCpu, &i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* JCXZ/JECXZ/JRCXZ: jump when the counter register (width = effective
       address size) is zero; the counter is only tested, never modified.
       Note the inverted branch structure: the NZ case falls through. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7517
7518
7519/** Opcode 0xe4 */
7520FNIEMOP_DEF(iemOp_in_AL_Ib)
7521{
7522 IEMOP_MNEMONIC("in eAX,Ib");
7523 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
7524 IEMOP_HLP_NO_LOCK_PREFIX();
7525 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
7526}
7527
7528
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN eAX,Ib: read a word/dword (per effective operand size) from the
       immediate 8-bit port into AX/EAX; deferred to the C implementation. */
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2 below passes the access width in bytes. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
7537
7538
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT Ib,AL: write AL to the immediate 8-bit port (width 1 byte);
       deferred to the C implementation. */
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
7547
7548
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT Ib,eAX: write AX/EAX (per effective operand size) to the immediate
       8-bit port; the second argument is the access width in bytes. */
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
7557
7558
/** Opcode 0xe8. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    /* CALL rel16/rel32: near relative call, deferred to the per-width C
       implementations which push the return address and adjust RIP.  In
       64-bit mode the operand size defaults to 64-bit and the rel32 is
       sign-extended. */
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
            /* NOTE(review): the u16 is zero- rather than sign-extended by this
               cast; presumably iemCImpl_call_rel_16 only uses the low 16 bits
               - confirm. */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int32_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm); /* rel32, sign-extended */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7587
7588
/** Opcode 0xe9. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    /* JMP rel16/rel32: near relative jump.  The 64-bit case shares the
       32-bit path because the immediate is always rel32; IEM_MC_REL_JMP_S32
       handles the mode-dependent RIP update. */
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16((int16_t)u16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:  /* fall through - rel32 in both modes */
        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32((int32_t)u32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7618
7619
/** Opcode 0xea. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    /* JMP ptr16:16/ptr16:32: direct far jump; invalid in 64-bit mode.
       Decodes offset (16 or 32 bits per operand size) followed by the
       selector, then defers to the far-jump C implementation. */
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(pIemCpu, &offSeg);
    else
    {
        uint16_t offSeg16; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &offSeg16);
        offSeg = offSeg16;
    }
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_FarJmp, uSel, offSeg);
}
7639
7640
/** Opcode 0xeb. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    /* JMP rel8: short relative jump; no mode-dependent decode needed since
       the displacement is always a single signed byte. */
    IEMOP_MNEMONIC("jmp Jb");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8((int8_t)u8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7654
7655
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: read one byte from the port in DX into AL. */
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
7663
7664
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    /* IN eAX,DX: read a word/dword (per effective operand size) from the
       port in DX into AX/EAX.
       NOTE(review): the function name is missing the "in_" prefix used by
       its siblings (iemOp_in_AL_DX); renaming would require touching the
       opcode dispatch table elsewhere in the file. */
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
7672
7673
/** Opcode 0xee */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    /* OUT DX,AL: write AL to the port in DX (width 1 byte). */
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
7681
7682
/** Opcode 0xef */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    /* OUT DX,eAX: write AX/EAX (per effective operand size) to the port in
       DX; the argument is the access width in bytes. */
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
7690
7691
/** Opcode 0xf0. */
FNIEMOP_DEF(iemOp_lock)
{
    /* LOCK prefix: record it in fPrefixes and restart decoding with the
       next opcode byte; legality is checked by the prefixed instruction. */
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7700
7701
/** Opcode 0xf2. */
FNIEMOP_DEF(iemOp_repne)
{
    /* REPNE/REPNZ prefix: record it and continue decoding the prefixed
       instruction.  REPZ and REPNZ are mutually exclusive, so the last one
       decoded wins. */
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7712
7713
/** Opcode 0xf3. */
FNIEMOP_DEF(iemOp_repe)
{
    /* REP/REPE/REPZ prefix: record it and continue decoding the prefixed
       instruction; mirrors iemOp_repne with the bits swapped. */
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
7724
7725
7726/** Opcode 0xf4. */
7727FNIEMOP_DEF(iemOp_hlt)
7728{
7729 IEMOP_HLP_NO_LOCK_PREFIX();
7730 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
7731}
7732
7733
7734/** Opcode 0xf5. */
7735FNIEMOP_STUB(iemOp_cmc);
7736
7737
7738/**
7739 * Common implementation of 'inc/dec/not/neg Eb'.
7740 *
7741 * @param bRm The RM byte.
7742 * @param pImpl The instruction implementation.
7743 */
7744FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
7745{
7746 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7747 {
7748 /* register access */
7749 IEM_MC_BEGIN(2, 0);
7750 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7751 IEM_MC_ARG(uint32_t *, pEFlags, 1);
7752 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7753 IEM_MC_REF_EFLAGS(pEFlags);
7754 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
7755 IEM_MC_ADVANCE_RIP();
7756 IEM_MC_END();
7757 }
7758 else
7759 {
7760 /* memory access. */
7761 IEM_MC_BEGIN(2, 2);
7762 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7763 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
7764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7765
7766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7767 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7768 IEM_MC_FETCH_EFLAGS(EFlags);
7769 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
7770 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
7771 else
7772 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
7773
7774 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7775 IEM_MC_COMMIT_EFLAGS(EFlags);
7776 IEM_MC_ADVANCE_RIP();
7777 IEM_MC_END();
7778 }
7779 return VINF_SUCCESS;
7780}
7781
7782
7783/**
7784 * Common implementation of 'inc/dec/not/neg Ev'.
7785 *
7786 * @param bRm The RM byte.
7787 * @param pImpl The instruction implementation.
7788 */
7789FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
7790{
7791 /* Registers are handled by a common worker. */
7792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7793 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
7794
7795 /* Memory we do here. */
7796 switch (pIemCpu->enmEffOpSize)
7797 {
7798 case IEMMODE_16BIT:
7799 IEM_MC_BEGIN(2, 2);
7800 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7801 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
7802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7803
7804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7805 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7806 IEM_MC_FETCH_EFLAGS(EFlags);
7807 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
7808 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
7809 else
7810 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
7811
7812 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7813 IEM_MC_COMMIT_EFLAGS(EFlags);
7814 IEM_MC_ADVANCE_RIP();
7815 IEM_MC_END();
7816 return VINF_SUCCESS;
7817
7818 case IEMMODE_32BIT:
7819 IEM_MC_BEGIN(2, 2);
7820 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7821 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
7822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7823
7824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7825 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7826 IEM_MC_FETCH_EFLAGS(EFlags);
7827 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
7828 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
7829 else
7830 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
7831
7832 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7833 IEM_MC_COMMIT_EFLAGS(EFlags);
7834 IEM_MC_ADVANCE_RIP();
7835 IEM_MC_END();
7836 return VINF_SUCCESS;
7837
7838 case IEMMODE_64BIT:
7839 IEM_MC_BEGIN(2, 2);
7840 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7841 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
7842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7843
7844 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
7845 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
7846 IEM_MC_FETCH_EFLAGS(EFlags);
7847 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
7848 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
7849 else
7850 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
7851
7852 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7853 IEM_MC_COMMIT_EFLAGS(EFlags);
7854 IEM_MC_ADVANCE_RIP();
7855 IEM_MC_END();
7856 return VINF_SUCCESS;
7857
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860}
7861
7862
/** Opcode 0xf6 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib: AND the byte operand with the immediate, updating only
       EFLAGS - the destination is never written, hence the read-only memory
       mapping below. */
    IEMOP_MNEMONIC("test Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The immediate follows the ModR/M bytes, so it is fetched after the
           effective address has been calculated. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7909
7910
/** Opcode 0xf7 /0. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    /* TEST Ev,Iv: AND the word/dword/qword operand with the immediate,
       updating only EFLAGS; the destination is never written (read-only
       memory mapping).  The 64-bit immediate is a sign-extended imm32. */
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm); /* imm32, sign-extended */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Immediate follows the ModR/M bytes: fetch it after the
                   effective address calculation. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(pIemCpu, &u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(pIemCpu, &u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8045
8046
/** Opcode 0xf6 /4, /5, /6 and /7. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    /* Common worker for mul/imul/div/idiv Eb: the byte operand is combined
       with AL and the 16-bit result/remainder lands in AX, so only a single
       AX reference is needed.  The operand is read-only, hence plain
       IEM_MC_FETCH_MEM_U8 in the memory path (no map/commit). */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
#ifdef IEM_VERIFICATION_MODE
    pIemCpu->fMulDivHack = true;  /* verification-mode workaround flag */
#endif

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8092
8093
8094/** Opcode 0xf7 /4, /5, /6 and /7. */
8095FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
8096{
8097 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8098#ifdef IEM_VERIFICATION_MODE
8099 pIemCpu->fMulDivHack = true;
8100#endif
8101
8102 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8103 {
8104 /* register access */
8105 switch (pIemCpu->enmEffOpSize)
8106 {
8107 case IEMMODE_16BIT:
8108 {
8109 IEMOP_HLP_NO_LOCK_PREFIX();
8110 IEM_MC_BEGIN(3, 1);
8111 IEM_MC_ARG(uint16_t *, pu16AX, 0);
8112 IEM_MC_ARG(uint16_t *, pu16DX, 1);
8113 IEM_MC_ARG(uint16_t, u16Value, 2);
8114 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8115 IEM_MC_LOCAL(int32_t, rc);
8116
8117 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8118 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
8119 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
8120 IEM_MC_REF_EFLAGS(pEFlags);
8121 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
8122 IEM_MC_IF_LOCAL_IS_Z(rc) {
8123 IEM_MC_ADVANCE_RIP();
8124 } IEM_MC_ELSE() {
8125 IEM_MC_RAISE_DIVIDE_ERROR();
8126 } IEM_MC_ENDIF();
8127
8128 IEM_MC_END();
8129 return VINF_SUCCESS;
8130 }
8131
8132 case IEMMODE_32BIT:
8133 {
8134 IEMOP_HLP_NO_LOCK_PREFIX();
8135 IEM_MC_BEGIN(3, 1);
8136 IEM_MC_ARG(uint32_t *, pu32AX, 0);
8137 IEM_MC_ARG(uint32_t *, pu32DX, 1);
8138 IEM_MC_ARG(uint32_t, u32Value, 2);
8139 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8140 IEM_MC_LOCAL(int32_t, rc);
8141
8142 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8143 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
8144 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
8145 IEM_MC_REF_EFLAGS(pEFlags);
8146 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
8147 IEM_MC_IF_LOCAL_IS_Z(rc) {
8148 IEM_MC_ADVANCE_RIP();
8149 } IEM_MC_ELSE() {
8150 IEM_MC_RAISE_DIVIDE_ERROR();
8151 } IEM_MC_ENDIF();
8152
8153 IEM_MC_END();
8154 return VINF_SUCCESS;
8155 }
8156
8157 case IEMMODE_64BIT:
8158 {
8159 IEMOP_HLP_NO_LOCK_PREFIX();
8160 IEM_MC_BEGIN(3, 1);
8161 IEM_MC_ARG(uint64_t *, pu64AX, 0);
8162 IEM_MC_ARG(uint64_t *, pu64DX, 1);
8163 IEM_MC_ARG(uint64_t, u64Value, 2);
8164 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8165 IEM_MC_LOCAL(int32_t, rc);
8166
8167 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8168 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
8169 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
8170 IEM_MC_REF_EFLAGS(pEFlags);
8171 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
8172 IEM_MC_IF_LOCAL_IS_Z(rc) {
8173 IEM_MC_ADVANCE_RIP();
8174 } IEM_MC_ELSE() {
8175 IEM_MC_RAISE_DIVIDE_ERROR();
8176 } IEM_MC_ENDIF();
8177
8178 IEM_MC_END();
8179 return VINF_SUCCESS;
8180 }
8181
8182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8183 }
8184 }
8185 else
8186 {
8187 /* memory access. */
8188 switch (pIemCpu->enmEffOpSize)
8189 {
8190 case IEMMODE_16BIT:
8191 {
8192 IEMOP_HLP_NO_LOCK_PREFIX();
8193 IEM_MC_BEGIN(3, 2);
8194 IEM_MC_ARG(uint16_t *, pu16AX, 0);
8195 IEM_MC_ARG(uint16_t *, pu16DX, 1);
8196 IEM_MC_ARG(uint16_t, u16Value, 2);
8197 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8199 IEM_MC_LOCAL(int32_t, rc);
8200
8201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
8202 IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
8203 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
8204 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
8205 IEM_MC_REF_EFLAGS(pEFlags);
8206 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
8207 IEM_MC_IF_LOCAL_IS_Z(rc) {
8208 IEM_MC_ADVANCE_RIP();
8209 } IEM_MC_ELSE() {
8210 IEM_MC_RAISE_DIVIDE_ERROR();
8211 } IEM_MC_ENDIF();
8212
8213 IEM_MC_END();
8214 return VINF_SUCCESS;
8215 }
8216
8217 case IEMMODE_32BIT:
8218 {
8219 IEMOP_HLP_NO_LOCK_PREFIX();
8220 IEM_MC_BEGIN(3, 2);
8221 IEM_MC_ARG(uint32_t *, pu32AX, 0);
8222 IEM_MC_ARG(uint32_t *, pu32DX, 1);
8223 IEM_MC_ARG(uint32_t, u32Value, 2);
8224 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8226 IEM_MC_LOCAL(int32_t, rc);
8227
8228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
8229 IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
8230 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
8231 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
8232 IEM_MC_REF_EFLAGS(pEFlags);
8233 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
8234 IEM_MC_IF_LOCAL_IS_Z(rc) {
8235 IEM_MC_ADVANCE_RIP();
8236 } IEM_MC_ELSE() {
8237 IEM_MC_RAISE_DIVIDE_ERROR();
8238 } IEM_MC_ENDIF();
8239
8240 IEM_MC_END();
8241 return VINF_SUCCESS;
8242 }
8243
8244 case IEMMODE_64BIT:
8245 {
8246 IEMOP_HLP_NO_LOCK_PREFIX();
8247 IEM_MC_BEGIN(3, 2);
8248 IEM_MC_ARG(uint64_t *, pu64AX, 0);
8249 IEM_MC_ARG(uint64_t *, pu64DX, 1);
8250 IEM_MC_ARG(uint64_t, u64Value, 2);
8251 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8253 IEM_MC_LOCAL(int32_t, rc);
8254
8255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
8256 IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
8257 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
8258 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
8259 IEM_MC_REF_EFLAGS(pEFlags);
8260 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
8261 IEM_MC_IF_LOCAL_IS_Z(rc) {
8262 IEM_MC_ADVANCE_RIP();
8263 } IEM_MC_ELSE() {
8264 IEM_MC_RAISE_DIVIDE_ERROR();
8265 } IEM_MC_ENDIF();
8266
8267 IEM_MC_END();
8268 return VINF_SUCCESS;
8269 }
8270
8271 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8272 }
8273 }
8274}
8275
/** Opcode 0xf6. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    /* Group 3, byte operand: dispatch on the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is not a defined encoding.  NOTE(review): raising the
               invalid-lock-prefix error here looks odd for an undefined
               opcode slot - confirm the intended macro. */
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, &iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8307
8308
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operand: dispatch on the ModR/M reg field;
       mirrors iemOp_Grp3_Eb above. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is not a defined encoding; see the matching note in
               iemOp_Grp3_Eb. */
            return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8340
8341
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag.  No operands; only CF is touched here. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* lock prefix is not valid with clc */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8353
8354
8355/** Opcode 0xf9. */
8356FNIEMOP_DEF(iemOp_stc)
8357{
8358 IEMOP_MNEMONIC("slc");
8359 IEMOP_HLP_NO_LOCK_PREFIX();
8360 IEM_MC_BEGIN(0, 0);
8361 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
8362 IEM_MC_ADVANCE_RIP();
8363 IEM_MC_END();
8364 return VINF_SUCCESS;
8365}
8366
8367
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - clear the interrupt flag; privilege checks and the actual
       EFLAGS update are done by the C implementation worker. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
8375
8376
8377FNIEMOP_DEF(iemOp_sti)
8378{
8379 IEMOP_MNEMONIC("sti");
8380 IEMOP_HLP_NO_LOCK_PREFIX();
8381 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
8382}
8383
8384
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag (string ops count upwards). */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* lock prefix is not valid with cld */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8396
8397
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag (string ops count downwards). */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX(); /* lock prefix is not valid with std */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8409
8410
8411/** Opcode 0xfe. */
8412FNIEMOP_DEF(iemOp_Grp4)
8413{
8414 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
8415 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8416 {
8417 case 0:
8418 IEMOP_MNEMONIC("inc Ev");
8419 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
8420 case 1:
8421 IEMOP_MNEMONIC("dec Ev");
8422 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
8423 default:
8424 IEMOP_MNEMONIC("grp4-ud");
8425 return IEMOP_RAISE_INVALID_OPCODE();
8426 }
8427}
8428
8429
8430/**
8431 * Opcode 0xff /2.
8432 * @param bRm The RM byte.
8433 */
8434FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
8435{
8436 IEMOP_MNEMONIC("calln Ev");
8437 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
8438 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8439
8440 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8441 {
8442 /* The new RIP is taken from a register. */
8443 switch (pIemCpu->enmEffOpSize)
8444 {
8445 case IEMMODE_16BIT:
8446 IEM_MC_BEGIN(1, 0);
8447 IEM_MC_ARG(uint16_t, u16Target, 0);
8448 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8449 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
8450 IEM_MC_END()
8451 return VINF_SUCCESS;
8452
8453 case IEMMODE_32BIT:
8454 IEM_MC_BEGIN(1, 0);
8455 IEM_MC_ARG(uint32_t, u32Target, 0);
8456 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8457 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
8458 IEM_MC_END()
8459 return VINF_SUCCESS;
8460
8461 case IEMMODE_64BIT:
8462 IEM_MC_BEGIN(1, 0);
8463 IEM_MC_ARG(uint64_t, u64Target, 0);
8464 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8465 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
8466 IEM_MC_END()
8467 return VINF_SUCCESS;
8468
8469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8470 }
8471 }
8472 else
8473 {
8474 /* The new RIP is taken from a register. */
8475 switch (pIemCpu->enmEffOpSize)
8476 {
8477 case IEMMODE_16BIT:
8478 IEM_MC_BEGIN(1, 1);
8479 IEM_MC_ARG(uint16_t, u16Target, 0);
8480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8482 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8483 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
8484 IEM_MC_END()
8485 return VINF_SUCCESS;
8486
8487 case IEMMODE_32BIT:
8488 IEM_MC_BEGIN(1, 1);
8489 IEM_MC_ARG(uint32_t, u32Target, 0);
8490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8492 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8493 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
8494 IEM_MC_END()
8495 return VINF_SUCCESS;
8496
8497 case IEMMODE_64BIT:
8498 IEM_MC_BEGIN(1, 1);
8499 IEM_MC_ARG(uint64_t, u64Target, 0);
8500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8502 IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8503 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
8504 IEM_MC_END()
8505 return VINF_SUCCESS;
8506
8507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8508 }
8509 }
8510}
8511
8512
8513/**
8514 * Opcode 0xff /3.
8515 * @param bRm The RM byte.
8516 */
8517FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
8518{
8519 IEMOP_MNEMONIC("callf Ep");
8520 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
8521
8522 /* Registers? How?? */
8523 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8524 {
8525 /** @todo How the heck does a 'callf eax' work? Probably just have to
8526 * search the docs... */
8527 AssertFailedReturn(VERR_NOT_IMPLEMENTED);
8528 }
8529
8530 /* Far pointer loaded from memory. */
8531 switch (pIemCpu->enmEffOpSize)
8532 {
8533 case IEMMODE_16BIT:
8534 IEM_MC_BEGIN(3, 1);
8535 IEM_MC_ARG(uint16_t, u16Sel, 0);
8536 IEM_MC_ARG(uint16_t, offSeg, 1);
8537 IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
8538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8540 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
8541 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 2);
8542 IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_16BIT);
8543 IEM_MC_END();
8544 return VINF_SUCCESS;
8545
8546 case IEMMODE_32BIT:
8547 if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
8548 {
8549 IEM_MC_BEGIN(3, 1);
8550 IEM_MC_ARG(uint16_t, u16Sel, 0);
8551 IEM_MC_ARG(uint32_t, offSeg, 1);
8552 IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
8553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8555 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
8556 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
8557 IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_32BIT);
8558 IEM_MC_END();
8559 }
8560 else
8561 {
8562 IEM_MC_BEGIN(3, 1);
8563 IEM_MC_ARG(uint16_t, u16Sel, 0);
8564 IEM_MC_ARG(uint64_t, offSeg, 1);
8565 IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
8566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8568 IEM_MC_FETCH_MEM_S32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
8569 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
8570 IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_32BIT);
8571 IEM_MC_END();
8572 }
8573 return VINF_SUCCESS;
8574
8575 case IEMMODE_64BIT:
8576 IEM_MC_BEGIN(3, 1);
8577 IEM_MC_ARG(uint16_t, u16Sel, 0);
8578 IEM_MC_ARG(uint64_t, offSeg, 1);
8579 IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
8580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8582 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
8583 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 8);
8584 IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_64BIT);
8585 IEM_MC_END();
8586 return VINF_SUCCESS;
8587
8588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8589 }
8590}
8591
8592
8593/**
8594 * Opcode 0xff /4.
8595 * @param bRm The RM byte.
8596 */
8597FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
8598{
8599 IEMOP_MNEMONIC("callf Ep");
8600 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
8601 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8602
8603 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8604 {
8605 /* The new RIP is taken from a register. */
8606 switch (pIemCpu->enmEffOpSize)
8607 {
8608 case IEMMODE_16BIT:
8609 IEM_MC_BEGIN(0, 1);
8610 IEM_MC_LOCAL(uint16_t, u16Target);
8611 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8612 IEM_MC_SET_RIP_U16(u16Target);
8613 IEM_MC_END()
8614 return VINF_SUCCESS;
8615
8616 case IEMMODE_32BIT:
8617 IEM_MC_BEGIN(0, 1);
8618 IEM_MC_LOCAL(uint32_t, u32Target);
8619 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8620 IEM_MC_SET_RIP_U32(u32Target);
8621 IEM_MC_END()
8622 return VINF_SUCCESS;
8623
8624 case IEMMODE_64BIT:
8625 IEM_MC_BEGIN(0, 1);
8626 IEM_MC_LOCAL(uint64_t, u64Target);
8627 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8628 IEM_MC_SET_RIP_U64(u64Target);
8629 IEM_MC_END()
8630 return VINF_SUCCESS;
8631
8632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8633 }
8634 }
8635 else
8636 {
8637 /* The new RIP is taken from a register. */
8638 switch (pIemCpu->enmEffOpSize)
8639 {
8640 case IEMMODE_16BIT:
8641 IEM_MC_BEGIN(0, 2);
8642 IEM_MC_LOCAL(uint16_t, u16Target);
8643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8645 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8646 IEM_MC_SET_RIP_U16(u16Target);
8647 IEM_MC_END()
8648 return VINF_SUCCESS;
8649
8650 case IEMMODE_32BIT:
8651 IEM_MC_BEGIN(0, 2);
8652 IEM_MC_LOCAL(uint32_t, u32Target);
8653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8655 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8656 IEM_MC_SET_RIP_U32(u32Target);
8657 IEM_MC_END()
8658 return VINF_SUCCESS;
8659
8660 case IEMMODE_64BIT:
8661 IEM_MC_BEGIN(0, 2);
8662 IEM_MC_LOCAL(uint32_t, u32Target);
8663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8665 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
8666 IEM_MC_SET_RIP_U32(u32Target);
8667 IEM_MC_END()
8668 return VINF_SUCCESS;
8669
8670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8671 }
8672 }
8673}
8674
8675
8676/**
8677 * Opcode 0xff /5.
8678 * @param bRm The RM byte.
8679 */
8680FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
8681{
8682 /* decode and use a C worker. */
8683 AssertFailed(); // FNIEMOP_STUB
8684 return VERR_NOT_IMPLEMENTED;
8685}
8686
8687
8688/**
8689 * Opcode 0xff /6.
8690 * @param bRm The RM byte.
8691 */
8692FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
8693{
8694 IEMOP_MNEMONIC("push Ev");
8695 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
8696
8697 /* Registers are handled by a common worker. */
8698 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8699 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8700
8701 /* Memory we do here. */
8702 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8703 switch (pIemCpu->enmEffOpSize)
8704 {
8705 case IEMMODE_16BIT:
8706 IEM_MC_BEGIN(0, 2);
8707 IEM_MC_LOCAL(uint16_t, u16Src);
8708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8710 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
8711 IEM_MC_PUSH_U16(u16Src);
8712 IEM_MC_ADVANCE_RIP();
8713 IEM_MC_END();
8714 return VINF_SUCCESS;
8715
8716 case IEMMODE_32BIT:
8717 IEM_MC_BEGIN(0, 2);
8718 IEM_MC_LOCAL(uint32_t, u32Src);
8719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8721 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
8722 IEM_MC_PUSH_U32(u32Src);
8723 IEM_MC_ADVANCE_RIP();
8724 IEM_MC_END();
8725 return VINF_SUCCESS;
8726
8727 case IEMMODE_64BIT:
8728 IEM_MC_BEGIN(0, 2);
8729 IEM_MC_LOCAL(uint64_t, u64Src);
8730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8732 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
8733 IEM_MC_PUSH_U64(u64Src);
8734 IEM_MC_ADVANCE_RIP();
8735 IEM_MC_END();
8736 return VINF_SUCCESS;
8737 }
8738 AssertFailedReturn(VERR_NOT_IMPLEMENTED);
8739}
8740
8741
8742/** Opcode 0xff. */
8743FNIEMOP_DEF(iemOp_Grp5)
8744{
8745 uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm);
8746 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8747 {
8748 case 0:
8749 IEMOP_MNEMONIC("inc Ev");
8750 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
8751 case 1:
8752 IEMOP_MNEMONIC("dec Ev");
8753 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
8754 case 2:
8755 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
8756 case 3:
8757 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
8758 case 4:
8759 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
8760 case 5:
8761 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
8762 case 6:
8763 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
8764 case 7:
8765 IEMOP_MNEMONIC("grp5-ud");
8766 return IEMOP_RAISE_INVALID_OPCODE();
8767 }
8768 AssertFailedReturn(VERR_INTERNAL_ERROR_2);
8769}
8770
8771
8772
/**
 * The one byte opcode dispatch table, indexed by the first opcode byte.
 * Each entry decodes and emulates (or stubs) the corresponding instruction;
 * prefix bytes (segment overrides, operand/address size, lock, rep) are
 * handled by their own entries.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma,      iemOp_arpl_Ew_Gw,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_pop_Ev,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp,        iemOp_lds_Gv_Mp,        iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_Invalid,          iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_Invalid,          iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
8840
8841
8842/** @} */
8843
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette