VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 97519

Last change on this file since 97519 was 97519, checked in by vboxsync, 2 years ago

VMM/IEM: Micro optimization of retn and retn imm16 by having separate C workers for the two instructions and each of their efficient operand size variations. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 396.6 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 97519 2022-11-11 23:58:22Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/* Instruction specification format - work in progress: */
60
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - byte add with memory/register destination (MR form). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8 - register destination (RM form), so no LOCK hint. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8 - fixed 8-bit encoding, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, immz - operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
153
154
/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
178
179
/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8 - AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64.  (Doc block above fixed to open with the
       Doxygen marker so the @op annotations are picked up again.) */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
218
219
220/**
221 * @opcode 0x0a
222 * @opgroup og_gen_arith_bin
223 * @opflmodify cf,pf,af,zf,sf,of
224 * @opflundef af
225 * @opflclear of,cf
226 * @opcopytests iemOp_or_Eb_Gb
227 */
228FNIEMOP_DEF(iemOp_or_Gb_Eb)
229{
230 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
231 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
232 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
233}
234
235
/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8 - fixed 8-bit encoding, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, immz - the o64 tests above cover sign-extension of the
       32-bit immediate to 64 bits. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
288
289
/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
300
301
302/**
303 * @opcode 0x0f
304 * @opmnemonic EscTwo0f
305 * @openc two0f
306 * @opdisenum OP_2B_ESC
307 * @ophints harmless
308 * @opgroup og_escapes
309 */
310FNIEMOP_DEF(iemOp_2byteEscape)
311{
312#ifdef VBOX_STRICT
313 /* Sanity check the table the first time around. */
314 static bool s_fTested = false;
315 if (RT_LIKELY(s_fTested)) { /* likely */ }
316 else
317 {
318 s_fTested = true;
319 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
320 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
321 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
322 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
323 }
324#endif
325
326 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
327 {
328 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
329 IEMOP_HLP_MIN_286();
330 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
331 }
332 /* @opdone */
333
334 /*
335 * On the 8086 this is a POP CS instruction.
336 * For the time being we don't specify this this.
337 */
338 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
339 IEMOP_HLP_NO_64BIT();
340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
341 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
342}
343
/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 - add with carry-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, immz. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
434
435
/**
 * @opcode 0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 * NOTE(review): the previous annotations here (@opgroup og_gen_arith_bin,
 * @opfltest cf, @opflmodify cf,pf,af,zf,sf,of) looked copy-pasted from the
 * SBB group below; POP SS neither tests nor modifies any flags - confirm
 * before regenerating tests.
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - inhibits interrupts for one instruction (DISOPTYPE_INHIBIT_IRQS). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
460
461
/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 - subtract with borrow-in (see @opfltest cf). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, immz. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
538
539
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
563
564
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
623
624
625/**
626 * @opcode 0x24
627 * @opgroup og_gen_arith_bin
628 * @opflmodify cf,pf,af,zf,sf,of
629 * @opflundef af
630 * @opflclear of,cf
631 */
632FNIEMOP_DEF(iemOp_and_Al_Ib)
633{
634 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
637}
638
639
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, immz - operand size selects AX/EAX/RAX and immediate width. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
653
654
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; OF is undefined (see below). */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
689
690
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, immz. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
761
762
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; OF is undefined (see below). */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
797
798
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
872
873
874/**
875 * @opcode 0x35
876 * @opgroup og_gen_arith_bin
877 * @opflmodify cf,pf,af,zf,sf,of
878 * @opflundef af
879 * @opflclear of,cf
880 */
881FNIEMOP_DEF(iemOp_xor_eAX_Iz)
882{
883 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
886}
887
888
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
907
908
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; the intel/amd @optest split above
       reflects vendor-specific results for the undefined flags. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
956
957
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - routed through the common binary-operator helper with
       the cmp implementation table. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, immz. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1016
1017
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1030
1031
1032/**
1033 * @opcode 0x3f
1034 * @opfltest af,cf
1035 * @opflmodify cf,pf,af,zf,sf,of
1036 * @opflundef pf,zf,sf,of
1037 * @opgroup og_gen_arith_dec
1038 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1039 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1040 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1041 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1042 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1043 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1045 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1046 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1047 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1048 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1049 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1050 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1051 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1052 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1053 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1058 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1059 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1060 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1061 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1062 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1063 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1064 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1065 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1066 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1067 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1068 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1069 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1070 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1071 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1072 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1073 */
1074FNIEMOP_DEF(iemOp_aas)
1075{
1076 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1077 IEMOP_HLP_NO_64BIT();
1078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1079 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1080
1081 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1082}
1083
1084
1085/**
1086 * Common 'inc/dec/not/neg register' helper.
1087 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    /* Applies the given unary operation (inc/dec/not/neg) to register iReg
       at the current effective operand size. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper dword of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1130
1131
1132/**
1133 * @opcode 0x40
1134 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;   /* plain REX (0x40): no W/R/X/B bits set */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1152
1153
1154/**
1155 * @opcode 0x41
1156 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1175
1176
1177/**
1178 * @opcode 0x42
1179 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1198
1199
1200
1201/**
1202 * @opcode 0x43
1203 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1223
1224
1225/**
1226 * @opcode 0x44
1227 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1246
1247
1248/**
1249 * @opcode 0x45
1250 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1270
1271
1272/**
1273 * @opcode 0x46
1274 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1294
1295
1296/**
1297 * @opcode 0x47
1298 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1319
1320
1321/**
1322 * @opcode 0x48
1323 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1342
1343
1344/**
1345 * @opcode 0x49
1346 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1366
1367
1368/**
1369 * @opcode 0x4a
1370 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1390
1391
1392/**
1393 * @opcode 0x4b
1394 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1415
1416
1417/**
1418 * @opcode 0x4c
1419 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1439
1440
1441/**
1442 * @opcode 0x4d
1443 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1464
1465
1466/**
1467 * @opcode 0x4e
1468 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1489
1490
1491/**
1492 * @opcode 0x4f
1493 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the r/m, base and opcode-reg fields. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB index field. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1515
1516
1517/**
1518 * Common 'push register' helper.
1519 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    /* Pushes general register iReg at the current effective operand size. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;         /* REX.B selects r8-r15. */
        /* In 64-bit mode push defaults to a 64-bit operand; the operand-size
           prefix gives 16-bit, there is no 32-bit push encoding. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1562
1563
1564/**
1565 * @opcode 0x50
1566 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1572
1573
1574/**
1575 * @opcode 0x51
1576 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1582
1583
1584/**
1585 * @opcode 0x52
1586 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1592
1593
1594/**
1595 * @opcode 0x53
1596 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1602
1603
1604/**
1605 * @opcode 0x54
1606 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/8088 pushes the value of SP *after* it has been
           decremented, hence the explicit SUB 2 before the push. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* 80186 and later push the original SP value - common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1622
1623
1624/**
1625 * @opcode 0x55
1626 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1632
1633
1634/**
1635 * @opcode 0x56
1636 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1642
1643
1644/**
1645 * @opcode 0x57
1646 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1652
1653
1654/**
1655 * Common 'pop register' helper.
1656 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    /* Pops the top of stack into general register iReg at the current
       effective operand size.  Not used for SP itself (see iemOp_pop_eSP). */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;         /* REX.B selects r8-r15. */
        /* In 64-bit mode pop defaults to a 64-bit operand; the operand-size
           prefix gives 16-bit, there is no 32-bit pop encoding. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1700
1701
1702/**
1703 * @opcode 0x58
1704 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1710
1711
1712/**
1713 * @opcode 0x59
1714 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1720
1721
1722/**
1723 * @opcode 0x5a
1724 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1730
1731
1732/**
1733 * @opcode 0x5b
1734 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1740
1741
1742/**
1743 * @opcode 0x5c
1744 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is 'pop r12' and the common worker applies. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Unlike the common worker, 'pop SP' pops into a local and only then
       stores it to xSP, since the destination register is the stack pointer
       being modified by the pop itself. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1791
1792
1793/**
1794 * @opcode 0x5d
1795 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1801
1802
1803/**
1804 * @opcode 0x5e
1805 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1811
1812
1813/**
1814 * @opcode 0x5f
1815 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Common worker applies REX.B and the operand-size rules. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1821
1822
1823/**
1824 * @opcode 0x60
1825 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to the 16- or 32-bit C implementation per effective operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1836
1837
1838/**
1839 * @opcode 0x61
1840 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA outside 64-bit mode and the (unsupported) MVEX prefix in it. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1857
1858
1859/**
1860 * @opcode 0x62
1861 * @opmnemonic bound
1862 * @op1 Gv_RO
1863 * @op2 Ma
1864 * @opmincpu 80186
1865 * @ophints harmless invalid_64
1866 * @optest op1=0 op2=0 ->
1867 * @optest op1=1 op2=0 -> value.xcpt=5
1868 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1869 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1870 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1871 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1872 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1873 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1874 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1875 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1876 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1877 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1878 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1879 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1880 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1881 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1882 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1883 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1884 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1885 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1886 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1887 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1888 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1889 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1890 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1891 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1892 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1893 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1894 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1895 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1896 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1897 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1898 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1899 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1900 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1901 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1902 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1903 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1904 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1905 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1906 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1907 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1908 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1909 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1910 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MODRM.MOD == 3 here: this is the EVEX prefix encoding, if supported. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    IEMOP_MNEMONIC(evex, "evex");
    /* Consume the remaining two EVEX payload bytes before giving up. */
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1998
1999
2000/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* Adjust RPL of the selector in Ew to at least that of Gw; only valid in
       protected mode (0x63 is MOVSXD in 64-bit mode, handled elsewhere). */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write of the destination selector word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2047
2048
2049/**
2050 * @opcode 0x63
2051 *
2052 * @note This is a weird one. It works like a regular move instruction if
2053 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2054 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W this behaves like a regular move according to the
           AMD-docs note above - not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2096
2097
2098/**
2099 * @opcode 0x64
2100 * @opmnemonic segfs
2101 * @opmincpu 80386
2102 * @opgroup og_prefixes
2103 */
2104FNIEMOP_DEF(iemOp_seg_FS)
2105{
2106    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2107    IEMOP_HLP_MIN_386();
2108
    /* Record the prefix and make FS the effective segment for the instruction. */
2109    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2110    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;
2111
    /* A prefix is not an instruction by itself: fetch and dispatch the next opcode byte. */
2112    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2113    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2114}
2115
2116
2117/**
2118 * @opcode 0x65
2119 * @opmnemonic seggs
2120 * @opmincpu 80386
2121 * @opgroup og_prefixes
2122 */
2123FNIEMOP_DEF(iemOp_seg_GS)
2124{
2125    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2126    IEMOP_HLP_MIN_386();
2127
    /* Record the prefix and make GS the effective segment for the instruction. */
2128    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2129    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;
2130
    /* A prefix is not an instruction by itself: fetch and dispatch the next opcode byte. */
2131    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2132    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2133}
2134
2135
2136/**
2137 * @opcode 0x66
2138 * @opmnemonic opsize
2139 * @openc prefix
2140 * @opmincpu 80386
2141 * @ophints harmless
2142 * @opgroup og_prefixes
2143 */
2144FNIEMOP_DEF(iemOp_op_size)
2145{
2146    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2147    IEMOP_HLP_MIN_386();
2148
    /* Record the prefix and recompute the effective operand size from the
       default mode plus the accumulated prefixes. */
2149    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2150    iemRecalEffOpSize(pVCpu);
2151
2152    /* For the 4 entry opcode tables, the operand prefix doesn't count
2153       when REPZ or REPNZ are present (idxPrefix != 0 means one was seen first). */
2154    if (pVCpu->iem.s.idxPrefix == 0)
2155        pVCpu->iem.s.idxPrefix = 1;
2156
    /* A prefix is not an instruction by itself: fetch and dispatch the next opcode byte. */
2157    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2158    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2159}
2160
2161
2162/**
2163 * @opcode 0x67
2164 * @opmnemonic addrsize
2165 * @openc prefix
2166 * @opmincpu 80386
2167 * @ophints harmless
2168 * @opgroup og_prefixes
2169 */
2170FNIEMOP_DEF(iemOp_addr_size)
2171{
2172    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2173    IEMOP_HLP_MIN_386();
2174
2175    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* The prefix toggles the address size relative to the default mode;
       in 64-bit mode it selects 32-bit addressing (16-bit isn't reachable). */
2176    switch (pVCpu->iem.s.enmDefAddrMode)
2177    {
2178        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2179        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2180        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2181        default: AssertFailed();
2182    }
2183
    /* A prefix is not an instruction by itself: fetch and dispatch the next opcode byte. */
2184    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2185    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2186}
2187
2188
2189/**
2190 * @opcode 0x68
 * PUSH Iz - push an immediate sized by the effective operand size
 * (imm16 / imm32 / imm32 sign-extended to 64 bits).
2191 */
2192FNIEMOP_DEF(iemOp_push_Iz)
2193{
2194    IEMOP_MNEMONIC(push_Iz, "push Iz");
2195    IEMOP_HLP_MIN_186();
2196    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2197    switch (pVCpu->iem.s.enmEffOpSize)
2198    {
2199        case IEMMODE_16BIT:
2200        {
2201            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2202            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2203            IEM_MC_BEGIN(0,0);
2204            IEM_MC_PUSH_U16(u16Imm);
2205            IEM_MC_ADVANCE_RIP_AND_FINISH();
2206            IEM_MC_END();
            /* NOTE(review): this return looks unreachable since
               IEM_MC_ADVANCE_RIP_AND_FINISH already returns - confirm
               against the IEM_MC_END expansion for all build variants. */
2207            return VINF_SUCCESS;
2208        }
2209
2210        case IEMMODE_32BIT:
2211        {
2212            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2213            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2214            IEM_MC_BEGIN(0,0);
2215            IEM_MC_PUSH_U32(u32Imm);
2216            IEM_MC_ADVANCE_RIP_AND_FINISH();
2217            IEM_MC_END();
2218            return VINF_SUCCESS;
2219        }
2220
2221        case IEMMODE_64BIT:
2222        {
            /* There is no imm64 encoding; the imm32 is sign-extended. */
2223            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2224            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2225            IEM_MC_BEGIN(0,0);
2226            IEM_MC_PUSH_U64(u64Imm);
2227            IEM_MC_ADVANCE_RIP_AND_FINISH();
2228            IEM_MC_END();
2229            return VINF_SUCCESS;
2230        }
2231
2232        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2233    }
2234}
2235
2236
2237/**
2238 * @opcode 0x69
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
2239 */
2240FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2241{
2242    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2243    IEMOP_HLP_MIN_186();
2244    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2245    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2246
2247    switch (pVCpu->iem.s.enmEffOpSize)
2248    {
2249        case IEMMODE_16BIT:
2250        {
2251            if (IEM_IS_MODRM_REG_MODE(bRm))
2252            {
2253                /* register operand */
2254                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2255                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2256
2257                IEM_MC_BEGIN(3, 1);
2258                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
2259                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
2260                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
2261                IEM_MC_LOCAL(uint16_t,      u16Tmp);
2262
                /* The worker multiplies in place via pu16Dst (a ref to the
                   local), so the result is stored to Gv afterwards. */
2263                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2264                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2265                IEM_MC_REF_EFLAGS(pEFlags);
2266                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2267                                         pu16Dst, u16Src, pEFlags);
2268                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2269
2270                IEM_MC_ADVANCE_RIP_AND_FINISH();
2271                IEM_MC_END();
2272            }
2273            else
2274            {
2275                /* memory operand */
2276                IEM_MC_BEGIN(3, 2);
2277                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
2278                IEM_MC_ARG(uint16_t,        u16Src,             1);
2279                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2280                IEM_MC_LOCAL(uint16_t,      u16Tmp);
2281                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2282
                /* cbImm=2: an imm16 follows the displacement, so the
                   effective-address calc must account for those bytes. */
2283                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2284                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2285                IEM_MC_ASSIGN(u16Src, u16Imm);
2286                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2287                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2288                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2289                IEM_MC_REF_EFLAGS(pEFlags);
2290                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2291                                         pu16Dst, u16Src, pEFlags);
2292                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2293
2294                IEM_MC_ADVANCE_RIP_AND_FINISH();
2295                IEM_MC_END();
2296            }
2297            return VINF_SUCCESS;
2298        }
2299
2300        case IEMMODE_32BIT:
2301        {
2302            if (IEM_IS_MODRM_REG_MODE(bRm))
2303            {
2304                /* register operand */
2305                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2306                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2307
2308                IEM_MC_BEGIN(3, 1);
2309                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
2310                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
2311                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
2312                IEM_MC_LOCAL(uint32_t,      u32Tmp);
2313
2314                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2315                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2316                IEM_MC_REF_EFLAGS(pEFlags);
2317                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2318                                         pu32Dst, u32Src, pEFlags);
2319                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2320
2321                IEM_MC_ADVANCE_RIP_AND_FINISH();
2322                IEM_MC_END();
2323            }
2324            else
2325            {
2326                /* memory operand */
2327                IEM_MC_BEGIN(3, 2);
2328                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
2329                IEM_MC_ARG(uint32_t,        u32Src,             1);
2330                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2331                IEM_MC_LOCAL(uint32_t,      u32Tmp);
2332                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2333
                /* cbImm=4: an imm32 follows the displacement. */
2334                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2335                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2336                IEM_MC_ASSIGN(u32Src, u32Imm);
2337                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2338                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2339                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2340                IEM_MC_REF_EFLAGS(pEFlags);
2341                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2342                                         pu32Dst, u32Src, pEFlags);
2343                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2344
2345                IEM_MC_ADVANCE_RIP_AND_FINISH();
2346                IEM_MC_END();
2347            }
2348            return VINF_SUCCESS;
2349        }
2350
2351        case IEMMODE_64BIT:
2352        {
2353            if (IEM_IS_MODRM_REG_MODE(bRm))
2354            {
2355                /* register operand */
                /* No imm64 encoding exists; the imm32 is sign-extended. */
2356                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2357                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2358
2359                IEM_MC_BEGIN(3, 1);
2360                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
2361                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
2362                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
2363                IEM_MC_LOCAL(uint64_t,      u64Tmp);
2364
2365                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2366                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2367                IEM_MC_REF_EFLAGS(pEFlags);
2368                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2369                                         pu64Dst, u64Src, pEFlags);
2370                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2371
2372                IEM_MC_ADVANCE_RIP_AND_FINISH();
2373                IEM_MC_END();
2374            }
2375            else
2376            {
2377                /* memory operand */
2378                IEM_MC_BEGIN(3, 2);
2379                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
2380                IEM_MC_ARG(uint64_t,        u64Src,             1);
2381                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2382                IEM_MC_LOCAL(uint64_t,      u64Tmp);
2383                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2384
                /* cbImm=4: the sign-extended imm32 follows the displacement. */
2385                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2386                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2387                IEM_MC_ASSIGN(u64Src, u64Imm);
2388                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2389                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2390                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2391                IEM_MC_REF_EFLAGS(pEFlags);
2392                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2393                                         pu64Dst, u64Src, pEFlags);
2394                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2395
2396                IEM_MC_ADVANCE_RIP_AND_FINISH();
2397                IEM_MC_END();
2398            }
2399            return VINF_SUCCESS;
2400        }
2401
2402        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2403    }
2404}
2405
2406
2407/**
2408 * @opcode 0x6a
 * PUSH Ib - push a sign-extended imm8 using the effective operand size.
2409 */
2410FNIEMOP_DEF(iemOp_push_Ib)
2411{
2412    IEMOP_MNEMONIC(push_Ib, "push Ib");
2413    IEMOP_HLP_MIN_186();
    /* The imm8 is read as signed so the implicit widening below sign-extends. */
2414    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2415    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2416    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2417
2418    IEM_MC_BEGIN(0,0);
2419    switch (pVCpu->iem.s.enmEffOpSize)
2420    {
2421        case IEMMODE_16BIT:
2422            IEM_MC_PUSH_U16(i8Imm);
2423            break;
2424        case IEMMODE_32BIT:
2425            IEM_MC_PUSH_U32(i8Imm);
2426            break;
2427        case IEMMODE_64BIT:
2428            IEM_MC_PUSH_U64(i8Imm);
2429            break;
2430        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2431    }
2432    IEM_MC_ADVANCE_RIP_AND_FINISH();
2433    IEM_MC_END();
2434}
2435
2436
2437/**
2438 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended imm8.
2439 */
2440FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2441{
2442    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
2443    IEMOP_HLP_MIN_186();
2444    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2445    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2446
2447    switch (pVCpu->iem.s.enmEffOpSize)
2448    {
2449        case IEMMODE_16BIT:
2450            if (IEM_IS_MODRM_REG_MODE(bRm))
2451            {
2452                /* register operand */
2453                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2454                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2455
2456                IEM_MC_BEGIN(3, 1);
2457                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* The (int8_t) cast sign-extends the imm8 to 16 bits. */
2458                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
2459                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
2460                IEM_MC_LOCAL(uint16_t,      u16Tmp);
2461
                /* The worker multiplies in place via pu16Dst (a ref to the
                   local), so the result is stored to Gv afterwards. */
2462                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2463                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2464                IEM_MC_REF_EFLAGS(pEFlags);
2465                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2466                                         pu16Dst, u16Src, pEFlags);
2467                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2468
2469                IEM_MC_ADVANCE_RIP_AND_FINISH();
2470                IEM_MC_END();
2471            }
2472            else
2473            {
2474                /* memory operand */
2475                IEM_MC_BEGIN(3, 2);
2476                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
2477                IEM_MC_ARG(uint16_t,        u16Src,             1);
2478                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2479                IEM_MC_LOCAL(uint16_t,      u16Tmp);
2480                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2481
                /* cbImm=1: the imm8 follows the displacement. */
2482                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2483                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2484                IEM_MC_ASSIGN(u16Src, u16Imm);
2485                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2486                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2487                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2488                IEM_MC_REF_EFLAGS(pEFlags);
2489                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
2490                                         pu16Dst, u16Src, pEFlags);
2491                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2492
2493                IEM_MC_ADVANCE_RIP_AND_FINISH();
2494                IEM_MC_END();
2495            }
2496            return VINF_SUCCESS;
2497
2498        case IEMMODE_32BIT:
2499            if (IEM_IS_MODRM_REG_MODE(bRm))
2500            {
2501                /* register operand */
2502                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2503                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2504
2505                IEM_MC_BEGIN(3, 1);
2506                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
2507                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
2508                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
2509                IEM_MC_LOCAL(uint32_t,      u32Tmp);
2510
2511                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2512                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2513                IEM_MC_REF_EFLAGS(pEFlags);
2514                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2515                                         pu32Dst, u32Src, pEFlags);
2516                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2517
2518                IEM_MC_ADVANCE_RIP_AND_FINISH();
2519                IEM_MC_END();
2520            }
2521            else
2522            {
2523                /* memory operand */
2524                IEM_MC_BEGIN(3, 2);
2525                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
2526                IEM_MC_ARG(uint32_t,        u32Src,             1);
2527                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2528                IEM_MC_LOCAL(uint32_t,      u32Tmp);
2529                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2530
                /* cbImm=1: the imm8 follows the displacement. */
2531                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2532                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2533                IEM_MC_ASSIGN(u32Src, u32Imm);
2534                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2535                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2536                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2537                IEM_MC_REF_EFLAGS(pEFlags);
2538                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
2539                                         pu32Dst, u32Src, pEFlags);
2540                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2541
2542                IEM_MC_ADVANCE_RIP_AND_FINISH();
2543                IEM_MC_END();
2544            }
2545            return VINF_SUCCESS;
2546
2547        case IEMMODE_64BIT:
2548            if (IEM_IS_MODRM_REG_MODE(bRm))
2549            {
2550                /* register operand */
2551                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2552                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2553
2554                IEM_MC_BEGIN(3, 1);
2555                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
2556                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
2557                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
2558                IEM_MC_LOCAL(uint64_t,      u64Tmp);
2559
2560                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2561                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2562                IEM_MC_REF_EFLAGS(pEFlags);
2563                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2564                                         pu64Dst, u64Src, pEFlags);
2565                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2566
2567                IEM_MC_ADVANCE_RIP_AND_FINISH();
2568                IEM_MC_END();
2569            }
2570            else
2571            {
2572                /* memory operand */
2573                IEM_MC_BEGIN(3, 2);
2574                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
2575                IEM_MC_ARG(uint64_t,        u64Src,             1);
2576                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
2577                IEM_MC_LOCAL(uint64_t,      u64Tmp);
2578                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
2579
                /* cbImm=1: the imm8 follows the displacement. */
2580                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2581                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2582                IEM_MC_ASSIGN(u64Src, u64Imm);
2583                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2584                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2585                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2586                IEM_MC_REF_EFLAGS(pEFlags);
2587                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
2588                                         pu64Dst, u64Src, pEFlags);
2589                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2590
2591                IEM_MC_ADVANCE_RIP_AND_FINISH();
2592                IEM_MC_END();
2593            }
2594            return VINF_SUCCESS;
2595
2596        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2597    }
2598    AssertFailedReturn(VERR_IEM_IPE_8);
2599}
2600
2601
2602/**
2603 * @opcode 0x6c
 * INS Yb,DX - input byte(s) from port DX to ES:[e/rDI]; deferred to C
 * workers selected by address size and by the presence of a REP prefix.
2604 */
2605FNIEMOP_DEF(iemOp_insb_Yb_DX)
2606{
2607    IEMOP_HLP_MIN_186();
2608    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ (F2) is treated the same as REPZ (F3) here. */
2609    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2610    {
2611        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2612        switch (pVCpu->iem.s.enmEffAddrMode)
2613        {
            /* The 'false' argument is forwarded to the C worker - presumably
               an fIoChecked flag; verify against the iemCImpl signature. */
2614            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2615            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2616            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2617            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2618        }
2619    }
2620    else
2621    {
2622        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2623        switch (pVCpu->iem.s.enmEffAddrMode)
2624        {
2625            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2626            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2627            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2628            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2629        }
2630    }
2631}
2632
2633
2634/**
2635 * @opcode 0x6d
 * INS Yv,DX - input word/dword from port DX; worker selected by operand
 * size, address size and REP prefix.  A 64-bit operand size uses the
 * 32-bit workers (INS has no 64-bit data form).
2636 */
2637FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2638{
2639    IEMOP_HLP_MIN_186();
2640    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ (F2) is treated the same as REPZ (F3) here. */
2641    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2642    {
2643        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2644        switch (pVCpu->iem.s.enmEffOpSize)
2645        {
2646            case IEMMODE_16BIT:
2647                switch (pVCpu->iem.s.enmEffAddrMode)
2648                {
2649                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2650                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2651                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2652                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2653                }
2654                break;
2655            case IEMMODE_64BIT:
2656            case IEMMODE_32BIT:
2657                switch (pVCpu->iem.s.enmEffAddrMode)
2658                {
2659                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2660                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2661                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2662                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2663                }
2664                break;
2665            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2666        }
2667    }
2668    else
2669    {
2670        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2671        switch (pVCpu->iem.s.enmEffOpSize)
2672        {
2673            case IEMMODE_16BIT:
2674                switch (pVCpu->iem.s.enmEffAddrMode)
2675                {
2676                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2677                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2678                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2679                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2680                }
2681                break;
2682            case IEMMODE_64BIT:
2683            case IEMMODE_32BIT:
2684                switch (pVCpu->iem.s.enmEffAddrMode)
2685                {
2686                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2687                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2688                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2689                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2690                }
2691                break;
2692            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2693        }
2694    }
2695}
2696
2697
2698/**
2699 * @opcode 0x6e
 * OUTS DX,Yb - output byte(s) from iEffSeg:[e/rSI] to port DX; deferred
 * to C workers selected by address size and REP prefix.  Unlike INS,
 * the effective (overridable) source segment is passed along.
2700 */
2701FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2702{
2703    IEMOP_HLP_MIN_186();
2704    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ (F2) is treated the same as REPZ (F3) here. */
2705    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2706    {
2707        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2708        switch (pVCpu->iem.s.enmEffAddrMode)
2709        {
            /* The trailing 'false' is forwarded to the C worker - presumably
               an fIoChecked flag; verify against the iemCImpl signature. */
2710            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2711            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2712            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2713            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2714        }
2715    }
2716    else
2717    {
2718        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2719        switch (pVCpu->iem.s.enmEffAddrMode)
2720        {
2721            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2722            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2723            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2724            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2725        }
2726    }
2727}
2728
2729
2730/**
2731 * @opcode 0x6f
 * OUTS DX,Yv - output word/dword from iEffSeg:[e/rSI] to port DX; worker
 * selected by operand size, address size and REP prefix.  A 64-bit
 * operand size uses the 32-bit workers (OUTS has no 64-bit data form).
2732 */
2733FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2734{
2735    IEMOP_HLP_MIN_186();
2736    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ (F2) is treated the same as REPZ (F3) here. */
2737    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2738    {
2739        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2740        switch (pVCpu->iem.s.enmEffOpSize)
2741        {
2742            case IEMMODE_16BIT:
2743                switch (pVCpu->iem.s.enmEffAddrMode)
2744                {
2745                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2746                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2747                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2748                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2749                }
2750                break;
2751            case IEMMODE_64BIT:
2752            case IEMMODE_32BIT:
2753                switch (pVCpu->iem.s.enmEffAddrMode)
2754                {
2755                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2756                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2757                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2758                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2759                }
2760                break;
2761            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2762        }
2763    }
2764    else
2765    {
2766        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2767        switch (pVCpu->iem.s.enmEffOpSize)
2768        {
2769            case IEMMODE_16BIT:
2770                switch (pVCpu->iem.s.enmEffAddrMode)
2771                {
2772                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2773                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2774                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2775                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2776                }
2777                break;
2778            case IEMMODE_64BIT:
2779            case IEMMODE_32BIT:
2780                switch (pVCpu->iem.s.enmEffAddrMode)
2781                {
2782                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2783                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2784                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2785                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
2786                }
2787                break;
2788            IEM_NOT_REACHED_DEFAULT_CASE_RET();
2789        }
2790    }
2791}
2792
2793
2794/**
2795 * @opcode 0x70
 * JO Jb - jump short (rel8) if the overflow flag (OF) is set.
2796 */
2797FNIEMOP_DEF(iemOp_jo_Jb)
2798{
2799    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2800    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2801    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2802    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2803
2804    IEM_MC_BEGIN(0, 0);
2805    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2806        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2807    } IEM_MC_ELSE() {
2808        IEM_MC_ADVANCE_RIP_AND_FINISH();
2809    } IEM_MC_ENDIF();
2810    IEM_MC_END();
2811}
2812
2813
2814/**
2815 * @opcode 0x71
 * JNO Jb - jump short (rel8) if the overflow flag (OF) is clear.
 * Tests OF set and takes the jump in the else branch.
2816 */
2817FNIEMOP_DEF(iemOp_jno_Jb)
2818{
2819    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2820    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2821    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2822    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2823
2824    IEM_MC_BEGIN(0, 0);
2825    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2826        IEM_MC_ADVANCE_RIP_AND_FINISH();
2827    } IEM_MC_ELSE() {
2828        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2829    } IEM_MC_ENDIF();
2830    IEM_MC_END();
2831}
2832
2833/**
2834 * @opcode 0x72
 * JC/JB/JNAE Jb - jump short (rel8) if the carry flag (CF) is set.
2835 */
2836FNIEMOP_DEF(iemOp_jc_Jb)
2837{
2838    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2839    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2840    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2841    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2842
2843    IEM_MC_BEGIN(0, 0);
2844    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2845        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2846    } IEM_MC_ELSE() {
2847        IEM_MC_ADVANCE_RIP_AND_FINISH();
2848    } IEM_MC_ENDIF();
2849    IEM_MC_END();
2850}
2851
2852
2853/**
2854 * @opcode 0x73
 * JNC/JNB/JAE Jb - jump short (rel8) if the carry flag (CF) is clear.
 * Tests CF set and takes the jump in the else branch.
2855 */
2856FNIEMOP_DEF(iemOp_jnc_Jb)
2857{
2858    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2859    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2860    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2861    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2862
2863    IEM_MC_BEGIN(0, 0);
2864    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2865        IEM_MC_ADVANCE_RIP_AND_FINISH();
2866    } IEM_MC_ELSE() {
2867        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2868    } IEM_MC_ENDIF();
2869    IEM_MC_END();
2870}
2871
2872
2873/**
2874 * @opcode 0x74
 * JE/JZ Jb - jump short (rel8) if the zero flag (ZF) is set.
2875 */
2876FNIEMOP_DEF(iemOp_je_Jb)
2877{
2878    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2879    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2880    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2882
2883    IEM_MC_BEGIN(0, 0);
2884    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2885        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2886    } IEM_MC_ELSE() {
2887        IEM_MC_ADVANCE_RIP_AND_FINISH();
2888    } IEM_MC_ENDIF();
2889    IEM_MC_END();
2890}
2891
2892
2893/**
2894 * @opcode 0x75
 * JNE/JNZ Jb - jump short (rel8) if the zero flag (ZF) is clear.
 * Tests ZF set and takes the jump in the else branch.
2895 */
2896FNIEMOP_DEF(iemOp_jne_Jb)
2897{
2898    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2899    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2900    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2901    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2902
2903    IEM_MC_BEGIN(0, 0);
2904    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2905        IEM_MC_ADVANCE_RIP_AND_FINISH();
2906    } IEM_MC_ELSE() {
2907        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2908    } IEM_MC_ENDIF();
2909    IEM_MC_END();
2910}
2911
2912
2913/**
2914 * @opcode 0x76
 * JBE/JNA Jb - jump short (rel8) if CF=1 or ZF=1 (unsigned below-or-equal).
2915 */
2916FNIEMOP_DEF(iemOp_jbe_Jb)
2917{
2918    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2919    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2920    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2921    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2922
2923    IEM_MC_BEGIN(0, 0);
2924    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2925        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2926    } IEM_MC_ELSE() {
2927        IEM_MC_ADVANCE_RIP_AND_FINISH();
2928    } IEM_MC_ENDIF();
2929    IEM_MC_END();
2930}
2931
2932
2933/**
2934 * @opcode 0x77
 * JA/JNBE Jb - jump short (rel8) if CF=0 and ZF=0 (unsigned above).
 * Tests (CF|ZF) set and takes the jump in the else branch.
2935 */
2936FNIEMOP_DEF(iemOp_jnbe_Jb)
2937{
2938    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2939    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2940    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2941    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2942
2943    IEM_MC_BEGIN(0, 0);
2944    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2945        IEM_MC_ADVANCE_RIP_AND_FINISH();
2946    } IEM_MC_ELSE() {
2947        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2948    } IEM_MC_ENDIF();
2949    IEM_MC_END();
2950}
2951
2952
2953/**
2954 * @opcode 0x78
 * JS Jb - jump short (rel8) if the sign flag (SF) is set.
2955 */
2956FNIEMOP_DEF(iemOp_js_Jb)
2957{
2958    IEMOP_MNEMONIC(js_Jb, "js Jb");
2959    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2960    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2961    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2962
2963    IEM_MC_BEGIN(0, 0);
2964    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2965        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2966    } IEM_MC_ELSE() {
2967        IEM_MC_ADVANCE_RIP_AND_FINISH();
2968    } IEM_MC_ENDIF();
2969    IEM_MC_END();
2970}
2971
2972
2973/**
2974 * @opcode 0x79
 * JNS Jb - jump short (rel8) if the sign flag (SF) is clear.
 * Tests SF set and takes the jump in the else branch.
2975 */
2976FNIEMOP_DEF(iemOp_jns_Jb)
2977{
2978    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2979    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2980    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2981    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
2982
2983    IEM_MC_BEGIN(0, 0);
2984    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2985        IEM_MC_ADVANCE_RIP_AND_FINISH();
2986    } IEM_MC_ELSE() {
2987        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
2988    } IEM_MC_ENDIF();
2989    IEM_MC_END();
2990}
2991
2992
2993/**
2994 * @opcode 0x7a
 * JP/JPE Jb - jump short (rel8) if the parity flag (PF) is set.
2995 */
2996FNIEMOP_DEF(iemOp_jp_Jb)
2997{
2998    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2999    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3000    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3001    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3002
3003    IEM_MC_BEGIN(0, 0);
3004    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3005        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3006    } IEM_MC_ELSE() {
3007        IEM_MC_ADVANCE_RIP_AND_FINISH();
3008    } IEM_MC_ENDIF();
3009    IEM_MC_END();
3010}
3011
3012
3013/**
3014 * @opcode 0x7b
 * JNP/JPO Jb - jump short (rel8) if the parity flag (PF) is clear.
 * Tests PF set and takes the jump in the else branch.
3015 */
3016FNIEMOP_DEF(iemOp_jnp_Jb)
3017{
3018    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3019    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3020    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3021    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3022
3023    IEM_MC_BEGIN(0, 0);
3024    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3025        IEM_MC_ADVANCE_RIP_AND_FINISH();
3026    } IEM_MC_ELSE() {
3027        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3028    } IEM_MC_ENDIF();
3029    IEM_MC_END();
3030}
3031
3032
3033/**
3034 * @opcode 0x7c
 * JL/JNGE Jb - jump short (rel8) if SF != OF (signed less).
3035 */
3036FNIEMOP_DEF(iemOp_jl_Jb)
3037{
3038    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3039    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3040    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3041    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3042
3043    IEM_MC_BEGIN(0, 0);
3044    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3045        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3046    } IEM_MC_ELSE() {
3047        IEM_MC_ADVANCE_RIP_AND_FINISH();
3048    } IEM_MC_ENDIF();
3049    IEM_MC_END();
3050}
3051
3052
3053/**
3054 * @opcode 0x7d
 * JGE/JNL Jb - jump short (rel8) if SF == OF (signed greater-or-equal).
 * Tests SF != OF and takes the jump in the else branch.
3055 */
3056FNIEMOP_DEF(iemOp_jnl_Jb)
3057{
3058    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3059    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3060    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3061    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3062
3063    IEM_MC_BEGIN(0, 0);
3064    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3065        IEM_MC_ADVANCE_RIP_AND_FINISH();
3066    } IEM_MC_ELSE() {
3067        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3068    } IEM_MC_ENDIF();
3069    IEM_MC_END();
3070}
3071
3072
3073/**
3074 * @opcode 0x7e
 * JLE/JNG Jb - jump short (rel8) if ZF=1 or SF != OF (signed less-or-equal).
3075 */
3076FNIEMOP_DEF(iemOp_jle_Jb)
3077{
3078    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3079    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3080    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3081    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3082
3083    IEM_MC_BEGIN(0, 0);
3084    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3085        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3086    } IEM_MC_ELSE() {
3087        IEM_MC_ADVANCE_RIP_AND_FINISH();
3088    } IEM_MC_ENDIF();
3089    IEM_MC_END();
3090}
3091
3092
3093/**
3094 * @opcode 0x7f
 * JG/JNLE Jb - jump short (rel8) if ZF=0 and SF == OF (signed greater).
 * Tests the JLE condition and takes the jump in the else branch.
3095 */
3096FNIEMOP_DEF(iemOp_jnle_Jb)
3097{
3098    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3099    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3100    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3102
3103    IEM_MC_BEGIN(0, 0);
3104    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3105        IEM_MC_ADVANCE_RIP_AND_FINISH();
3106    } IEM_MC_ELSE() {
3107        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3108    } IEM_MC_ENDIF();
3109    IEM_MC_END();
3110}
3111
3112
3113/**
3114 * @opcode 0x80
 * Group 1 Eb,Ib - ADD/OR/ADC/SBB/AND/SUB/XOR/CMP on a byte operand with an
 * imm8; the ModRM reg field selects the operation via g_apIemImplGrp1.
3115 */
3116FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3117{
3118    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3119    switch (IEM_GET_MODRM_REG_8(bRm))
3120    {
3121        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3122        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or  Eb,Ib"); break;
3123        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3124        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3125        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3126        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3127        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3128        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3129    }
3130    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];
3131
3132    if (IEM_IS_MODRM_REG_MODE(bRm))
3133    {
3134        /* register target */
3135        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3136        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3137        IEM_MC_BEGIN(3, 0);
3138        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
3139        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
3140        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
3141
3142        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
3143        IEM_MC_REF_EFLAGS(pEFlags);
3144        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3145
3146        IEM_MC_ADVANCE_RIP_AND_FINISH();
3147        IEM_MC_END();
3148    }
3149    else
3150    {
3151        /* memory target */
        /* CMP is the only group member without a locked variant; it only
           reads the destination, so map it read-only. */
3152        uint32_t fAccess;
3153        if (pImpl->pfnLockedU8)
3154            fAccess = IEM_ACCESS_DATA_RW;
3155        else /* CMP */
3156            fAccess = IEM_ACCESS_DATA_R;
3157        IEM_MC_BEGIN(3, 2);
3158        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
3159        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
3160        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
3161
        /* cbImm=1: the imm8 follows the ModRM displacement. */
3162        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3163        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3164        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        /* LOCK is only legal for the writing forms; reject it for CMP. */
3165        if (pImpl->pfnLockedU8)
3166            IEMOP_HLP_DONE_DECODING();
3167        else
3168            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3169
3170        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3171        IEM_MC_FETCH_EFLAGS(EFlags);
3172        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3173            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3174        else
3175            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3176
3177        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3178        IEM_MC_COMMIT_EFLAGS(EFlags);
3179        IEM_MC_ADVANCE_RIP_AND_FINISH();
3180        IEM_MC_END();
3181    }
3182}
3183
3184
3185/**
3186 * @opcode 0x81
3187 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 word/dword/qword ops with a z-sized immediate (16/32 bits;
       sign-extended to 64 bits in long mode).  The ModR/M reg field selects
       the operation via the g_apIemImplGrp1 dispatch table. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked variant (read-only destination); use that
                   to pick the mapping mode. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Two immediate bytes follow, hence cbImm=2. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode, but only
                   when the destination is actually written - CMP doesn't write. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Four immediate bytes follow, hence cbImm=4. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register target */
                /* Iz is at most 32 bits; sign-extend it to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): unlike the 16/32-bit paths this assigns u64Src
                   after the decoding-done helpers; order looks harmless but
                   differs - confirm intentional. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3376
3377
3378/**
3379 * @opcode 0x82
3380 * @opmnemonic grp1_82
3381 * @opgroup og_groups
3382 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an undocumented alias of 0x80 that is invalid in
       64-bit mode; otherwise it decodes identically. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
3388
3389
3390/**
3391 * @opcode 0x83
3392 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 word/dword/qword ops with a sign-extended byte immediate.
       The ModR/M reg field selects the operation via g_apIemImplGrp1. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[IEM_GET_MODRM_REG_8(bRm)];

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the byte immediate to the
                   operand width, as the Ib encoding requires. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half; skip for CMP which
                   does not write the destination. */
                if (pImpl != &g_iemAImpl_cmp) /* TEST won't get here, no need to check for it. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP is the only group member without a locked variant; it only
           reads the destination so a read-only mapping suffices. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows, hence cbImm=1. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3578
3579
3580/**
3581 * @opcode 0x84
3582 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - byte AND that only updates flags; delegated to the
       generic rm,r8 binary-operator helper with the 'test' impl table. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
3589
3590
3591/**
3592 * @opcode 0x85
3593 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - word/dword/qword AND that only updates flags; delegated
       to the generic rm,rv binary-operator helper with the 'test' table. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
3600
3601
3602/**
3603 * @opcode 0x86
3604 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb - byte register/memory exchange.  The memory form is
       implicitly locked on real hardware unless lock-disregarding is
       configured. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with memory is always atomic unless the VM is configured to
           disregard LOCK semantics. */
        if (!pVCpu->iem.s.fDisregardLock)
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3653
3654
3655/**
3656 * @opcode 0x87
3657 */
3658FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3659{
3660 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3662
3663 /*
3664 * If rm is denoting a register, no more instruction bytes.
3665 */
3666 if (IEM_IS_MODRM_REG_MODE(bRm))
3667 {
3668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3669
3670 switch (pVCpu->iem.s.enmEffOpSize)
3671 {
3672 case IEMMODE_16BIT:
3673 IEM_MC_BEGIN(0, 2);
3674 IEM_MC_LOCAL(uint16_t, uTmp1);
3675 IEM_MC_LOCAL(uint16_t, uTmp2);
3676
3677 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3678 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3679 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3680 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3681
3682 IEM_MC_ADVANCE_RIP_AND_FINISH();
3683 IEM_MC_END();
3684 return VINF_SUCCESS;
3685
3686 case IEMMODE_32BIT:
3687 IEM_MC_BEGIN(0, 2);
3688 IEM_MC_LOCAL(uint32_t, uTmp1);
3689 IEM_MC_LOCAL(uint32_t, uTmp2);
3690
3691 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3692 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3693 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3694 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3695
3696 IEM_MC_ADVANCE_RIP_AND_FINISH();
3697 IEM_MC_END();
3698 return VINF_SUCCESS;
3699
3700 case IEMMODE_64BIT:
3701 IEM_MC_BEGIN(0, 2);
3702 IEM_MC_LOCAL(uint64_t, uTmp1);
3703 IEM_MC_LOCAL(uint64_t, uTmp2);
3704
3705 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
3706 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
3707 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
3708 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 return VINF_SUCCESS;
3713
3714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3715 }
3716 }
3717 else
3718 {
3719 /*
3720 * We're accessing memory.
3721 */
3722 switch (pVCpu->iem.s.enmEffOpSize)
3723 {
3724/** @todo the register must be committed separately! */
3725 case IEMMODE_16BIT:
3726 IEM_MC_BEGIN(2, 2);
3727 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3728 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3730
3731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3732 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3733 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3734 if (!pVCpu->iem.s.fDisregardLock)
3735 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3736 else
3737 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3739
3740 IEM_MC_ADVANCE_RIP_AND_FINISH();
3741 IEM_MC_END();
3742 return VINF_SUCCESS;
3743
3744 case IEMMODE_32BIT:
3745 IEM_MC_BEGIN(2, 2);
3746 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3747 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3749
3750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3751 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3752 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3753 if (!pVCpu->iem.s.fDisregardLock)
3754 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3755 else
3756 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3758
3759 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3760 IEM_MC_ADVANCE_RIP_AND_FINISH();
3761 IEM_MC_END();
3762 return VINF_SUCCESS;
3763
3764 case IEMMODE_64BIT:
3765 IEM_MC_BEGIN(2, 2);
3766 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3767 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3769
3770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3771 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3772 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
3773 if (!pVCpu->iem.s.fDisregardLock)
3774 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3775 else
3776 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3777 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3778
3779 IEM_MC_ADVANCE_RIP_AND_FINISH();
3780 IEM_MC_END();
3781 return VINF_SUCCESS;
3782
3783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3784 }
3785 }
3786}
3787
3788
3789/**
3790 * @opcode 0x88
3791 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV Eb,Gb - store byte register into r/m8. */
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3828
3829
3830/**
3831 * @opcode 0x89
3832 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV Ev,Gv - store word/dword/qword register into r/m, selected by the
       effective operand size. */
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3924
3925
3926/**
3927 * @opcode 0x8a
3928 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV Gb,Eb - load byte register from r/m8. */
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3964
3965
3966/**
3967 * @opcode 0x8b
3968 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV Gv,Ev - load word/dword/qword register from r/m, selected by the
       effective operand size. */
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4060
4061
4062/**
4063 * opcode 0x63
4064 * @todo Table fixme
4065 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* Opcode 0x63 is mode-dependent: ARPL outside 64-bit mode, MOVSXD in
       64-bit mode.  With a non-64-bit effective operand size MOVSXD degrades
       to a plain MOV Gv,Ev. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4074
4075
4076/**
4077 * @opcode 0x8c
4078 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV Ev,Sw - store a segment register selector value into r/m. */
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4152
4153
4154
4155
4156/**
4157 * @opcode 0x8d
4158 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* LEA Gv,M - store the effective address (no memory access performed).
       The register form is invalid. */
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4205
4206
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - loads a segment register from a 16-bit register or memory
 * operand.  Loading CS this way is invalid (\#UD), as are reg values beyond
 * GS.  The actual load (incl. descriptor checks) is done by the
 * iemCImpl_load_SReg worker.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    /* NOTE(review): IEM_MC_CALL_CIMPL_2 appears to return in regular builds,
       making this unreachable there; presumably it is still needed for
       TST_IEM_CHECK_MC builds -- confirm before removing. */
    return VINF_SUCCESS;
}
4263
4264
/** Opcode 0x8f /0.
 * pop Ev - pops a word/dword/qword off the stack into a register or memory
 * operand.  Implemented directly in C (not IEM_MC) because RSP must be
 * updated before the effective address is calculated. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The last argument is the RSP displacement to apply during the address
       calculation, i.e. the size of the value being popped. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit the new RSP after both the pop and the store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4358
4359
/**
 * @opcode 0x8f
 *
 * Dispatches between pop Ev (modrm.reg == 0) and the AMD XOP prefix
 * (modrm.reg == 1..7, which encodes the inverted R/X/B bits and the mmmmm
 * opcode-map selector).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP may not be combined with opsize/rep/lock/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in bits 7..5 of the second XOP byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm: opcode map selector. */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4422
4423
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps the given general register (after OR'ing in REX.B) with rAX at the
 * current effective operand size.  Used by opcodes 0x90 thru 0x97.
 *
 * @param iReg  The low 3 bits of the register to exchange with rAX.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index to R8..R15. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4473
4474
4475/**
4476 * @opcode 0x90
4477 */
4478FNIEMOP_DEF(iemOp_nop)
4479{
4480 /* R8/R8D and RAX/EAX can be exchanged. */
4481 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4482 {
4483 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4484 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4485 }
4486
4487 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4488 {
4489 IEMOP_MNEMONIC(pause, "pause");
4490#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4491 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4492 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4493#endif
4494#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4495 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4496 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4497#endif
4498 }
4499 else
4500 IEMOP_MNEMONIC(nop, "nop");
4501 IEM_MC_BEGIN(0, 0);
4502 IEM_MC_ADVANCE_RIP_AND_FINISH();
4503 IEM_MC_END();
4504}
4505
4506
/**
 * @opcode 0x91
 * xchg rCX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4515
4516
/**
 * @opcode 0x92
 * xchg rDX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4525
4526
/**
 * @opcode 0x93
 * xchg rBX,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4535
4536
4537/**
4538 * @opcode 0x94
4539 */
4540FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4541{
4542 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4543 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4544}
4545
4546
/**
 * @opcode 0x95
 * xchg rBP,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4555
4556
/**
 * @opcode 0x96
 * xchg rSI,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4565
4566
/**
 * @opcode 0x97
 * xchg rDI,rAX - thin wrapper around the common xchg-with-rAX helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4575
4576
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extends AL/AX/EAX into AX/EAX/RAX depending on the
 * effective operand size.  Implemented by testing the sign bit and then
 * OR'ing in or AND'ing away the upper half of the register.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {        /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {       /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {       /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4624
4625
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extends AX/EAX/RAX into DX/EDX/RDX depending on the
 * effective operand size, by filling rDX with all ones or all zeros
 * according to the sign bit of rAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {       /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {       /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {       /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4673
4674
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate selector:offset pointer.
 * Invalid in 64-bit mode.  The heavy lifting (stack pushes, mode checks)
 * is deferred to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel); /* selector follows the offset */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4693
4694
/** Opcode 0x9b. (aka fwait)
 * Checks for pending unmasked FPU exceptions / device-not-available
 * conditions and raises them if needed; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4707
4708
/**
 * @opcode 0x9c
 *
 * pushf Fv - pushes the flags register; deferred to iemCImpl_pushf.
 * Default operand size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4719
4720
/**
 * @opcode 0x9d
 *
 * popf Fv - pops the flags register; deferred to iemCImpl_popf.
 * Default operand size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4731
4732
/**
 * @opcode 0x9e
 *
 * sahf - stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 is
 * forced to 1).  In 64-bit mode the instruction is only valid when the CPU
 * reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* high-byte register AH shares index with xSP in U8 fetches */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); /* only these bits come from AH */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the flags above the low byte intact */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4756
4757
/**
 * @opcode 0x9f
 *
 * lahf - loads the low byte of EFLAGS into AH.  In 64-bit mode the
 * instruction is only valid when the CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* high-byte register AH shares index with xSP in U8 stores */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4775
4776
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The width of the immediate offset follows the effective address mode
 * (16/32/64 bits) and is zero-extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4801
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - loads AL from the byte at the immediate memory offset
 * (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4824
4825
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - loads rAX (at the effective operand size) from the immediate
 * memory offset (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4873
4874
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - stores AL at the immediate memory offset (relative to the
 * effective segment).
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
4897
4898
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - stores rAX (at the effective operand size) at the immediate
 * memory offset (relative to the effective segment).
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4946
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs step: loads ValBits from iEffSeg:rSI, stores to
 * ES:rDI, then steps both index registers up or down by the element size
 * according to EFLAGS.DF.  AddrBits selects which index register width is
 * used (zero-extended to 64-bit for the access). */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
4965
/**
 * @opcode 0xa4
 *
 * movsb Xb,Yb - copies a byte from iEffSeg:rSI to ES:rDI.  REP/REPNE
 * variants are handled by C workers selected on the effective address mode.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* F2 and F3 both act as plain rep for movs */
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5000
5001
/**
 * @opcode 0xa5
 *
 * movsw/movsd/movsq Xv,Yv - copies a word/dword/qword from iEffSeg:rSI to
 * ES:rDI.  REP/REPNE variants are dispatched to C workers selected by the
 * effective operand and address sizes; the non-rep path shares the
 * IEM_MOVS_CASE macro with movsb.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* All inner cases return, so there is no fall-through into the 64-bit case below. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5085
5086#undef IEM_MOVS_CASE
5087
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps step: loads ValBits from iEffSeg:rSI and ES:rDI,
 * compares them via the cmp worker (setting EFLAGS), then steps both index
 * registers up or down by the element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5114
/**
 * @opcode 0xa6
 *
 * cmpsb Xb,Yb - compares the byte at iEffSeg:rSI with the byte at ES:rDI.
 * REPE (F3) and REPNE (F2) variants are dispatched to separate C workers
 * selected on the effective address mode.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5160
5161
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - compares the operand-sized item read via rSI in the
 * effective segment (iEffSeg) against the one read via rDI, updating EFLAGS
 * and stepping both index registers (direction per EFLAGS.DF, handled by
 * IEM_CMPS_CASE).  REPE/REPNE forms are deferred to C workers selected by
 * operand size and address size.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5280
5281#undef IEM_CMPS_CASE
5282
/**
 * @opcode 0xa8
 *
 * TEST AL,imm8 - non-destructive AND, EFLAGS only.  Defers to the common
 * AL,Ib binary-operator helper with the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
5292
5293
/**
 * @opcode 0xa9
 *
 * TEST rAX,immZ - non-destructive AND, EFLAGS only.  Defers to the common
 * rAX,Iz binary-operator helper with the 'test' implementation table.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
5303
5304
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeated STOS body for one value-size/address-size pair:
 * stores the low ValBits of rAX at ES:[rDI], then adds or subtracts
 * ValBits/8 from rDI depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5320
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL at ES:[rDI] and steps rDI (direction per EFLAGS.DF).
 * REP forms (REPZ and REPNZ are treated alike for STOS) are deferred to
 * C workers selected by address size.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5355
5356
/**
 * @opcode 0xab
 *
 * STOSW/STOSD/STOSQ - stores the low operand-sized part of rAX at ES:[rDI]
 * and steps rDI (direction per EFLAGS.DF, handled by IEM_STOS_CASE).
 * REP forms (REPZ and REPNZ are treated alike for STOS) are deferred to
 * C workers selected by operand size and address size.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5440
5441#undef IEM_STOS_CASE
5442
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeated LODS body for one value-size/address-size pair:
 * loads ValBits from iEffSeg:[rSI] into the low part of rAX, then adds or
 * subtracts ValBits/8 from rSI depending on EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5458
/**
 * @opcode 0xac
 *
 * LODSB - loads the byte at iEffSeg:[rSI] into AL and steps rSI (direction
 * per EFLAGS.DF).  REP forms are deferred to C workers selected by address
 * size.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5493
5494
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads the operand-sized item at iEffSeg:[rSI] into
 * the low part of rAX and steps rSI (direction per EFLAGS.DF, handled by
 * IEM_LODS_CASE).  REP forms are deferred to C workers selected by operand
 * size and address size.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5578
5579#undef IEM_LODS_CASE
5580
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeated SCAS body for one value-size/address-size pair:
 * fetches ValBits from ES:[rDI], runs the 'cmp' assembly worker against the
 * low part of rAX (updating EFLAGS only), then adds or subtracts ValBits/8
 * from rDI depending on EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()
5602
5603/**
5604 * @opcode 0xae
5605 */
5606FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5607{
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5609
5610 /*
5611 * Use the C implementation if a repeat prefix is encountered.
5612 */
5613 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5614 {
5615 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5616 switch (pVCpu->iem.s.enmEffAddrMode)
5617 {
5618 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5619 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5620 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5622 }
5623 }
5624 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5625 {
5626 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5627 switch (pVCpu->iem.s.enmEffAddrMode)
5628 {
5629 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5630 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5631 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5633 }
5634 }
5635 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5636
5637 /*
5638 * Sharing case implementation with stos[wdq] below.
5639 */
5640 switch (pVCpu->iem.s.enmEffAddrMode)
5641 {
5642 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5643 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5644 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5646 }
5647}
5648
5649
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compares the low operand-sized part of rAX with the
 * item at ES:[rDI], updating EFLAGS only, and steps rDI (direction per
 * EFLAGS.DF, handled by IEM_SCAS_CASE).  REPE and REPNE forms are deferred
 * to C workers selected by operand size and address size.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case returns, so control cannot fall through */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing not encodable with 64-bit operand size */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5766
5767#undef IEM_SCAS_CASE
5768
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit general
 * register.
 *
 * @param   iReg    The general register index (X86_GREG_XXX, with any REX.B
 *                  extension already merged in by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5783
5784
5785/**
5786 * @opcode 0xb0
5787 */
5788FNIEMOP_DEF(iemOp_mov_AL_Ib)
5789{
5790 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5791 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5792}
5793
5794
5795/**
5796 * @opcode 0xb1
5797 */
5798FNIEMOP_DEF(iemOp_CL_Ib)
5799{
5800 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5801 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5802}
5803
5804
5805/**
5806 * @opcode 0xb2
5807 */
5808FNIEMOP_DEF(iemOp_DL_Ib)
5809{
5810 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5811 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5812}
5813
5814
5815/**
5816 * @opcode 0xb3
5817 */
5818FNIEMOP_DEF(iemOp_BL_Ib)
5819{
5820 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5821 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5822}
5823
5824
5825/**
5826 * @opcode 0xb4
5827 */
5828FNIEMOP_DEF(iemOp_mov_AH_Ib)
5829{
5830 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5831 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5832}
5833
5834
5835/**
5836 * @opcode 0xb5
5837 */
5838FNIEMOP_DEF(iemOp_CH_Ib)
5839{
5840 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5841 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5842}
5843
5844
5845/**
5846 * @opcode 0xb6
5847 */
5848FNIEMOP_DEF(iemOp_DH_Ib)
5849{
5850 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5851 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5852}
5853
5854
5855/**
5856 * @opcode 0xb7
5857 */
5858FNIEMOP_DEF(iemOp_BH_Ib)
5859{
5860 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5861 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5862}
5863
5864
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized by the effective operand size (16, 32 or a
 * full 64 bits) and stores it into the given general register.
 *
 * @param   iReg    The general register index (X86_GREG_XXX, with any REX.B
 *                  extension already merged in by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5912
5913
5914/**
5915 * @opcode 0xb8
5916 */
5917FNIEMOP_DEF(iemOp_eAX_Iv)
5918{
5919 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5920 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5921}
5922
5923
5924/**
5925 * @opcode 0xb9
5926 */
5927FNIEMOP_DEF(iemOp_eCX_Iv)
5928{
5929 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5930 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5931}
5932
5933
5934/**
5935 * @opcode 0xba
5936 */
5937FNIEMOP_DEF(iemOp_eDX_Iv)
5938{
5939 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5940 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5941}
5942
5943
5944/**
5945 * @opcode 0xbb
5946 */
5947FNIEMOP_DEF(iemOp_eBX_Iv)
5948{
5949 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5950 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5951}
5952
5953
5954/**
5955 * @opcode 0xbc
5956 */
5957FNIEMOP_DEF(iemOp_eSP_Iv)
5958{
5959 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5960 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5961}
5962
5963
5964/**
5965 * @opcode 0xbd
5966 */
5967FNIEMOP_DEF(iemOp_eBP_Iv)
5968{
5969 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5970 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5971}
5972
5973
5974/**
5975 * @opcode 0xbe
5976 */
5977FNIEMOP_DEF(iemOp_eSI_Iv)
5978{
5979 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5980 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5981}
5982
5983
5984/**
5985 * @opcode 0xbf
5986 */
5987FNIEMOP_DEF(iemOp_eDI_Iv)
5988{
5989 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5990 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5991}
5992
5993
/**
 * @opcode 0xc0
 *
 * Group 2 - rotate/shift Eb by an imm8 count (186+).  The ModR/M /r field
 * selects the operation; /6 is an invalid encoding.  Register and memory
 * destinations get separate microcode paths.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one imm8 byte follows the ModR/M bytes */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6054
6055
/**
 * @opcode 0xc1
 *
 * Group 2 - rotate/shift Ev by an imm8 count (186+).  The ModR/M /r field
 * selects the operation; /6 is an invalid encoding.  Register and memory
 * destinations each dispatch on the effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper register half */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one imm8 byte follows the ModR/M bytes */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one imm8 byte follows the ModR/M bytes */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one imm8 byte follows the ModR/M bytes */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6197
6198
/**
 * @opcode 0xc2
 *
 * Near return, popping an additional Iw bytes of call arguments off the stack
 * after the return address.  Dispatches to one C worker per effective operand
 * size so each worker can be specialized for its stack-slot width.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    /* The imm16 byte count follows the opcode and must be fetched before
       decoding is declared complete. */
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near ret defaults to 64-bit operand size in long mode */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6219
6220
/**
 * @opcode 0xc3
 *
 * Plain near return (no argument bytes popped).  Like retn Iw, this uses a
 * dedicated C worker per effective operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* near ret defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
        case IEMMODE_32BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
        case IEMMODE_64BIT:
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6240
6241
/**
 * @opcode 0xc4
 *
 * Either LES Gv,Mp or the 3-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* VEX stores R, X, B and vvvv inverted; un-invert while extracting. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy/compatibility mode with MOD != 3: plain LES. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6311
6312
/**
 * @opcode 0xc5
 *
 * Either LDS Gv,Mp or the 2-byte VEX prefix, depending on mode and MODRM.MOD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R and vvvv are stored inverted in the VEX byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte form always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy/compatibility mode with MOD != 3: plain LDS. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6356
6357
/**
 * @opcode 0xc6
 *
 * Group 11: mov Eb,Ib is the only valid encoding (/0); all other /reg values
 * raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The effective address must be calculated before the immediate is
           fetched; the trailing '1' tells the calculation that one immediate
           byte follows the ModRM/SIB/displacement bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6391
6392
/**
 * @opcode 0xc7
 *
 * Group 11: mov Ev,Iz is the only valid encoding (/0); all other /reg values
 * raise \#UD.  In 64-bit mode the imm32 is sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is imm32 even with REX.W; sign-extend it to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Effective address first; the '2' is the number of trailing
                   immediate bytes still to be fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);  /* imm32 even for 64-bit op size */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6480
6481
6482
6483
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of Iw bytes with Ib nesting levels.
 * 186+ instruction; the heavy lifting is deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6497
6498
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame set up by ENTER.  186+ instruction;
 * deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6510
6511
/**
 * @opcode 0xca
 *
 * Far return, popping an additional Iw bytes of call arguments.  Deferred to
 * the common iemCImpl_retf worker with the byte count as the second argument.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6523
6524
/**
 * @opcode 0xcb
 *
 * Plain far return - same worker as retf Iw, with zero bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6535
6536
6537/**
6538 * @opcode 0xcc
6539 */
6540FNIEMOP_DEF(iemOp_int3)
6541{
6542 IEMOP_MNEMONIC(int3, "int3");
6543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6544 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6545}
6546
6547
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6558
6559
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF as a software interrupt if EFLAGS.OF is set; invalid in
 * 64-bit mode.  The conditional-on-OF logic lives in iemCImpl_int.
 *
 * NOTE(review): unlike the neighboring handlers this one has no
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() call - confirm whether a LOCK
 * prefix check is intentionally omitted here.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT,    enmInt,     /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6575
6576
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return; all mode/privilege handling is in iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6586
6587
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate of Eb by a constant count of 1.  MODRM.REG selects
 * the operation (rol/ror/rcl/rcr/shl/shr/sar); /6 is undefined and raises
 * \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the implementation table by MODRM.REG (the EFLAGS-behavior
       selector picks the variant matching the target CPU's flag quirks). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6644
6645
6646
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate of Ev by a constant count of 1.  MODRM.REG selects
 * the operation; /6 raises \#UD.  One register and one memory body per
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the implementation table by MODRM.REG. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, shift in place, commit result and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6780
6781
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate of Eb by the count in CL.  MODRM.REG selects the
 * operation; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the implementation table by MODRM.REG. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* the shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6840
6841
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of Ev by the count in CL.  MODRM.REG selects the
 * operation; /6 raises \#UD.  One register and one memory body per effective
 * operand size.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Select the implementation table by MODRM.REG. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map read-write, shift in place, commit result and flags. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6981
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply.  An immediate divisor of zero
 * raises \#DE during decoding; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)  /* aam 0 divides by zero */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6995
6996
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division; invalid in 64-bit mode.
 * (No divide involved here, so a zero immediate is fine.)
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7008
7009
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL to 0xff if CF is set, else to 0x00; invalid
 * in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7028
7029
/**
 * @opcode 0xd7
 *
 * XLAT - AL = [xBX + AL], one body per effective address size.  The 16/32-bit
 * paths use address-size specific fetch macros so the xBX+AL sum wraps at the
 * effective address width.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);  /* AL, zero-extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7078
7079
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/FPU exceptions as appropriate; if either of the two registers
 * is empty the assembly worker is skipped and a stack underflow is recorded
 * against ST0 instead.
 *
 * @param   bRm         Mod R/M byte (R/M selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);  /* result always lands in ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7110
7111
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Only the FSW is updated from the assembly worker; no register value is
 * stored.  On an empty ST0/STn, underflow is reported without a destination
 * register (UINT8_MAX).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* flags-only: no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7142
7143
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Like iemOpHlpFpuNoStore_st0_stN, but pops the register stack after the
 * FSW update — also on the underflow path.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7174
7175
/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    /* fadd st0,stN: ST(0) = ST(0) + ST(i); delegated to the common st0/stN worker. */
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7182
7183
/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    /* fmul st0,stN: ST(0) = ST(0) * ST(i); delegated to the common st0/stN worker. */
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7190
7191
/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    /* fcom st0,stN: compare ST(0) with ST(i), updating only FSW condition codes. */
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7198
7199
/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    /* fcomp st0,stN: same compare as fcom (same assembly worker), then pop. */
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7206
7207
/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    /* fsub st0,stN: ST(0) = ST(0) - ST(i). */
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7214
7215
/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    /* fsubr st0,stN: reversed subtract, ST(0) = ST(i) - ST(0). */
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7222
7223
/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    /* fdiv st0,stN: ST(0) = ST(0) / ST(i). */
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7230
7231
/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    /* fdivr st0,stN: reversed divide, ST(0) = ST(i) / ST(0). */
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7238
7239
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The effective address is calculated before the decoding-done check, and the
 * 32-bit real is fetched before the FPU usage is prepared — faults on the
 * memory operand are therefore raised before any FPU state is touched.
 *
 * @param   bRm         Mod R/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result replaces ST0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7275
7276
/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    /* fadd st0,m32r: ST(0) = ST(0) + 32-bit real memory operand. */
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7283
7284
/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    /* fmul st0,m32r: ST(0) = ST(0) * 32-bit real memory operand. */
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7291
7292
/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    /* fcom st0,m32r: compare ST(0) with a 32-bit real; flags (FSW) only, no store.
       Open-coded rather than using a common worker because the FSW update needs
       the memory operand (segment + address) recorded as FPU data pointer. */
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7324
7325
/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    /* fcomp st0,m32r: identical to fcom st0,m32r (same assembly worker), but
       pops the register stack after the FSW update / underflow report. */
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7357
7358
/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    /* fsub st0,m32r: ST(0) = ST(0) - 32-bit real memory operand. */
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7365
7366
/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    /* fsubr st0,m32r: reversed subtract, ST(0) = m32r - ST(0). */
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7373
7374
/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    /* fdiv st0,m32r: ST(0) = ST(0) / 32-bit real memory operand. */
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7381
7382
/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    /* fdivr st0,m32r: reversed divide, ST(0) = m32r / ST(0). */
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7389
7390
/**
 * @opcode 0xd9
 *
 * First x87 escape byte (0xd8): dispatches on the ModR/M reg field, with the
 * register form (mod == 3) operating on ST0/STn and the memory form on a
 * 32-bit real operand.  The FPU opcode word (FOP) is recorded before
 * dispatching.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 3 bits of the escape byte combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7430
7431
/** Opcode 0xd9 /0 mem32real
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    /* fld m32r: convert a 32-bit real from memory to 80-bit and push it. */
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST(7) must be free for the push, else stack overflow */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7463
7464
/** Opcode 0xd9 !11/2 mem32real */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    /* fst m32r: store ST0 to memory as a 32-bit real.  The destination is
       mapped for writing up front; on an empty ST0 with the invalid-operation
       exception masked (FCW.IM), a negative QNaN is stored instead. */
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7498
7499
/** Opcode 0xd9 !11/3 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    /* fstp m32r: like fst m32r, but pops the register stack afterwards
       (also on the underflow path). */
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7533
7534
/** Opcode 0xd9 !11/4 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    /* fldenv m14/28byte: load the FPU environment; the heavy lifting (14 vs
       28 byte format depending on the effective operand size) is done by the
       C implementation iemCImpl_fldenv. */
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7552
7553
7554/** Opcode 0xd9 !11/5 */
7555FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7556{
7557 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7558 IEM_MC_BEGIN(1, 1);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7560 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7563 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7564 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7565 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7566 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7567 IEM_MC_END();
7568 return VINF_SUCCESS;
7569}
7570
7571
/** Opcode 0xd9 !11/6 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    /* fnstenv m14/m28byte: store the FPU environment (no-wait form); format
       selection by operand size is handled in iemCImpl_fnstenv. */
    /* NOTE(review): the stats mnemonic says "fstenv" while the handler is the
       no-wait fnstenv form — presumably intentional for stats naming; confirm. */
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7589
7590
/** Opcode 0xd9 !11/7 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    /* fnstcw m2byte: store the current FPU control word to memory
       (no-wait form); simple enough to do entirely in microcode. */
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7607
7608
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    /* fnop: does no arithmetic, but still checks for #NM / pending #MF and
       updates the FPU opcode/instruction pointers. */
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
7625
7626
/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    /* fld stN: push a copy of ST(i) onto the stack; empty source registers
       yield a push-underflow report instead. */
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7652
7653
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    /* fxch stN: swap ST(0) and ST(i).  The underflow case (either register
       empty) is handed off to the C implementation iemCImpl_fxch_underflow. */
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm))
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2); /* C1 set on success */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7682
7683
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    /* fstp st0,stN: copy ST(0) into ST(i) and pop.  The iDstReg == 0 case is
       special-cased as it amounts to just popping ST0 (a de-facto
       'ffreep st0'), needing no value copy. */
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* no store needed, just pop */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7729
7730
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Reports stack underflow on ST0 when it is empty instead of calling the
 * assembly worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7759
7760
/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    /* fchs: change the sign of ST(0). */
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7767
7768
/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    /* fabs: ST(0) = |ST(0)|. */
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7775
7776
/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    /* ftst: compare ST(0) against 0.0, updating only the FSW condition codes. */
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* flags-only: no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7801
7802
/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    /* fxam: classify ST(0) into the FSW condition codes.  Note that unlike
       most ST0 operations there is no empty-register check here — the
       register is referenced unconditionally since fxam also classifies an
       empty ST(0) (the assembly worker handles that case). */
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7824
7825
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * ST(7) must be free for the push; otherwise stack (push) overflow is
 * signalled.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7852
7853
/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    /* fld1: push the constant +1.0. */
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7860
7861
/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    /* fldl2t: push the constant log2(10). */
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7868
7869
/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    /* fldl2e: push the constant log2(e). */
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7876
/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    /* fldpi: push the constant pi. */
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7883
7884
/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    /* fldlg2: push the constant log10(2). */
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7891
/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    /* fldln2: push the constant ln(2). */
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7898
7899
/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    /* fldz: push the constant +0.0. */
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7906
7907
/** Opcode 0xd9 0xf0.
 *
 * f2xm1: ST(0) = 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7921
7922
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: pr80Value1 is STn, pr80Value2 is ST0 — the reverse
 * of iemOpHlpFpu_st0_stN.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7954
7955
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    /* fyl2x: ST(1) = ST(1) * log2(ST(0)), then pop — encoded as stN=1. */
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7962
7963
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * The assembly worker produces an IEMFPURESULTTWO (two values + FSW); the
 * pair is committed with a replace-and-push.  Underflow on an empty ST0 is
 * reported with the push variant.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
7992
7993
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* fptan: replace ST(0) with its partial tangent result and push the second output. */
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8000
8001
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* fpatan: ST(1) = arctan(ST(1)/ST(0)), then pop — encoded as stN=1. */
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
8008
8009
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* fxtract: split ST(0) into exponent (replaces ST0) and significand (pushed). */
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
8016
8017
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* fprem1: IEEE partial remainder, ST(0) = ST(0) REM ST(1) — encoded as stN=1. */
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8024
8025
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* fdecstp: decrement the FPU stack top pointer (TOP), no data moved. */
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8047
8048
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* fincstp: increment the FPU stack top pointer (TOP), no data moved. */
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8070
8071
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* fprem: truncating partial remainder, ST(0) = ST(0) REM ST(1) — encoded as stN=1. */
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8078
8079
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* fyl2xp1: ST(1) = ST(1) * log2(ST(0)+1), then pop — encoded as stN=1. */
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8086
8087
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* fsqrt: ST(0) = sqrt(ST(0)). */
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8094
8095
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* fsincos: replace ST(0) with sin and push cos (two-output worker). */
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8102
8103
8104/** Opcode 0xd9 0xfc. */
8105FNIEMOP_DEF(iemOp_frndint)
8106{
8107 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
8108 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
8109}
8110
8111
8112/** Opcode 0xd9 0xfd. */
8113FNIEMOP_DEF(iemOp_fscale)
8114{
8115 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
8116 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
8117}
8118
8119
8120/** Opcode 0xd9 0xfe. */
8121FNIEMOP_DEF(iemOp_fsin)
8122{
8123 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
8124 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
8125}
8126
8127
8128/** Opcode 0xd9 0xff. */
8129FNIEMOP_DEF(iemOp_fcos)
8130{
8131 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
8132 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
8133}
8134
8135
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 opcodes with a register-form ModR/M byte in the
 * range 0xe0..0xff; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
8172
8173
/**
 * @opcode 0xd9
 *
 * Escape opcode 0xd9 decoder: fetches the ModR/M byte, records the FPU
 * opcode word, then dispatches on the reg field for either the register
 * form (ST(i) / constant-load / transcendental ops) or the memory form
 * (m32r loads/stores and environment/control-word ops).
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP is the low 3 bits of the escape opcode combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only d9 d0 (fnop) is defined in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 in register mode implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8218
8219
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copy ST(i) to ST0 if EFLAGS.CF is set; if either register is
       empty, signal FPU stack underflow on ST0 instead. */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copy ST(i) to ST0 if EFLAGS.ZF is set (same pattern as fcmovb). */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copy ST(i) to ST0 if CF or ZF is set. */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copy ST(i) to ST0 if PF is set (PF signals "unordered" after FCOMI). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8326
8327
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The worker only updates FSW (no register result is stored); on success the
 * stack is popped twice, and on an empty ST0/ST1 the underflow path also pops
 * twice.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8358
8359
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* Unordered compare ST0 with ST1, then pop both (flags-only worker). */
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8366
8367
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory, then either calls
 * the assembly worker and stores the result in ST0, or signals stack
 * underflow on ST0 when the register is empty.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8403
8404
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* ST0 += m32i (via common ST0/m32i worker). */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* ST0 *= m32i (via common ST0/m32i worker). */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8419
8420
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM: compare ST0 with a 32-bit signed integer from memory; only the
       FPU status word is updated (no register result, no pop). */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FDP/FDS are recorded from the memory operand along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8452
8453
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP: same compare as FICOM (shares iemAImpl_ficom_r80_by_i32), but
       the stack is popped once afterwards (the *_THEN_POP variants). */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8485
8486
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* ST0 -= m32i. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* ST0 = m32i - ST0 (reversed subtraction). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* ST0 /= m32i. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* ST0 = m32i / ST0 (reversed division). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8517
8518
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder: register forms are the FCMOVcc family (plus
 * FUCOMPP at da e9); memory forms are the 32-bit integer arithmetic and
 * compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only da e9 (fucompp) is defined in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8560
8561
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32i: convert a 32-bit signed integer from memory to R80 and push
       it; pushing requires ST(7) to be empty, otherwise stack overflow is
       signalled. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new top after the push, so it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8592
8593
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32i (SSE3): store ST0 to memory as int32 with truncation, then
       pop.  If ST0 is empty and FCW.IM is set, the integer indefinite value
       (INT32_MIN) is stored instead before signalling underflow. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for write up front so memory faults are
       raised before the FPU state is touched. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8627
8628
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32i: store ST0 to memory as int32 (rounded per FCW); the stack
       is NOT popped — contrast with fistp/fisttp below/above. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Masked underflow stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8662
8663
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32i: identical to FIST m32i (same iemAImpl_fist_r80_to_i32
       worker) except the stack is popped afterwards. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8697
8698
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80r: load an 80-bit real from memory and push it onto the stack;
       ST(7) must be empty or a stack overflow is signalled. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8729
8730
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80r: store ST0 to memory as an 80-bit real and pop.  On a masked
       empty-register underflow, the negative QNaN (real indefinite) is
       stored instead. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store needs the explicit-alignment mapping variant. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8764
8765
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB: copy ST(i) to ST0 if EFLAGS.CF is clear (negated-condition
       counterpart of fcmovb). */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE: copy ST(i) to ST0 if EFLAGS.ZF is clear. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE: copy ST(i) to ST0 if both CF and ZF are clear. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* Copies ST(i) to ST0 if EFLAGS.PF is clear ("not unordered").
       NOTE(review): Intel's mnemonic for this encoding is FCMOVNU; the
       double-n spelling is this file's local naming — do not rename without
       updating the dispatch in iemOp_EscF3. */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8872
8873
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* 8087-only interrupt-enable instruction; on later CPUs it is a nop
       apart from the #NM availability check. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* 8087-only interrupt-disable instruction; ignored on later CPUs (same
       nop treatment as fneni). */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8896
8897
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FSW exception flags without checking for pending
       FPU exceptions first (no IEM_MC_MAYBE_RAISE_FPU_XCPT here). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8911
8912
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU; deferred to the C implementation with
       exception checking disabled (the "n" = no-wait form). */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8920
8921
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* 80287-only; treated as a nop here (only the #NM check is done). */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* 80287XL-only; the nop path is compiled out because newer CPUs raise
       #UD for this encoding. */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8949
8950
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI: unordered compare ST0 with ST(i), setting EFLAGS; no pop.
       Shared C implementation with fcomi, selected by the aimpl pointer. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}


/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI: ordered compare ST0 with ST(i), setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8965
8966
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder: register forms are the negated FCMOVcc family,
 * the FN* control instructions (db e0..e5) and FUCOMI/FCOMI; memory forms
 * are the m32i loads/stores and the m80r load/store-pop.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Sub-dispatch on the full ModR/M byte for the db e0..e7 group. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9018
9019
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Note the operand direction: ST(i) is the first (destination) operand and
 * ST0 the second, matching the 0xdc "stN,st0" encodings.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9051
9052
9053/** Opcode 0xdc 11/0. */
9054FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
9055{
9056 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
9057 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
9058}
9059
9060
9061/** Opcode 0xdc 11/1. */
9062FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
9063{
9064 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
9065 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
9066}
9067
9068
9069/** Opcode 0xdc 11/4. */
9070FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
9071{
9072 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
9073 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
9074}
9075
9076
9077/** Opcode 0xdc 11/5. */
9078FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
9079{
9080 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
9081 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
9082}
9083
9084
9085/** Opcode 0xdc 11/6. */
9086FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
9087{
9088 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
9089 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
9090}
9091
9092
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - divide ST(i) by ST0, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9099
9100
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte (memory form, selects the m64 operand).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        /* Result goes to ST0; FDP/FDS get the memory operand address. */
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9135
9136
/** Opcode 0xdc !11/0.
 * FADD m64real - add the 64-bit memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9143
9144
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiply ST0 by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9151
9152
/** Opcode 0xdc !11/2.
 * FCOM m64real - compare ST0 with the 64-bit memory operand; only the FSW
 * condition codes are updated, no stack change. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination stack register to flag. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9184
9185
/** Opcode 0xdc !11/3.
 * FCOMP m64real - like FCOM m64real but pops the register stack afterwards
 * (uses the same comparison worker, differs only in the FSW commit/pop). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9217
9218
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtract the 64-bit memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9225
9226
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9233
9234
/** Opcode 0xdc !11/6.
 * FDIV m64real - divide ST0 by the 64-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9241
9242
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9249
9250
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc: register forms operate ST(i) <- op(ST(i), ST0); the
 * memory forms use a 64-bit real operand with ST0 as destination.
 * Dispatches on the ModR/M reg field; every case returns, so both switches
 * are exhaustive (see IEM_NOT_REACHED_DEFAULT_CASE_RET).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9289
9290
/** Opcode 0xdd !11/0.
 * FLD m64real - convert the 64-bit memory operand to R80 and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new ST0 after the push, so it
       must be empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9321
9322
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST0 as a 64-bit integer with truncation, then pop.
 * (Comment previously said !11/0; the 0xdd dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before any
       FPU state is modified. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0 with masked invalid-operation: store integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9356
9357
/** Opcode 0xdd !11/2.
 * FST m64real - store ST0 to memory as a 64-bit real (no pop).
 * (Comment previously said !11/0; the 0xdd dispatcher routes reg=2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0 with masked invalid-operation: store the QNaN indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9391
9392
9393
9394
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64real but pops the register stack afterwards.
 * (Comment previously said !11/0; the 0xdd dispatcher routes reg=3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9428
9429
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state from memory; deferred to
 * a C implementation. (Comment previously said !11/0; dispatcher uses reg=4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    /* NOTE(review): presumably unreachable - IEM_MC_CALL_CIMPL_3 appears to
       return the cimpl status itself; confirm before removing. */
    return VINF_SUCCESS;
}
9447
9448
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory; deferred to a C
 * implementation. (Comment previously said !11/0; dispatcher uses reg=6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    /* NOTE(review): presumably unreachable - see iemOp_frstor. */
    return VINF_SUCCESS;

}
9467
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory.
 * (Comment previously said !11/0; the 0xdd dispatcher routes reg=7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* No IEM_MC_MAYBE_RAISE_FPU_XCPT: FNSTSW is a no-wait instruction and
       must not trap on pending FPU exceptions. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
9491
9492
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the register as empty without changing its contents or
 * the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* Only FOP/FIP are updated; no result is stored. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9513
9514
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into ST(i).
 * (Comment previously said 11/1; the 0xdd dispatcher routes reg=2 here,
 * reg=1 being the reserved FXCH alias.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9538
9539
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST0 with ST(i).
 * (Comment previously said 11/3; the 0xdd dispatcher routes reg=4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9546
9547
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST0 with ST(i), then pop.
 * (Comment previously said 11/4; the 0xdd dispatcher routes reg=5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9554
9555
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd: register forms are FFREE/FST/FSTP/FUCOM(P); memory forms
 * use a 64-bit real or integer operand plus FRSTOR/FNSAVE/FNSTSW.
 * Dispatches on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9594
9595
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add ST0 to ST(i), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9602
9603
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST0 - multiply, store in ST(i), then pop.
 * (Comment previously said 11/0; the 0xde dispatcher routes reg=1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9610
9611
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop both (the only valid encoding of
 * 0xde reg=3). */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9618
9619
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9626
9627
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract ST0 from ST(i), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9634
9635
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9642
9643
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide ST(i) by ST0, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9650
9651
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         Mod R/M byte (memory form, selects the m16 operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        /* Result always goes to ST0 for the integer-operand forms. */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9687
9688
/** Opcode 0xde !11/0.
 * FIADD m16int - add the 16-bit integer memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9695
9696
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiply ST0 by the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9703
9704
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST0 with the 16-bit integer memory operand; only
 * the FSW condition codes are updated, no stack change. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX = no destination stack register to flag. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9736
9737
/** Opcode 0xde !11/3.
 * FICOMP m16int - like FICOM m16int but pops the register stack afterwards
 * (same comparison worker, differs only in the FSW commit/pop). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9769
9770
/** Opcode 0xde !11/4.
 * FISUB m16int - subtract the 16-bit integer memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9777
9778
/** Opcode 0xde !11/5.
 * FISUBR m16int - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9785
9786
/** Opcode 0xde !11/6.
 * FIDIV m16int - divide ST0 by the 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9793
9794
/** Opcode 0xde !11/7.
 * FIDIVR m16int - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9801
9802
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: register forms are the pop variants (FADDP etc.) storing
 * into ST(i); memory forms use a 16-bit integer operand with ST0 as
 * destination. Reg=3 is only valid for the specific FCOMPP encoding 0xd9.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP) for later FSTENV/FSAVE reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9843
9844
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE ST(i) followed by
 * FINCSTP (i.e. the register is tagged empty and the stack is popped without
 * storing anything). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop": advance TOP without storing a result. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9865
9866
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    /* No IEM_MC_MAYBE_RAISE_FPU_XCPT: FNSTSW is a no-wait instruction and
       must not trap on pending FPU exceptions. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9882
9883
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - unordered compare into EFLAGS, then pop.
 * NOTE(review): shares iemAImpl_fcomi_r80_by_r80 with FCOMIP; presumably the
 * worker covers the FUCOMI unordered-operand behavior too - confirm. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9890
9891
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - compare into EFLAGS, then pop; deferred to the shared
 * fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9898
9899
/** Opcode 0xdf !11/0.
 * FILD m16int - convert the 16-bit integer memory operand to R80 and push. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that becomes the new ST0 after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9930
9931
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST0 as a 16-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF is raised before any
       FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0 with masked invalid-operation: store integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9965
9966
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST0 as a 16-bit integer (rounded per FCW.RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0 with masked invalid-operation: store integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10000
10001
/** Opcode 0xdf !11/3.
 * FISTP m16int - like FIST m16int but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10035
10036
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    /* FBLD m80bcd: load an 80-bit packed BCD value from memory and push it
       onto the FPU register stack as a real. */
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before touching the FPU so memory faults are restartable. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7)'s physical slot; it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10067
10068
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* FILD m64int: load a 64-bit signed integer from memory and push it onto
       the FPU register stack as a real. */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before touching the FPU so memory faults are restartable. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7)'s physical slot; it must be empty or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10099
10100
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    /* FBSTP m80bcd: store ST(0) to memory as an 80-bit packed BCD value, then pop. */
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; uses the _EX mapper to pass an explicit size and
       alignment (7 here - NOTE(review): looks deliberate for this odd-sized
       type, confirm against the mapper's alignment-mask semantics). */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the BCD indefinite only when FCW.IM is set. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10134
10135
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64int: store ST(0) to memory as a 64-bit signed integer, then pop. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination first so memory access faults are raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite only when FCW.IM is set. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10169
10170
10171/**
10172 * @opcode 0xdf
10173 */
10174FNIEMOP_DEF(iemOp_EscF7)
10175{
10176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10177 if (IEM_IS_MODRM_REG_MODE(bRm))
10178 {
10179 switch (IEM_GET_MODRM_REG_8(bRm))
10180 {
10181 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10182 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10183 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10184 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10185 case 4: if (bRm == 0xe0)
10186 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10187 return IEMOP_RAISE_INVALID_OPCODE();
10188 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10189 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10190 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10192 }
10193 }
10194 else
10195 {
10196 switch (IEM_GET_MODRM_REG_8(bRm))
10197 {
10198 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10199 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10200 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10201 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10202 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10203 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10204 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10205 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10207 }
10208 }
10209}
10210
10211
10212/**
10213 * @opcode 0xe0
10214 */
10215FNIEMOP_DEF(iemOp_loopne_Jb)
10216{
10217 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10218 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10221
10222 switch (pVCpu->iem.s.enmEffAddrMode)
10223 {
10224 case IEMMODE_16BIT:
10225 IEM_MC_BEGIN(0,0);
10226 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10227 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10228 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10229 } IEM_MC_ELSE() {
10230 IEM_MC_ADVANCE_RIP_AND_FINISH();
10231 } IEM_MC_ENDIF();
10232 IEM_MC_END();
10233 break;
10234
10235 case IEMMODE_32BIT:
10236 IEM_MC_BEGIN(0,0);
10237 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10238 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10239 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10240 } IEM_MC_ELSE() {
10241 IEM_MC_ADVANCE_RIP_AND_FINISH();
10242 } IEM_MC_ENDIF();
10243 IEM_MC_END();
10244 break;
10245
10246 case IEMMODE_64BIT:
10247 IEM_MC_BEGIN(0,0);
10248 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10249 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10250 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10251 } IEM_MC_ELSE() {
10252 IEM_MC_ADVANCE_RIP_AND_FINISH();
10253 } IEM_MC_ENDIF();
10254 IEM_MC_END();
10255 break;
10256
10257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10258 }
10259}
10260
10261
10262/**
10263 * @opcode 0xe1
10264 */
10265FNIEMOP_DEF(iemOp_loope_Jb)
10266{
10267 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10268 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10270 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10271
10272 switch (pVCpu->iem.s.enmEffAddrMode)
10273 {
10274 case IEMMODE_16BIT:
10275 IEM_MC_BEGIN(0,0);
10276 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10277 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10278 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10279 } IEM_MC_ELSE() {
10280 IEM_MC_ADVANCE_RIP_AND_FINISH();
10281 } IEM_MC_ENDIF();
10282 IEM_MC_END();
10283 break;
10284
10285 case IEMMODE_32BIT:
10286 IEM_MC_BEGIN(0,0);
10287 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10288 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10289 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10290 } IEM_MC_ELSE() {
10291 IEM_MC_ADVANCE_RIP_AND_FINISH();
10292 } IEM_MC_ENDIF();
10293 IEM_MC_END();
10294 break;
10295
10296 case IEMMODE_64BIT:
10297 IEM_MC_BEGIN(0,0);
10298 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10299 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10300 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10301 } IEM_MC_ELSE() {
10302 IEM_MC_ADVANCE_RIP_AND_FINISH();
10303 } IEM_MC_ENDIF();
10304 IEM_MC_END();
10305 break;
10306
10307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10308 }
10309}
10310
10311
10312/**
10313 * @opcode 0xe2
10314 */
10315FNIEMOP_DEF(iemOp_loop_Jb)
10316{
10317 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10318 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10321
10322 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
10323 * using the 32-bit operand size override. How can that be restarted? See
10324 * weird pseudo code in intel manual. */
10325
10326 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10327 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10328 * the loop causes guest crashes, but when logging it's nice to skip a few million
10329 * lines of useless output. */
10330#if defined(LOG_ENABLED)
10331 if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
10332 switch (pVCpu->iem.s.enmEffAddrMode)
10333 {
10334 case IEMMODE_16BIT:
10335 IEM_MC_BEGIN(0,0);
10336 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10337 IEM_MC_ADVANCE_RIP_AND_FINISH();
10338 IEM_MC_END();
10339 break;
10340
10341 case IEMMODE_32BIT:
10342 IEM_MC_BEGIN(0,0);
10343 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10344 IEM_MC_ADVANCE_RIP_AND_FINISH();
10345 IEM_MC_END();
10346 break;
10347
10348 case IEMMODE_64BIT:
10349 IEM_MC_BEGIN(0,0);
10350 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10351 IEM_MC_ADVANCE_RIP_AND_FINISH();
10352 IEM_MC_END();
10353 break;
10354
10355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10356 }
10357#endif
10358
10359 switch (pVCpu->iem.s.enmEffAddrMode)
10360 {
10361 case IEMMODE_16BIT:
10362 IEM_MC_BEGIN(0,0);
10363
10364 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10365 IEM_MC_IF_CX_IS_NZ() {
10366 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10367 } IEM_MC_ELSE() {
10368 IEM_MC_ADVANCE_RIP_AND_FINISH();
10369 } IEM_MC_ENDIF();
10370 IEM_MC_END();
10371 break;
10372
10373 case IEMMODE_32BIT:
10374 IEM_MC_BEGIN(0,0);
10375 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10376 IEM_MC_IF_ECX_IS_NZ() {
10377 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10378 } IEM_MC_ELSE() {
10379 IEM_MC_ADVANCE_RIP_AND_FINISH();
10380 } IEM_MC_ENDIF();
10381 IEM_MC_END();
10382 break;
10383
10384 case IEMMODE_64BIT:
10385 IEM_MC_BEGIN(0,0);
10386 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10387 IEM_MC_IF_RCX_IS_NZ() {
10388 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10389 } IEM_MC_ELSE() {
10390 IEM_MC_ADVANCE_RIP_AND_FINISH();
10391 } IEM_MC_ENDIF();
10392 IEM_MC_END();
10393 break;
10394
10395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10396 }
10397}
10398
10399
10400/**
10401 * @opcode 0xe3
10402 */
10403FNIEMOP_DEF(iemOp_jecxz_Jb)
10404{
10405 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10406 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10408 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10409
10410 switch (pVCpu->iem.s.enmEffAddrMode)
10411 {
10412 case IEMMODE_16BIT:
10413 IEM_MC_BEGIN(0,0);
10414 IEM_MC_IF_CX_IS_NZ() {
10415 IEM_MC_ADVANCE_RIP_AND_FINISH();
10416 } IEM_MC_ELSE() {
10417 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10418 } IEM_MC_ENDIF();
10419 IEM_MC_END();
10420 break;
10421
10422 case IEMMODE_32BIT:
10423 IEM_MC_BEGIN(0,0);
10424 IEM_MC_IF_ECX_IS_NZ() {
10425 IEM_MC_ADVANCE_RIP_AND_FINISH();
10426 } IEM_MC_ELSE() {
10427 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10428 } IEM_MC_ENDIF();
10429 IEM_MC_END();
10430 break;
10431
10432 case IEMMODE_64BIT:
10433 IEM_MC_BEGIN(0,0);
10434 IEM_MC_IF_RCX_IS_NZ() {
10435 IEM_MC_ADVANCE_RIP_AND_FINISH();
10436 } IEM_MC_ELSE() {
10437 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10438 } IEM_MC_ENDIF();
10439 IEM_MC_END();
10440 break;
10441
10442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10443 }
10444}
10445
10446
/** Opcode 0xe4 - IN AL,Ib: read one byte from the immediate I/O port into AL.
 *  Deferred to the C implementation (port access checks, I/O APIs). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10455
10456
/** Opcode 0xe5 - IN eAX,Ib: read a word/dword (per effective operand size)
 *  from the immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10465
10466
/** Opcode 0xe6 - OUT Ib,AL: write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10475
10476
/** Opcode 0xe7 - OUT Ib,eAX: write AX/EAX (per effective operand size) to the
 *  immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10485
10486
10487/**
10488 * @opcode 0xe8
10489 */
10490FNIEMOP_DEF(iemOp_call_Jv)
10491{
10492 IEMOP_MNEMONIC(call_Jv, "call Jv");
10493 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10494 switch (pVCpu->iem.s.enmEffOpSize)
10495 {
10496 case IEMMODE_16BIT:
10497 {
10498 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10499 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10500 }
10501
10502 case IEMMODE_32BIT:
10503 {
10504 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10505 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10506 }
10507
10508 case IEMMODE_64BIT:
10509 {
10510 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10511 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10512 }
10513
10514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10515 }
10516}
10517
10518
10519/**
10520 * @opcode 0xe9
10521 */
10522FNIEMOP_DEF(iemOp_jmp_Jv)
10523{
10524 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10525 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10526 switch (pVCpu->iem.s.enmEffOpSize)
10527 {
10528 case IEMMODE_16BIT:
10529 {
10530 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10531 IEM_MC_BEGIN(0, 0);
10532 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
10533 IEM_MC_END();
10534 return VINF_SUCCESS;
10535 }
10536
10537 case IEMMODE_64BIT:
10538 case IEMMODE_32BIT:
10539 {
10540 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10541 IEM_MC_BEGIN(0, 0);
10542 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
10543 IEM_MC_END();
10544 return VINF_SUCCESS;
10545 }
10546
10547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10548 }
10549}
10550
10551
10552/**
10553 * @opcode 0xea
10554 */
10555FNIEMOP_DEF(iemOp_jmp_Ap)
10556{
10557 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10558 IEMOP_HLP_NO_64BIT();
10559
10560 /* Decode the far pointer address and pass it on to the far call C implementation. */
10561 uint32_t offSeg;
10562 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10563 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10564 else
10565 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10566 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10568 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10569}
10570
10571
10572/**
10573 * @opcode 0xeb
10574 */
10575FNIEMOP_DEF(iemOp_jmp_Jb)
10576{
10577 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10578 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
10581
10582 IEM_MC_BEGIN(0, 0);
10583 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
10584 IEM_MC_END();
10585 return VINF_SUCCESS;
10586}
10587
10588
/** Opcode 0xec - IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10596
10597
/** Opcode 0xed - IN eAX,DX: read a word/dword (per effective operand size)
 *  from the I/O port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10605
10606
/** Opcode 0xee - OUT DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10614
10615
/** Opcode 0xef - OUT DX,eAX: write AX/EAX (per effective operand size) to the
 *  I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10623
10624
10625/**
10626 * @opcode 0xf0
10627 */
10628FNIEMOP_DEF(iemOp_lock)
10629{
10630 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10631 if (!pVCpu->iem.s.fDisregardLock)
10632 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10633
10634 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10635 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10636}
10637
10638
10639/**
10640 * @opcode 0xf1
10641 */
10642FNIEMOP_DEF(iemOp_int1)
10643{
10644 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10645 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
10646 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
10647 * LOADALL memo. Needs some testing. */
10648 IEMOP_HLP_MIN_386();
10649 /** @todo testcase! */
10650 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10651}
10652
10653
10654/**
10655 * @opcode 0xf2
10656 */
10657FNIEMOP_DEF(iemOp_repne)
10658{
10659 /* This overrides any previous REPE prefix. */
10660 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10661 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10662 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10663
10664 /* For the 4 entry opcode tables, REPNZ overrides any previous
10665 REPZ and operand size prefixes. */
10666 pVCpu->iem.s.idxPrefix = 3;
10667
10668 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10669 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10670}
10671
10672
10673/**
10674 * @opcode 0xf3
10675 */
10676FNIEMOP_DEF(iemOp_repe)
10677{
10678 /* This overrides any previous REPNE prefix. */
10679 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10680 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10681 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10682
10683 /* For the 4 entry opcode tables, REPNZ overrides any previous
10684 REPNZ and operand size prefixes. */
10685 pVCpu->iem.s.idxPrefix = 2;
10686
10687 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10688 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10689}
10690
10691
10692/**
10693 * @opcode 0xf4
10694 */
10695FNIEMOP_DEF(iemOp_hlt)
10696{
10697 IEMOP_MNEMONIC(hlt, "hlt");
10698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10699 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10700}
10701
10702
10703/**
10704 * @opcode 0xf5
10705 */
10706FNIEMOP_DEF(iemOp_cmc)
10707{
10708 IEMOP_MNEMONIC(cmc, "cmc");
10709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10710 IEM_MC_BEGIN(0, 0);
10711 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10712 IEM_MC_ADVANCE_RIP_AND_FINISH();
10713 IEM_MC_END();
10714}
10715
10716
10717/**
10718 * Common implementation of 'inc/dec/not/neg Eb'.
10719 *
10720 * @param bRm The RM byte.
10721 * @param pImpl The instruction implementation.
10722 */
10723FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10724{
10725 if (IEM_IS_MODRM_REG_MODE(bRm))
10726 {
10727 /* register access */
10728 IEM_MC_BEGIN(2, 0);
10729 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10730 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10731 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10732 IEM_MC_REF_EFLAGS(pEFlags);
10733 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10734 IEM_MC_ADVANCE_RIP_AND_FINISH();
10735 IEM_MC_END();
10736 }
10737 else
10738 {
10739 /* memory access. */
10740 IEM_MC_BEGIN(2, 2);
10741 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10742 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10744
10745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10746 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10747 IEM_MC_FETCH_EFLAGS(EFlags);
10748 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10749 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10750 else
10751 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10752
10753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10754 IEM_MC_COMMIT_EFLAGS(EFlags);
10755 IEM_MC_ADVANCE_RIP_AND_FINISH();
10756 IEM_MC_END();
10757 }
10758}
10759
10760
10761/**
10762 * Common implementation of 'inc/dec/not/neg Ev'.
10763 *
10764 * @param bRm The RM byte.
10765 * @param pImpl The instruction implementation.
10766 */
10767FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10768{
10769 /* Registers are handled by a common worker. */
10770 if (IEM_IS_MODRM_REG_MODE(bRm))
10771 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));
10772
10773 /* Memory we do here. */
10774 switch (pVCpu->iem.s.enmEffOpSize)
10775 {
10776 case IEMMODE_16BIT:
10777 IEM_MC_BEGIN(2, 2);
10778 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10781
10782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10783 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10784 IEM_MC_FETCH_EFLAGS(EFlags);
10785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10786 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10787 else
10788 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10789
10790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10791 IEM_MC_COMMIT_EFLAGS(EFlags);
10792 IEM_MC_ADVANCE_RIP_AND_FINISH();
10793 IEM_MC_END();
10794 break;
10795
10796 case IEMMODE_32BIT:
10797 IEM_MC_BEGIN(2, 2);
10798 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10799 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10801
10802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10803 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10804 IEM_MC_FETCH_EFLAGS(EFlags);
10805 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10806 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10807 else
10808 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10809
10810 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10811 IEM_MC_COMMIT_EFLAGS(EFlags);
10812 IEM_MC_ADVANCE_RIP_AND_FINISH();
10813 IEM_MC_END();
10814 break;
10815
10816 case IEMMODE_64BIT:
10817 IEM_MC_BEGIN(2, 2);
10818 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10819 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10821
10822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10823 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10824 IEM_MC_FETCH_EFLAGS(EFlags);
10825 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10826 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10827 else
10828 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10829
10830 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10831 IEM_MC_COMMIT_EFLAGS(EFlags);
10832 IEM_MC_ADVANCE_RIP_AND_FINISH();
10833 IEM_MC_END();
10834 break;
10835
10836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10837 }
10838}
10839
10840
/** Opcode 0xf6 /0 - TEST Eb,Ib: AND the byte operand with the immediate,
 *  setting flags only (the destination is never written, hence the
 *  read-only memory mapping below). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing immediate byte must be accounted for in the
           effective address calculation (RIP-relative addressing). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10886
10887
/** Opcode 0xf7 /0 - TEST Ev,Iv: AND the word/dword/qword operand with the
 *  immediate, setting flags only (the destination is never written, hence the
 *  read-only memory mappings below).  The 64-bit immediate is a sign-extended
 *  32-bit value, as usual for this encoding. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Account for the trailing immediate size (2/4/4 bytes) in the
                   effective address calculation (RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11027
11028
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized mul/imul/div/idiv encodings of group 3.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pfnU8   The assembly helper to invoke; AX is referenced as the
 *                  implicit operand and the helper returns zero on success,
 *                  non-zero to signal a divide error (\#DE).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero return from the helper means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        /* Decoding isn't done until the effective address has been calculated. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
11079
11080
11081/** Opcode 0xf7 /4, /5, /6 and /7. */
11082FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11083{
11084 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11085
11086 if (IEM_IS_MODRM_REG_MODE(bRm))
11087 {
11088 /* register access */
11089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11090 switch (pVCpu->iem.s.enmEffOpSize)
11091 {
11092 case IEMMODE_16BIT:
11093 {
11094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11095 IEM_MC_BEGIN(4, 1);
11096 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11097 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11098 IEM_MC_ARG(uint16_t, u16Value, 2);
11099 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11100 IEM_MC_LOCAL(int32_t, rc);
11101
11102 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11103 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11104 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11105 IEM_MC_REF_EFLAGS(pEFlags);
11106 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11107 IEM_MC_IF_LOCAL_IS_Z(rc) {
11108 IEM_MC_ADVANCE_RIP_AND_FINISH();
11109 } IEM_MC_ELSE() {
11110 IEM_MC_RAISE_DIVIDE_ERROR();
11111 } IEM_MC_ENDIF();
11112
11113 IEM_MC_END();
11114 break;
11115 }
11116
11117 case IEMMODE_32BIT:
11118 {
11119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11120 IEM_MC_BEGIN(4, 1);
11121 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11122 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11123 IEM_MC_ARG(uint32_t, u32Value, 2);
11124 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11125 IEM_MC_LOCAL(int32_t, rc);
11126
11127 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11128 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11129 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11130 IEM_MC_REF_EFLAGS(pEFlags);
11131 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11132 IEM_MC_IF_LOCAL_IS_Z(rc) {
11133 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11134 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11135 IEM_MC_ADVANCE_RIP_AND_FINISH();
11136 } IEM_MC_ELSE() {
11137 IEM_MC_RAISE_DIVIDE_ERROR();
11138 } IEM_MC_ENDIF();
11139
11140 IEM_MC_END();
11141 break;
11142 }
11143
11144 case IEMMODE_64BIT:
11145 {
11146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11147 IEM_MC_BEGIN(4, 1);
11148 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11149 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11150 IEM_MC_ARG(uint64_t, u64Value, 2);
11151 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11152 IEM_MC_LOCAL(int32_t, rc);
11153
11154 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11155 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11156 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11157 IEM_MC_REF_EFLAGS(pEFlags);
11158 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11159 IEM_MC_IF_LOCAL_IS_Z(rc) {
11160 IEM_MC_ADVANCE_RIP_AND_FINISH();
11161 } IEM_MC_ELSE() {
11162 IEM_MC_RAISE_DIVIDE_ERROR();
11163 } IEM_MC_ENDIF();
11164
11165 IEM_MC_END();
11166 break;
11167 }
11168
11169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11170 }
11171 }
11172 else
11173 {
11174 /* memory access. */
11175 switch (pVCpu->iem.s.enmEffOpSize)
11176 {
11177 case IEMMODE_16BIT:
11178 {
11179 IEM_MC_BEGIN(4, 2);
11180 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11181 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11182 IEM_MC_ARG(uint16_t, u16Value, 2);
11183 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11185 IEM_MC_LOCAL(int32_t, rc);
11186
11187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11189 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11190 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11191 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11192 IEM_MC_REF_EFLAGS(pEFlags);
11193 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11194 IEM_MC_IF_LOCAL_IS_Z(rc) {
11195 IEM_MC_ADVANCE_RIP_AND_FINISH();
11196 } IEM_MC_ELSE() {
11197 IEM_MC_RAISE_DIVIDE_ERROR();
11198 } IEM_MC_ENDIF();
11199
11200 IEM_MC_END();
11201 break;
11202 }
11203
11204 case IEMMODE_32BIT:
11205 {
11206 IEM_MC_BEGIN(4, 2);
11207 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11208 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11209 IEM_MC_ARG(uint32_t, u32Value, 2);
11210 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11212 IEM_MC_LOCAL(int32_t, rc);
11213
11214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11216 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11217 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11218 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11219 IEM_MC_REF_EFLAGS(pEFlags);
11220 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11221 IEM_MC_IF_LOCAL_IS_Z(rc) {
11222 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11223 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11224 IEM_MC_ADVANCE_RIP_AND_FINISH();
11225 } IEM_MC_ELSE() {
11226 IEM_MC_RAISE_DIVIDE_ERROR();
11227 } IEM_MC_ENDIF();
11228
11229 IEM_MC_END();
11230 break;
11231 }
11232
11233 case IEMMODE_64BIT:
11234 {
11235 IEM_MC_BEGIN(4, 2);
11236 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11237 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11238 IEM_MC_ARG(uint64_t, u64Value, 2);
11239 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11241 IEM_MC_LOCAL(int32_t, rc);
11242
11243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11245 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11246 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11247 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11248 IEM_MC_REF_EFLAGS(pEFlags);
11249 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11250 IEM_MC_IF_LOCAL_IS_Z(rc) {
11251 IEM_MC_ADVANCE_RIP_AND_FINISH();
11252 } IEM_MC_ELSE() {
11253 IEM_MC_RAISE_DIVIDE_ERROR();
11254 } IEM_MC_ENDIF();
11255
11256 IEM_MC_END();
11257 break;
11258 }
11259
11260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11261 }
11262 }
11263}
11264
11265/**
11266 * @opcode 0xf6
11267 */
11268FNIEMOP_DEF(iemOp_Grp3_Eb)
11269{
11270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11271 switch (IEM_GET_MODRM_REG_8(bRm))
11272 {
11273 case 0:
11274 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11275 case 1:
11276/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11277 return IEMOP_RAISE_INVALID_OPCODE();
11278 case 2:
11279 IEMOP_MNEMONIC(not_Eb, "not Eb");
11280 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11281 case 3:
11282 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11283 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11284 case 4:
11285 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11286 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11287 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
11288 case 5:
11289 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11290 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11291 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
11292 case 6:
11293 IEMOP_MNEMONIC(div_Eb, "div Eb");
11294 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11295 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
11296 case 7:
11297 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11298 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11299 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
11300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11301 }
11302}
11303
11304
11305/**
11306 * @opcode 0xf7
11307 */
11308FNIEMOP_DEF(iemOp_Grp3_Ev)
11309{
11310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11311 switch (IEM_GET_MODRM_REG_8(bRm))
11312 {
11313 case 0:
11314 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11315 case 1:
11316/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11317 return IEMOP_RAISE_INVALID_OPCODE();
11318 case 2:
11319 IEMOP_MNEMONIC(not_Ev, "not Ev");
11320 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11321 case 3:
11322 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11323 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11324 case 4:
11325 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11326 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11327 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
11328 case 5:
11329 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11330 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11331 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
11332 case 6:
11333 IEMOP_MNEMONIC(div_Ev, "div Ev");
11334 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11335 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
11336 case 7:
11337 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11338 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11339 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
11340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11341 }
11342}
11343
11344
11345/**
11346 * @opcode 0xf8
11347 */
11348FNIEMOP_DEF(iemOp_clc)
11349{
11350 IEMOP_MNEMONIC(clc, "clc");
11351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11352 IEM_MC_BEGIN(0, 0);
11353 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11354 IEM_MC_ADVANCE_RIP_AND_FINISH();
11355 IEM_MC_END();
11356}
11357
11358
11359/**
11360 * @opcode 0xf9
11361 */
11362FNIEMOP_DEF(iemOp_stc)
11363{
11364 IEMOP_MNEMONIC(stc, "stc");
11365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11366 IEM_MC_BEGIN(0, 0);
11367 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11368 IEM_MC_ADVANCE_RIP_AND_FINISH();
11369 IEM_MC_END();
11370}
11371
11372
11373/**
11374 * @opcode 0xfa
11375 */
11376FNIEMOP_DEF(iemOp_cli)
11377{
11378 IEMOP_MNEMONIC(cli, "cli");
11379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11380 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11381}
11382
11383
/**
 * @opcode 0xfb
 *
 * sti - deferred to a C implementation worker, same as cli above.
 * (Opcode value per the g_apfnOneByteMap 0xf8 row.)
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11390
11391
11392/**
11393 * @opcode 0xfc
11394 */
11395FNIEMOP_DEF(iemOp_cld)
11396{
11397 IEMOP_MNEMONIC(cld, "cld");
11398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11399 IEM_MC_BEGIN(0, 0);
11400 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11401 IEM_MC_ADVANCE_RIP_AND_FINISH();
11402 IEM_MC_END();
11403}
11404
11405
11406/**
11407 * @opcode 0xfd
11408 */
11409FNIEMOP_DEF(iemOp_std)
11410{
11411 IEMOP_MNEMONIC(std, "std");
11412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11413 IEM_MC_BEGIN(0, 0);
11414 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11415 IEM_MC_ADVANCE_RIP_AND_FINISH();
11416 IEM_MC_END();
11417}
11418
11419
11420/**
11421 * @opcode 0xfe
11422 */
11423FNIEMOP_DEF(iemOp_Grp4)
11424{
11425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11426 switch (IEM_GET_MODRM_REG_8(bRm))
11427 {
11428 case 0:
11429 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11430 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11431 case 1:
11432 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11433 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11434 default:
11435 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11436 return IEMOP_RAISE_INVALID_OPCODE();
11437 }
11438}
11439
11440
11441/**
11442 * Opcode 0xff /2.
11443 * @param bRm The RM byte.
11444 */
11445FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11446{
11447 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11448 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11449
11450 if (IEM_IS_MODRM_REG_MODE(bRm))
11451 {
11452 /* The new RIP is taken from a register. */
11453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11454 switch (pVCpu->iem.s.enmEffOpSize)
11455 {
11456 case IEMMODE_16BIT:
11457 IEM_MC_BEGIN(1, 0);
11458 IEM_MC_ARG(uint16_t, u16Target, 0);
11459 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11460 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11461 IEM_MC_END()
11462 return VINF_SUCCESS;
11463
11464 case IEMMODE_32BIT:
11465 IEM_MC_BEGIN(1, 0);
11466 IEM_MC_ARG(uint32_t, u32Target, 0);
11467 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11468 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11469 IEM_MC_END()
11470 return VINF_SUCCESS;
11471
11472 case IEMMODE_64BIT:
11473 IEM_MC_BEGIN(1, 0);
11474 IEM_MC_ARG(uint64_t, u64Target, 0);
11475 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11476 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11477 IEM_MC_END()
11478 return VINF_SUCCESS;
11479
11480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11481 }
11482 }
11483 else
11484 {
11485 /* The new RIP is taken from a register. */
11486 switch (pVCpu->iem.s.enmEffOpSize)
11487 {
11488 case IEMMODE_16BIT:
11489 IEM_MC_BEGIN(1, 1);
11490 IEM_MC_ARG(uint16_t, u16Target, 0);
11491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11494 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11495 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11496 IEM_MC_END()
11497 return VINF_SUCCESS;
11498
11499 case IEMMODE_32BIT:
11500 IEM_MC_BEGIN(1, 1);
11501 IEM_MC_ARG(uint32_t, u32Target, 0);
11502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11505 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11506 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11507 IEM_MC_END()
11508 return VINF_SUCCESS;
11509
11510 case IEMMODE_64BIT:
11511 IEM_MC_BEGIN(1, 1);
11512 IEM_MC_ARG(uint64_t, u64Target, 0);
11513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11516 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11517 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11518 IEM_MC_END()
11519 return VINF_SUCCESS;
11520
11521 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11522 }
11523 }
11524}
11525
11526typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11527
/**
 * Common worker for callf/jmpf with a far pointer loaded from memory (Ep).
 *
 * The operand is offset-first in memory: the offset is fetched at the
 * effective address and the 16-bit selector immediately after it (at
 * displacement 2, 4 or 8 depending on the operand size).
 *
 * @param   bRm         The ModR/M byte; register forms raise \#UD.
 * @param   pfnCImpl    C worker performing the far branch
 *                      (iemCImpl_callf or iemCImpl_FarJmp).
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint16_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint32_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit far pointers are an Intel-only extension (see above). */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t,        u16Sel,                         0);
            IEM_MC_ARG(uint64_t,        offSeg,                         1);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11592
11593
11594/**
11595 * Opcode 0xff /3.
11596 * @param bRm The RM byte.
11597 */
11598FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11599{
11600 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11601 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11602}
11603
11604
11605/**
11606 * Opcode 0xff /4.
11607 * @param bRm The RM byte.
11608 */
11609FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11610{
11611 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11612 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11613
11614 if (IEM_IS_MODRM_REG_MODE(bRm))
11615 {
11616 /* The new RIP is taken from a register. */
11617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11618 switch (pVCpu->iem.s.enmEffOpSize)
11619 {
11620 case IEMMODE_16BIT:
11621 IEM_MC_BEGIN(0, 1);
11622 IEM_MC_LOCAL(uint16_t, u16Target);
11623 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11624 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
11625 IEM_MC_END()
11626 return VINF_SUCCESS;
11627
11628 case IEMMODE_32BIT:
11629 IEM_MC_BEGIN(0, 1);
11630 IEM_MC_LOCAL(uint32_t, u32Target);
11631 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11632 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
11633 IEM_MC_END()
11634 return VINF_SUCCESS;
11635
11636 case IEMMODE_64BIT:
11637 IEM_MC_BEGIN(0, 1);
11638 IEM_MC_LOCAL(uint64_t, u64Target);
11639 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
11640 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
11641 IEM_MC_END()
11642 return VINF_SUCCESS;
11643
11644 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11645 }
11646 }
11647 else
11648 {
11649 /* The new RIP is taken from a memory location. */
11650 switch (pVCpu->iem.s.enmEffOpSize)
11651 {
11652 case IEMMODE_16BIT:
11653 IEM_MC_BEGIN(0, 2);
11654 IEM_MC_LOCAL(uint16_t, u16Target);
11655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11658 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11659 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
11660 IEM_MC_END()
11661 return VINF_SUCCESS;
11662
11663 case IEMMODE_32BIT:
11664 IEM_MC_BEGIN(0, 2);
11665 IEM_MC_LOCAL(uint32_t, u32Target);
11666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11669 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11670 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
11671 IEM_MC_END()
11672 return VINF_SUCCESS;
11673
11674 case IEMMODE_64BIT:
11675 IEM_MC_BEGIN(0, 2);
11676 IEM_MC_LOCAL(uint64_t, u64Target);
11677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11680 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11681 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
11682 IEM_MC_END()
11683 return VINF_SUCCESS;
11684
11685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11686 }
11687 }
11688}
11689
11690
11691/**
11692 * Opcode 0xff /5.
11693 * @param bRm The RM byte.
11694 */
11695FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11696{
11697 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11698 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11699}
11700
11701
11702/**
11703 * Opcode 0xff /6.
11704 * @param bRm The RM byte.
11705 */
11706FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11707{
11708 IEMOP_MNEMONIC(push_Ev, "push Ev");
11709
11710 /* Registers are handled by a common worker. */
11711 if (IEM_IS_MODRM_REG_MODE(bRm))
11712 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
11713
11714 /* Memory we do here. */
11715 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11716 switch (pVCpu->iem.s.enmEffOpSize)
11717 {
11718 case IEMMODE_16BIT:
11719 IEM_MC_BEGIN(0, 2);
11720 IEM_MC_LOCAL(uint16_t, u16Src);
11721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11724 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11725 IEM_MC_PUSH_U16(u16Src);
11726 IEM_MC_ADVANCE_RIP_AND_FINISH();
11727 IEM_MC_END();
11728 break;
11729
11730 case IEMMODE_32BIT:
11731 IEM_MC_BEGIN(0, 2);
11732 IEM_MC_LOCAL(uint32_t, u32Src);
11733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11736 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11737 IEM_MC_PUSH_U32(u32Src);
11738 IEM_MC_ADVANCE_RIP_AND_FINISH();
11739 IEM_MC_END();
11740 break;
11741
11742 case IEMMODE_64BIT:
11743 IEM_MC_BEGIN(0, 2);
11744 IEM_MC_LOCAL(uint64_t, u64Src);
11745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11748 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11749 IEM_MC_PUSH_U64(u64Src);
11750 IEM_MC_ADVANCE_RIP_AND_FINISH();
11751 IEM_MC_END();
11752 break;
11753
11754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11755 }
11756}
11757
11758
11759/**
11760 * @opcode 0xff
11761 */
11762FNIEMOP_DEF(iemOp_Grp5)
11763{
11764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11765 switch (IEM_GET_MODRM_REG_8(bRm))
11766 {
11767 case 0:
11768 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11769 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11770 case 1:
11771 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11772 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11773 case 2:
11774 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11775 case 3:
11776 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11777 case 4:
11778 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11779 case 5:
11780 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11781 case 6:
11782 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11783 case 7:
11784 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11785 return IEMOP_RAISE_INVALID_OPCODE();
11786 }
11787 AssertFailedReturn(VERR_IEM_IPE_3);
11788}
11789
11790
11791
/**
 * The one-byte opcode dispatch map, indexed by the opcode byte (0x00..0xff).
 * Forward declared at the top of the file; group opcodes (0x80..0x83, 0xc0/c1,
 * 0xd0..0xd3, 0xf6/f7, 0xfe/ff) and the 0x0f escape dispatch further on the
 * ModR/M reg field or the next byte.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
11859
11860
11861/** @} */
11862
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette